diff --git a/.asf.yaml b/.asf.yaml index 3130630e80c6b..fac7a10c87f7e 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -20,13 +20,13 @@ github: description: "Apache Airflow - A platform to programmatically author, schedule, and monitor workflows" homepage: https://airflow.apache.org/ - # Social media preview image is not supported by Github API/asf.yaml, need to be uploaded - # manually in Github repository --> Settings --> click "Edit" in "Social preview" + # Social media preview image is not supported by GitHub API/asf.yaml, need to be uploaded + # manually in GitHub repository --> Settings --> click "Edit" in "Social preview" # See also: # https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/customizing-your-repositorys-social-media-preview # social_media_preview: docs/apache-airflow/img/logos/github_repository_social_image.png labels: - # Note that Github only supports <=20 labels/topics per repo! Pipeline will fail if you add more. + # Note that GitHub only supports <=20 labels/topics per repo! Pipeline will fail if you add more. - airflow - apache - apache-airflow @@ -62,6 +62,12 @@ github: merge: false rebase: false + pull_requests: + # allow auto-merge + allow_auto_merge: false + # auto-delete head branches after being merged + del_branch_on_merge: true + protected_branches: main: required_pull_request_reviews: @@ -128,11 +134,15 @@ github: required_approving_review_count: 1 required_linear_history: true required_signatures: false - v2-10-test: + v2-11-stable: + required_pull_request_reviews: + required_approving_review_count: 1 + required_linear_history: true + required_signatures: false + v3-0-stable: required_pull_request_reviews: required_approving_review_count: 1 required_linear_history: true - required_conversation_resolution: true required_signatures: false providers-fab/v1-5: required_pull_request_reviews: @@ -152,6 +162,7 @@ github: - cmarteepants - karenbraganz - gyli + - jroachgolf84 notifications: jobs: jobs@airflow.apache.org diff --git a/.dockerignore b/.dockerignore index c50ed5ae24ee6..96c42ac203186 100644 --- a/.dockerignore +++ b/.dockerignore @@ -38,6 +38,7 @@ !providers/ !task-sdk/ !airflow-ctl/ +!go-sdk/ # Add all "test" distributions !tests @@ -45,6 +46,8 @@ !docker-tests !helm-tests !kubernetes-tests +!task-sdk-tests +!shared/ # Add scripts so that we can use them inside the container !scripts diff --git a/.editorconfig b/.editorconfig index b3084d3f3aa07..bdaedb2630b0b 100644 --- a/.editorconfig +++ b/.editorconfig @@ -58,3 +58,7 @@ indent_size = 2 [*.json] indent_size = 4 + +[*.go] +indent_style = tab +max_line_length = 110 diff --git a/.github/.pre-commit-config.yaml b/.github/.pre-commit-config.yaml new file mode 100644 index 0000000000000..909f0c1cdca3c --- /dev/null +++ b/.github/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +--- +default_stages: [manual] +default_language_version: + python: python311 +minimum_pre_commit_version: '3.2.0' +repos: + - repo: https://github.com/eclipse-csi/octopin + rev: 21360742e352e87450f99e180fdfc2cf774a72a3 + hooks: + - id: pin-versions + name: Pin versions of dependencies in CI workflows (manual) + stages: ['manual'] diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7a97a52539cac..e42a7f52d1694 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -23,7 +23,7 @@ # API /airflow-core/src/airflow/api/ @ephraimbuddy @pierrejeambrun @rawwar @jason810496 -/airflow-core/src/airflow/api_fastapi/ @ephraimbuddy @pierrejeambrun @rawwar @jason810496 @bugraoz93 +/airflow-core/src/airflow/api_fastapi/ @ephraimbuddy @pierrejeambrun @rawwar @jason810496 @bugraoz93 @shubhamraj-git /airflow-core/src/airflow/api_fastapi/execution_api/ @ashb @kaxil @amoghrajesh # Airflow CTL @@ -33,7 +33,20 @@ /airflow-core/src/airflow/api_fastapi/auth/ @vincbeck # UI -/airflow-core/src/airflow/ui/ @bbovenzi @pierrejeambrun @ryanahamilton @jscheffl +/airflow-core/src/airflow/ui/ @bbovenzi @pierrejeambrun @ryanahamilton @jscheffl @shubhamraj-git + +# Translation Owners (i18n) +# Note: Non-committer engaged translators are listed in comments to prevent making the file syntax invalid +# See: https://github.com/apache/airflow/blob/main/airflow-core/src/airflow/ui/public/i18n/README.md#43-engaged-translator +airflow-core/src/airflow/ui/public/i18n/locales/ar/ @shahar1 @hussein-awala # + @ahmadtfarhan +airflow-core/src/airflow/ui/public/i18n/locales/de/ @jscheffl # + @TJaniF @m1racoli +airflow-core/src/airflow/ui/public/i18n/locales/es/ @bbovenzi # + @aoelvp94 +airflow-core/src/airflow/ui/public/i18n/locales/he/ @eladkal @shahar1 @romsharon98 # +@Dev-iL +airflow-core/src/airflow/ui/public/i18n/locales/ko/ @jscheffl @potiuk # + @choo121600 @kgw7401 @0ne-stone +airflow-core/src/airflow/ui/public/i18n/locales/nl/ @BasPH # + @DjVinnii +airflow-core/src/airflow/ui/public/i18n/locales/pl/ @potiuk @mobuchowski # + @kacpermuda +airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/ @Lee-W @jason810496 # + @RoyLee1224 @guan404ming +airflow-core/src/airflow/ui/public/i18n/locales/fr/ @pierrejeambrun @vincbeck # Security/Permissions /airflow-core/src/airflow/security/permissions.py @vincbeck @@ -69,6 +82,7 @@ /providers/edge3/ @jscheffl /providers/fab/ @vincbeck /providers/hashicorp/ @hussein-awala +/providers/keycloak/ @vincbeck @bugraoz93 /providers/openlineage/ @mobuchowski /providers/slack/ @eladkal /providers/smtp/ @hussein-awala @@ -77,7 +91,8 @@ # Dev tools /.github/workflows/ @potiuk @ashb @gopidesupavan -/dev/ @potiuk @ashb @jedcunningham @gopidesupavan +/dev/ @potiuk @ashb @jedcunningham @gopidesupavan @amoghrajesh +/dev/react-plugin-tools/ @pierrejeambrun @bbovenzi /docker-tests/ @potiuk @ashb @gopidesupavan @jason810496 /kubernetes-tests/ @potiuk @ashb @gopidesupavan @jason810496 /helm-tests/ @dstandish @jedcunningham @@ -109,4 +124,8 @@ ISSUE_TRIAGE_PROCESS.rst @eladkal /providers/fab/src/airflow-core/src/airflow/providers/fab/migrations/ @ephraimbuddy # AIP-72 - Task SDK +# Python SDK /task-sdk/ @ashb @kaxil @amoghrajesh + +# Golang SDK +/go-sdk/ @ashb @kaxil @amoghrajesh diff --git a/.github/ISSUE_TEMPLATE/1-airflow_bug_report.yml b/.github/ISSUE_TEMPLATE/1-airflow_bug_report.yml new file mode 100644 index 0000000000000..fcb1e7b5d23c6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/1-airflow_bug_report.yml
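Before the new issue-form hunks below, a brief aside on the `.github/.pre-commit-config.yaml` added above: because it sets `default_stages: [manual]`, the `pin-versions` hook never fires on ordinary commits and has to be requested explicitly. A minimal local invocation might look like this sketch, assuming pre-commit >= 3.2.0 (the declared minimum) is installed; the flags are standard pre-commit options:

```bash
# Run the manual-stage octopin hook against all files, using the workflow-specific config.
# Assumes pre-commit >= 3.2.0 is on PATH (the minimum the config declares).
pre-commit run pin-versions \
  --config .github/.pre-commit-config.yaml \
  --hook-stage manual \
  --all-files
```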
@@ -0,0 +1,144 @@ +--- +name: Airflow Bug report +description: Problems and issues with code in Apache Airflow core +labels: ["kind:bug", "area:core", "needs-triage"] +body: + - type: markdown + attributes: + # yamllint disable rule:line-length + value: " + + Thank you for finding the time to report the problem! + + We really appreciate the community's efforts to improve Airflow. + + Note, you do not need to create an issue if you have a change ready to submit! + + You can open a [pull request](https://github.com/apache/airflow/pulls) immediately instead. +
" + # yamllint enable rule:line-length + - type: dropdown + attributes: + label: Apache Airflow version + description: > + What Apache Airflow version are you using? If you do not see your version, please (ideally) test on + the latest release or main to see if the issue is fixed before reporting it. + multiple: false + options: + - "3.0.4" + - "2.11.0" + - "main (development)" + - "Other Airflow 2 version (please specify below)" + validations: + required: true + - type: input + attributes: + label: If "Other Airflow 2 version" selected, which one? + # yamllint disable rule:line-length + description: > + On what 2.X version of Airflow are you currently experiencing the issue? Remember, you are encouraged to + test with the latest release or on the main branch to verify your issue still exists, especially if + your version is at least a minor version older than the [current stable release](https://airflow.apache.org/docs/apache-airflow/stable/installation/supported-versions.html#version-life-cycle). + # yamllint enable rule:line-length + - type: textarea + attributes: + label: What happened? + description: Describe what happened. + placeholder: > + Please provide the context in which the problem occurred and explain what happened + validations: + required: true + - type: textarea + attributes: + label: What you think should happen instead? + description: What do you think went wrong? + placeholder: > + Please explain why you think the behaviour is erroneous. It is extremely helpful if you copy&paste + the fragment of logs showing the exact error messages or wrong behaviour and screenshots for + UI problems or YouTube link to a video of you demonstrating the problem. You can include files by + dragging and dropping them here. + - type: textarea + attributes: + label: How to reproduce + description: > + What should we do to reproduce the problem? If you are not able to provide a reproducible case, + please open a [discussion](https://github.com/apache/airflow/discussions) instead. + placeholder: > + Please make sure you provide a reproducible step-by-step case of how to reproduce the problem + as minimally and precisely as possible. Keep in mind we do not have access to your cluster or DAGs. + Remember that non-reproducible issues will be closed! Opening a discussion is recommended as a + first step. + validations: + required: true + - type: input + attributes: + label: Operating System + description: What Operating System are you using? + placeholder: "You can get it via `cat /etc/os-release` for example" + validations: + required: true + - type: textarea + attributes: + label: Versions of Apache Airflow Providers + description: What Apache Airflow Providers versions are you using? + placeholder: You can use `pip freeze | grep apache-airflow-providers` (you can leave only relevant ones) + - type: dropdown + attributes: + label: Deployment + description: > + What kind of deployment do you have? If you use a Managed Service, consider first using regular + channels of reporting issues for the service. + multiple: false + options: + - "Official Apache Airflow Helm Chart" + - "Other 3rd-party Helm chart" + - "Docker-Compose" + - "Other Docker-based deployment" + - "Virtualenv installation" + - "Astronomer" + - "Google Cloud Composer" + - "Amazon (AWS) MWAA" + - "Microsoft ADF Managed Airflow" + - "Other" + validations: + required: true + - type: textarea + attributes: + label: Deployment details + description: Additional description of your deployment. 
+ placeholder: > + Enter any relevant details of your deployment. Especially version of your tools, + software (docker-compose, helm, k8s, etc.), any customisation and configuration you added. + - type: textarea + attributes: + label: Anything else? + description: Anything else we need to know? + placeholder: > + How often does this problem occur? (Once? Every time? Only when certain conditions are met?) + Any relevant logs to include? Put them here inside fenced + ``` ``` blocks or inside a foldable details tag if it's long: +
<details><summary>x.log</summary> lots of stuff </details>
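The placeholder above suggests wrapping long logs in a foldable `<details>` tag. For reporters who want to generate that markup, a small sketch (the `x.log` name mirrors the placeholder's example; the output file name is arbitrary):

```bash
# Wrap a long log file in the foldable <details> markup the placeholder above suggests.
# "x.log" mirrors the placeholder's example name; adjust to your actual log file.
log_file="x.log"
{
  printf '<details><summary>%s</summary>\n\n' "$log_file"
  cat "$log_file"
  printf '\n</details>\n'
} > issue-log-snippet.md
```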
+ - type: checkboxes + attributes: + label: Are you willing to submit PR? + description: > + This is absolutely not required, but we are happy to guide you in the contribution process + especially if you already have a good understanding of how to implement the fix. + Airflow is a community-managed project and we love to bring new contributors in. + Find us in #new-contributors on Slack! + options: + - label: Yes I am willing to submit a PR! + - type: checkboxes + attributes: + label: Code of Conduct + description: > + The Code of Conduct helps create a safe space for everyone. We require + that everyone agrees to it. + options: + - label: > + I agree to follow this project's + [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md) + required: true + - type: markdown + attributes: + value: "Thanks for completing our form!" diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/2-feature_request.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/feature_request.yml rename to .github/ISSUE_TEMPLATE/2-feature_request.yml diff --git a/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml b/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml new file mode 100644 index 0000000000000..844ea18ea4d89 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml @@ -0,0 +1,232 @@ +--- +name: Airflow Providers Bug report +description: Problems and issues with code in Apache Airflow Providers +labels: ["kind:bug", "area:providers", "needs-triage"] +body: + - type: markdown + attributes: + # yamllint disable rule:line-length + value: " + + Thank you for finding the time to report a problem! + + We really appreciate the community's efforts to improve Airflow. + + Note, you do not need to create an issue if you have a change ready to submit! + + You can open a [pull request](https://github.com/apache/airflow/pulls) immediately instead. +
" + # yamllint enable rule:line-length + - type: dropdown + attributes: + label: Apache Airflow Provider(s) + description: Provider(s) that the issue report is about (you can choose more than one) + multiple: true + options: + - airbyte + - alibaba + - amazon + - apache-beam + - apache-cassandra + - apache-drill + - apache-druid + - apache-flink + - apache-hdfs + - apache-hive + - apache-iceberg + - apache-impala + - apache-kafka + - apache-kylin + - apache-livy + - apache-pig + - apache-pinot + - apache-spark + - apache-tinkerpop + - apprise + - arangodb + - asana + - atlassian-jira + - celery + - cloudant + - cncf-kubernetes + - cohere + - common-compat + - common-io + - common-messaging + - common-sql + - databricks + - datadog + - dbt-cloud + - dingding + - discord + - docker + - edge3 + - elasticsearch + - exasol + - fab + - facebook + - ftp + - git + - github + - google + - grpc + - hashicorp + - http + - imap + - influxdb + - jdbc + - jenkins + - keycloak + - microsoft-azure + - microsoft-mssql + - microsoft-psrp + - microsoft-winrm + - mongo + - mysql + - neo4j + - odbc + - openai + - openfaas + - openlineage + - opensearch + - opsgenie + - oracle + - pagerduty + - papermill + - pgvector + - pinecone + - postgres + - presto + - qdrant + - redis + - salesforce + - samba + - segment + - sendgrid + - sftp + - singularity + - slack + - smtp + - snowflake + - sqlite + - ssh + - standard + - tableau + - telegram + - teradata + - trino + - vertica + - weaviate + - yandex + - ydb + - zendesk + validations: + required: true + - type: textarea + attributes: + label: Versions of Apache Airflow Providers + description: What Apache Airflow Providers versions are you using? + placeholder: You can use `pip freeze | grep apache-airflow-providers` (you can leave only relevant ones) + - type: input + attributes: + label: Apache Airflow version + description: > + What Apache Airflow version are you using? + [Only Airflow 2 is supported](https://github.com/apache/airflow#version-life-cycle) for bugs. + validations: + required: true + - type: input + attributes: + label: Operating System + description: What Operating System are you using? + placeholder: "You can get it via `cat /etc/os-release` for example" + validations: + required: true + - type: dropdown + attributes: + label: Deployment + description: > + What kind of deployment do you have? If you use a Managed Service, consider first using regular + channels of reporting issues for the service. + multiple: false + options: + - "Official Apache Airflow Helm Chart" + - "Other 3rd-party Helm chart" + - "Docker-Compose" + - "Other Docker-based deployment" + - "Virtualenv installation" + - "Astronomer" + - "Google Cloud Composer" + - "Amazon (AWS) MWAA" + - "Microsoft ADF Managed Airflow" + - "Other" + validations: + required: true + - type: textarea + attributes: + label: Deployment details + description: Additional description of your deployment. + placeholder: > + Enter any relevant details of your deployment. Especially version of your tools, + software (docker-compose, helm, k8s, etc.), any customisation and configuration you added. + - type: textarea + attributes: + label: What happened + description: Describe what happened. + placeholder: > + Please provide the context in which the problem occurred and explain what happened + - type: textarea + attributes: + label: What you think should happen instead + description: What do you think went wrong? + placeholder: > + Please explain why you think the behaviour is erroneous. 
It is extremely helpful if you copy&paste + the fragment of logs showing the exact error messages or wrong behaviour and screenshots for + UI problems or YouTube link to a video of you demonstrating the problem. You can include files by + dragging and dropping them here. + - type: textarea + attributes: + label: How to reproduce + description: > + What should we do to reproduce the problem? If you are not able to provide a reproducible case, + please open a [Discussion](https://github.com/apache/airflow/discussions) instead. + placeholder: > + Please make sure you provide a reproducible step-by-step case of how to reproduce the problem + as minimally and precisely as possible. Keep in mind we do not have access to your cluster or + DAGs. Remember that non-reproducible issues will be closed! Opening a discussion is + recommended as a first step. + validations: + required: true + - type: textarea + attributes: + label: Anything else + description: Anything else we need to know? + placeholder: > + How often does this problem occur? (Once? Every time? Only when certain conditions are met?) + Any relevant logs to include? Put them here inside fenced + ``` ``` blocks or inside a foldable details tag if it's long: +
<details><summary>x.log</summary> lots of stuff </details>
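Since the providers form above asks for provider versions and OS details, it may help to note how a reporter can collect them in one go — a sketch assuming a pip-based installation (the last two commands come straight from the form's own placeholders):

```bash
# Collect the environment details the providers bug-report form asks for.
# Assumes a pip-based Airflow installation; adjust for conda or containers.
airflow version                              # Apache Airflow version field
pip freeze | grep apache-airflow-providers   # provider distributions in use
cat /etc/os-release                          # operating system field
```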
+ - type: checkboxes + attributes: + label: Are you willing to submit PR? + description: > + This is absolutely not required, but we are happy to guide you in the contribution process + especially if you already have a good understanding of how to implement the fix. + Airflow is a community-managed project and we love to bring new contributors in. + Find us in #new-contributors on Slack! + options: + - label: Yes I am willing to submit a PR! + - type: checkboxes + attributes: + label: Code of Conduct + description: > + The Code of Conduct helps create a safe space for everyone. We require + that everyone agrees to it. + options: + - label: > + I agree to follow this project's + [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md) + required: true + - type: markdown + attributes: + value: "Thanks for completing our form!" diff --git a/.github/ISSUE_TEMPLATE/4-airflow_helmchart_bug_report.yml b/.github/ISSUE_TEMPLATE/4-airflow_helmchart_bug_report.yml new file mode 100644 index 0000000000000..64efe5ac16a3d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/4-airflow_helmchart_bug_report.yml @@ -0,0 +1,144 @@ +--- +name: Airflow Helm Chart Bug report +description: Problems and issues with the Apache Airflow Official Helm Chart +labels: ["kind:bug", "area:helm-chart", "needs-triage"] +body: + - type: markdown + attributes: + # yamllint disable rule:line-length + value: " + + Thank you for finding the time to report the problem! + + We really appreciate the community's efforts to improve Airflow. + + Note that this issue is only for the + [Official Apache Airflow Helm Chart](https://airflow.apache.org/docs/helm-chart/stable/index.html). + If you use another 3rd-party Chart, you should report your issue in the repo of that chart instead. + + Note, you do not need to create an issue if you have a change ready to submit! + + You can open a [pull request](https://github.com/apache/airflow/pulls) immediately instead. +
" + # yamllint enable rule:line-length + - type: dropdown + attributes: + label: Official Helm Chart version + description: > + What Apache Airflow Helm Chart version are you using? + multiple: false + options: + - "1.18.0 (latest released)" + - "1.17.0" + - "1.16.0" + - "1.15.0" + - "1.14.0" + - "1.13.1" + - "1.13.0" + - "1.12.0" + - "1.11.0" + - "1.10.0" + - "1.9.0" + - "1.8.0" + - "1.7.0" + - "1.6.0" + - "1.5.0" + - "1.4.0" + - "1.3.0" + - "1.2.0" + - "1.1.0" + - "1.0.0" + - "main (development)" + validations: + required: true + - type: input + attributes: + label: Apache Airflow version + description: > + What Apache Airflow version are you using? + [Only Airflow 2 is supported](https://github.com/apache/airflow#version-life-cycle) for bugs. + validations: + required: true + - type: input + attributes: + label: Kubernetes Version + description: Which Kubernetes Version do you use? + validations: + required: true + - type: textarea + attributes: + label: Helm Chart configuration + description: Additional description of your Helm Chart configuration. + placeholder: > + Enter any relevant details of your Helm Chart configuration. Maybe you can + paste your `values.yaml` or important parts of it here? Make sure to surround the code + you paste with ``` ```. + - type: textarea + attributes: + label: Docker Image customizations + description: What are the specific modification you've made in your image? + placeholder: > + Did you extend or customise the official Airflow image? Did you add any packages? Maybe + you can share a link to your image, or copy the Dockerfile and `docker build` commands + you used to build the image? Make sure to surround the code you paste with ``` ```. + - type: textarea + attributes: + label: What happened + description: Describe what happened. + placeholder: > + Please provide the context in which the problem occurred and explain what happened + - type: textarea + attributes: + label: What you think should happen instead + description: What do you think went wrong? + placeholder: > + Please explain why you think the behaviour is erroneous. It is extremely helpful if you copy&paste + the fragment of logs showing the exact error messages or wrong behaviour and screenshots for + UI problems or YouTube link to a video of you demonstrating the problem. You can include files by + dragging and dropping them here. + - type: textarea + attributes: + label: How to reproduce + description: > + What should we do to reproduce the problem? If you are not able to provide a reproducible case, + please open a [Discussion](https://github.com/apache/airflow/discussions) instead. + placeholder: > + Please make sure you provide a reproducible step-by-step case of how to reproduce the problem + as minimally and precisely as possible. Keep in mind we do not have access to your cluster or DAGs. + Remember that non-reproducible issues will be closed! Opening a discussion is recommended as a + first step. + validations: + required: true + - type: textarea + attributes: + label: Anything else + description: Anything else we need to know? + placeholder: > + How often does this problem occur? (Once? Every time? Only when certain conditions are met?) + Any relevant logs to include? Put them here inside fenced + ``` ``` blocks or inside a foldable details tag if it's long: +
<details><summary>x.log</summary> lots of stuff </details>
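For the Helm chart form above, the chart version, the values actually in use, and the Kubernetes version can be gathered as below — a sketch in which the release name `my-airflow` and the namespace `airflow` are placeholders, not values from this diff:

```bash
# Gather what the Helm chart bug-report form asks for.
# "my-airflow" and the "airflow" namespace are placeholder names.
helm list -n airflow                     # shows the deployed chart version
helm get values my-airflow -n airflow    # the values.yaml actually in effect
kubectl version                          # Kubernetes client/server versions
```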
+ - type: checkboxes + attributes: + label: Are you willing to submit PR? + description: > + This is absolutely not required, but we are happy to guide you in the contribution process + especially if you already have a good understanding of how to implement the fix. + Airflow is a community-managed project and we love to bring new contributors in. + Find us in #new-contributors on Slack! + options: + - label: Yes I am willing to submit a PR! + - type: checkboxes + attributes: + label: Code of Conduct + description: > + The Code of Conduct helps create a safe space for everyone. We require + that everyone agrees to it. + options: + - label: > + I agree to follow this project's + [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md) + required: true + - type: markdown + attributes: + value: "Thanks for completing our form!" diff --git a/.github/ISSUE_TEMPLATE/airflow_doc_issue_report.yml b/.github/ISSUE_TEMPLATE/5-airflow_doc_issue_report.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/airflow_doc_issue_report.yml rename to .github/ISSUE_TEMPLATE/5-airflow_doc_issue_report.yml diff --git a/.github/ISSUE_TEMPLATE/~free_form.yml b/.github/ISSUE_TEMPLATE/6-free_form.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/~free_form.yml rename to .github/ISSUE_TEMPLATE/6-free_form.yml diff --git a/.github/ISSUE_TEMPLATE/airflow_bug_report.yml b/.github/ISSUE_TEMPLATE/airflow_bug_report.yml deleted file mode 100644 index 25fb14cba10e4..0000000000000 --- a/.github/ISSUE_TEMPLATE/airflow_bug_report.yml +++ /dev/null @@ -1,144 +0,0 @@ ---- -name: Airflow Bug report -description: Problems and issues with code in Apache Airflow core -labels: ["kind:bug", "area:core", "needs-triage"] -body: - - type: markdown - attributes: - # yamllint disable rule:line-length - value: " - - Thank you for finding the time to report the problem! - - We really appreciate the community's efforts to improve Airflow. - - Note, you do not need to create an issue if you have a change ready to submit! - - You can open a [pull request](https://github.com/apache/airflow/pulls) immediately instead. -
" - # yamllint enable rule:line-length - - type: dropdown - attributes: - label: Apache Airflow version - description: > - What Apache Airflow version are you using? If you do not see your version, please (ideally) test on - the latest release or main to see if the issue is fixed before reporting it. - multiple: false - options: - - "2.10.5" - - "3.0.0" - - "main (development)" - - "Other Airflow 2 version (please specify below)" - validations: - required: true - - type: input - attributes: - label: If "Other Airflow 2 version" selected, which one? - # yamllint disable rule:line-length - description: > - On what 2.X version of Airflow are you currently experiencing the issue? Remember, you are encouraged to - test with the latest release or on the main branch to verify your issue still exists, especially if - your version is at least a minor version older than the [current stable release](https://airflow.apache.org/docs/apache-airflow/stable/installation/supported-versions.html#version-life-cycle). - # yamllint enable rule:line-length - - type: textarea - attributes: - label: What happened? - description: Describe what happened. - placeholder: > - Please provide the context in which the problem occurred and explain what happened - validations: - required: true - - type: textarea - attributes: - label: What you think should happen instead? - description: What do you think went wrong? - placeholder: > - Please explain why you think the behaviour is erroneous. It is extremely helpful if you copy&paste - the fragment of logs showing the exact error messages or wrong behaviour and screenshots for - UI problems or YouTube link to a video of you demonstrating the problem. You can include files by - dragging and dropping them here. - - type: textarea - attributes: - label: How to reproduce - description: > - What should we do to reproduce the problem? If you are not able to provide a reproducible case, - please open a [discussion](https://github.com/apache/airflow/discussions) instead. - placeholder: > - Please make sure you provide a reproducible step-by-step case of how to reproduce the problem - as minimally and precisely as possible. Keep in mind we do not have access to your cluster or DAGs. - Remember that non-reproducible issues will be closed! Opening a discussion is recommended as a - first step. - validations: - required: true - - type: input - attributes: - label: Operating System - description: What Operating System are you using? - placeholder: "You can get it via `cat /etc/os-release` for example" - validations: - required: true - - type: textarea - attributes: - label: Versions of Apache Airflow Providers - description: What Apache Airflow Providers versions are you using? - placeholder: You can use `pip freeze | grep apache-airflow-providers` (you can leave only relevant ones) - - type: dropdown - attributes: - label: Deployment - description: > - What kind of deployment do you have? If you use a Managed Service, consider first using regular - channels of reporting issues for the service. - multiple: false - options: - - "Official Apache Airflow Helm Chart" - - "Other 3rd-party Helm chart" - - "Docker-Compose" - - "Other Docker-based deployment" - - "Virtualenv installation" - - "Astronomer" - - "Google Cloud Composer" - - "Amazon (AWS) MWAA" - - "Microsoft ADF Managed Airflow" - - "Other" - validations: - required: true - - type: textarea - attributes: - label: Deployment details - description: Additional description of your deployment. 
- placeholder: > - Enter any relevant details of your deployment. Especially version of your tools, - software (docker-compose, helm, k8s, etc.), any customisation and configuration you added. - - type: textarea - attributes: - label: Anything else? - description: Anything else we need to know? - placeholder: > - How often does this problem occur? (Once? Every time? Only when certain conditions are met?) - Any relevant logs to include? Put them here inside fenced - ``` ``` blocks or inside a foldable details tag if it's long: -
<details><summary>x.log</summary> lots of stuff </details>
- - type: checkboxes - attributes: - label: Are you willing to submit PR? - description: > - This is absolutely not required, but we are happy to guide you in the contribution process - especially if you already have a good understanding of how to implement the fix. - Airflow is a community-managed project and we love to bring new contributors in. - Find us in #new-contributors on Slack! - options: - - label: Yes I am willing to submit a PR! - - type: checkboxes - attributes: - label: Code of Conduct - description: > - The Code of Conduct helps create a safe space for everyone. We require - that everyone agrees to it. - options: - - label: > - I agree to follow this project's - [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md) - required: true - - type: markdown - attributes: - value: "Thanks for completing our form!" diff --git a/.github/ISSUE_TEMPLATE/airflow_helmchart_bug_report.yml b/.github/ISSUE_TEMPLATE/airflow_helmchart_bug_report.yml deleted file mode 100644 index 3c8b7e68a82bf..0000000000000 --- a/.github/ISSUE_TEMPLATE/airflow_helmchart_bug_report.yml +++ /dev/null @@ -1,142 +0,0 @@ ---- -name: Airflow Helm Chart Bug report -description: Problems and issues with the Apache Airflow Official Helm Chart -labels: ["kind:bug", "area:helm-chart", "needs-triage"] -body: - - type: markdown - attributes: - # yamllint disable rule:line-length - value: " - - Thank you for finding the time to report the problem! - - We really appreciate the community's efforts to improve Airflow. - - Note that this issue is only for the - [Official Apache Airflow Helm Chart](https://airflow.apache.org/docs/helm-chart/stable/index.html). - If you use another 3rd-party Chart, you should report your issue in the repo of that chart instead. - - Note, you do not need to create an issue if you have a change ready to submit! - - You can open a [pull request](https://github.com/apache/airflow/pulls) immediately instead. -
" - # yamllint enable rule:line-length - - type: dropdown - attributes: - label: Official Helm Chart version - description: > - What Apache Airflow Helm Chart version are you using? - multiple: false - options: - - "1.16.0 (latest released)" - - "1.15.0" - - "1.14.0" - - "1.13.1" - - "1.13.0" - - "1.12.0" - - "1.11.0" - - "1.10.0" - - "1.9.0" - - "1.8.0" - - "1.7.0" - - "1.6.0" - - "1.5.0" - - "1.4.0" - - "1.3.0" - - "1.2.0" - - "1.1.0" - - "1.0.0" - - "main (development)" - validations: - required: true - - type: input - attributes: - label: Apache Airflow version - description: > - What Apache Airflow version are you using? - [Only Airflow 2 is supported](https://github.com/apache/airflow#version-life-cycle) for bugs. - validations: - required: true - - type: input - attributes: - label: Kubernetes Version - description: Which Kubernetes Version do you use? - validations: - required: true - - type: textarea - attributes: - label: Helm Chart configuration - description: Additional description of your Helm Chart configuration. - placeholder: > - Enter any relevant details of your Helm Chart configuration. Maybe you can - paste your `values.yaml` or important parts of it here? Make sure to surround the code - you paste with ``` ```. - - type: textarea - attributes: - label: Docker Image customizations - description: What are the specific modification you've made in your image? - placeholder: > - Did you extend or customise the official Airflow image? Did you add any packages? Maybe - you can share a link to your image, or copy the Dockerfile and `docker build` commands - you used to build the image? Make sure to surround the code you paste with ``` ```. - - type: textarea - attributes: - label: What happened - description: Describe what happened. - placeholder: > - Please provide the context in which the problem occurred and explain what happened - - type: textarea - attributes: - label: What you think should happen instead - description: What do you think went wrong? - placeholder: > - Please explain why you think the behaviour is erroneous. It is extremely helpful if you copy&paste - the fragment of logs showing the exact error messages or wrong behaviour and screenshots for - UI problems or YouTube link to a video of you demonstrating the problem. You can include files by - dragging and dropping them here. - - type: textarea - attributes: - label: How to reproduce - description: > - What should we do to reproduce the problem? If you are not able to provide a reproducible case, - please open a [Discussion](https://github.com/apache/airflow/discussions) instead. - placeholder: > - Please make sure you provide a reproducible step-by-step case of how to reproduce the problem - as minimally and precisely as possible. Keep in mind we do not have access to your cluster or DAGs. - Remember that non-reproducible issues will be closed! Opening a discussion is recommended as a - first step. - validations: - required: true - - type: textarea - attributes: - label: Anything else - description: Anything else we need to know? - placeholder: > - How often does this problem occur? (Once? Every time? Only when certain conditions are met?) - Any relevant logs to include? Put them here inside fenced - ``` ``` blocks or inside a foldable details tag if it's long: -
<details><summary>x.log</summary> lots of stuff </details>
- - type: checkboxes - attributes: - label: Are you willing to submit PR? - description: > - This is absolutely not required, but we are happy to guide you in the contribution process - especially if you already have a good understanding of how to implement the fix. - Airflow is a community-managed project and we love to bring new contributors in. - Find us in #new-contributors on Slack! - options: - - label: Yes I am willing to submit a PR! - - type: checkboxes - attributes: - label: Code of Conduct - description: > - The Code of Conduct helps create a safe space for everyone. We require - that everyone agrees to it. - options: - - label: > - I agree to follow this project's - [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md) - required: true - - type: markdown - attributes: - value: "Thanks for completing our form!" diff --git a/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml b/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml deleted file mode 100644 index a2461de6285e7..0000000000000 --- a/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml +++ /dev/null @@ -1,230 +0,0 @@ ---- -name: Airflow Providers Bug report -description: Problems and issues with code in Apache Airflow Providers -labels: ["kind:bug", "area:providers", "needs-triage"] -body: - - type: markdown - attributes: - # yamllint disable rule:line-length - value: " - - Thank you for finding the time to report a problem! - - We really appreciate the community's efforts to improve Airflow. - - Note, you do not need to create an issue if you have a change ready to submit! - - You can open a [pull request](https://github.com/apache/airflow/pulls) immediately instead. -
" - # yamllint enable rule:line-length - - type: dropdown - attributes: - label: Apache Airflow Provider(s) - description: Provider(s) that the issue report is about (you can choose more than one) - multiple: true - options: - - airbyte - - alibaba - - amazon - - apache-beam - - apache-cassandra - - apache-drill - - apache-druid - - apache-flink - - apache-hdfs - - apache-hive - - apache-iceberg - - apache-impala - - apache-kafka - - apache-kylin - - apache-livy - - apache-pig - - apache-pinot - - apache-spark - - apprise - - arangodb - - asana - - atlassian-jira - - celery - - cloudant - - cncf-kubernetes - - cohere - - common-compat - - common-io - - common-messaging - - common-sql - - databricks - - datadog - - dbt-cloud - - dingding - - discord - - docker - - edge3 - - elasticsearch - - exasol - - fab - - facebook - - ftp - - git - - github - - google - - grpc - - hashicorp - - http - - imap - - influxdb - - jdbc - - jenkins - - microsoft-azure - - microsoft-mssql - - microsoft-psrp - - microsoft-winrm - - mongo - - mysql - - neo4j - - odbc - - openai - - openfaas - - openlineage - - opensearch - - opsgenie - - oracle - - pagerduty - - papermill - - pgvector - - pinecone - - postgres - - presto - - qdrant - - redis - - salesforce - - samba - - segment - - sendgrid - - sftp - - singularity - - slack - - smtp - - snowflake - - sqlite - - ssh - - standard - - tableau - - telegram - - teradata - - trino - - vertica - - weaviate - - yandex - - ydb - - zendesk - validations: - required: true - - type: textarea - attributes: - label: Versions of Apache Airflow Providers - description: What Apache Airflow Providers versions are you using? - placeholder: You can use `pip freeze | grep apache-airflow-providers` (you can leave only relevant ones) - - type: input - attributes: - label: Apache Airflow version - description: > - What Apache Airflow version are you using? - [Only Airflow 2 is supported](https://github.com/apache/airflow#version-life-cycle) for bugs. - validations: - required: true - - type: input - attributes: - label: Operating System - description: What Operating System are you using? - placeholder: "You can get it via `cat /etc/os-release` for example" - validations: - required: true - - type: dropdown - attributes: - label: Deployment - description: > - What kind of deployment do you have? If you use a Managed Service, consider first using regular - channels of reporting issues for the service. - multiple: false - options: - - "Official Apache Airflow Helm Chart" - - "Other 3rd-party Helm chart" - - "Docker-Compose" - - "Other Docker-based deployment" - - "Virtualenv installation" - - "Astronomer" - - "Google Cloud Composer" - - "Amazon (AWS) MWAA" - - "Microsoft ADF Managed Airflow" - - "Other" - validations: - required: true - - type: textarea - attributes: - label: Deployment details - description: Additional description of your deployment. - placeholder: > - Enter any relevant details of your deployment. Especially version of your tools, - software (docker-compose, helm, k8s, etc.), any customisation and configuration you added. - - type: textarea - attributes: - label: What happened - description: Describe what happened. - placeholder: > - Please provide the context in which the problem occurred and explain what happened - - type: textarea - attributes: - label: What you think should happen instead - description: What do you think went wrong? - placeholder: > - Please explain why you think the behaviour is erroneous. 
It is extremely helpful if you copy&paste - the fragment of logs showing the exact error messages or wrong behaviour and screenshots for - UI problems or YouTube link to a video of you demonstrating the problem. You can include files by - dragging and dropping them here. - - type: textarea - attributes: - label: How to reproduce - description: > - What should we do to reproduce the problem? If you are not able to provide a reproducible case, - please open a [Discussion](https://github.com/apache/airflow/discussions) instead. - placeholder: > - Please make sure you provide a reproducible step-by-step case of how to reproduce the problem - as minimally and precisely as possible. Keep in mind we do not have access to your cluster or - DAGs. Remember that non-reproducible issues will be closed! Opening a discussion is - recommended as a first step. - validations: - required: true - - type: textarea - attributes: - label: Anything else - description: Anything else we need to know? - placeholder: > - How often does this problem occur? (Once? Every time? Only when certain conditions are met?) - Any relevant logs to include? Put them here inside fenced - ``` ``` blocks or inside a foldable details tag if it's long: -
<details><summary>x.log</summary> lots of stuff </details>
- - type: checkboxes - attributes: - label: Are you willing to submit PR? - description: > - This is absolutely not required, but we are happy to guide you in the contribution process - especially if you already have a good understanding of how to implement the fix. - Airflow is a community-managed project and we love to bring new contributors in. - Find us in #new-contributors on Slack! - options: - - label: Yes I am willing to submit a PR! - - type: checkboxes - attributes: - label: Code of Conduct - description: > - The Code of Conduct helps create a safe space for everyone. We require - that everyone agrees to it. - options: - - label: > - I agree to follow this project's - [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md) - required: true - - type: markdown - attributes: - value: "Thanks for completing our form!" diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml index 39e87cd7d8b52..a7fff7dc397fe 100644 --- a/.github/actions/breeze/action.yml +++ b/.github/actions/breeze/action.yml @@ -21,10 +21,7 @@ description: 'Sets up Python and Breeze' inputs: python-version: description: 'Python version to use' - default: "3.9" - use-uv: - description: 'Whether to use uv tool' - required: true + default: "3.10" outputs: host-python-version: description: Python version used in host @@ -41,6 +38,8 @@ runs: - name: "Install Breeze" shell: bash run: ./scripts/ci/install_breeze.sh + env: + PYTHON_VERSION: "${{ inputs.python-version }}" - name: "Free space" shell: bash run: breeze ci free-space diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml index dd7944841510a..f2ee5868aa96c 100644 --- a/.github/actions/install-pre-commit/action.yml +++ b/.github/actions/install-pre-commit/action.yml @@ -21,16 +21,19 @@ description: 'Installs pre-commit and related packages' inputs: python-version: description: 'Python version to use' - default: "3.9" + default: "3.10" uv-version: description: 'uv version to use' - default: "0.6.13" # Keep this comment to allow automatic replacement of uv version + default: "0.8.9" # Keep this comment to allow automatic replacement of uv version pre-commit-version: description: 'pre-commit version to use' - default: "4.2.0" # Keep this comment to allow automatic replacement of pre-commit version + default: "4.3.0" # Keep this comment to allow automatic replacement of pre-commit version pre-commit-uv-version: description: 'pre-commit-uv version to use' default: "4.1.4" # Keep this comment to allow automatic replacement of pre-commit-uv version + skip-pre-commits: + description: "Skip some pre-commits from installation" + default: "" runs: using: "composite" steps: @@ -40,6 +43,7 @@ runs: UV_VERSION: ${{inputs.uv-version}} PRE_COMMIT_VERSION: ${{inputs.pre-commit-version}} PRE_COMMIT_UV_VERSION: ${{inputs.pre-commit-uv-version}} + SKIP: ${{ inputs.skip-pre-commits }} run: | pip install uv==${UV_VERSION} || true uv tool install pre-commit==${PRE_COMMIT_VERSION} --with uv==${UV_VERSION} \ @@ -61,6 +65,16 @@ runs: key: cache-pre-commit-v4-${{ inputs.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }} path: /tmp/ id: restore-pre-commit-cache + - name: "Check if pre-commit cache tarball exists" + shell: bash + run: | + if [ -f /tmp/cache-pre-commit.tar.gz ]; then + echo "✅ Cache tarball found: /tmp/cache-pre-commit.tar.gz" + else + echo "❌ Cache tarball missing. 
Expected /tmp/cache-pre-commit.tar.gz" + exit 1 + fi + if: steps.restore-pre-commit-cache.outputs.stash-hit == 'true' - name: "Restore .cache from the tar file" run: tar -C ~ -xzf /tmp/cache-pre-commit.tar.gz shell: bash @@ -76,3 +90,5 @@ runs: shell: bash run: pre-commit install-hooks || (cat ~/.cache/pre-commit/pre-commit.log && exit 1) working-directory: ${{ github.workspace }} + env: + SKIP: ${{ inputs.skip-pre-commits }} diff --git a/.github/actions/migration_tests/action.yml b/.github/actions/migration_tests/action.yml index ed71e21407d10..93e966de8a847 100644 --- a/.github/actions/migration_tests/action.yml +++ b/.github/actions/migration_tests/action.yml @@ -18,18 +18,23 @@ --- name: 'Run migration tests' description: 'Runs migration tests' +inputs: + python-version: + description: "Python version to run the tests on" + required: true runs: using: "composite" steps: - name: "Test migration file 2 to 3 migration: ${{env.BACKEND}}" shell: bash run: | - breeze shell "${{ env.AIRFLOW_2_CMD }}" --use-airflow-version 2.10.5 --answer y && - breeze shell "${{ env.AIRFLOW_3_CMD }}" --no-db-cleanup + breeze shell "${AIRFLOW_2_CMD}" --use-airflow-version 2.11.0 --answer y && + breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANAGERS} + ${AIRFLOW_3_CMD}" --no-db-cleanup env: COMPOSE_PROJECT_NAME: "docker-compose" - AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" DB_RESET: "false" + DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" AIRFLOW_2_CMD: >- airflow db reset --skip-init -y && airflow db migrate --to-revision heads @@ -37,63 +42,77 @@ runs: airflow db migrate --to-revision heads && airflow db downgrade -n 2.7.0 -y && airflow db migrate - if: env.BACKEND != 'sqlite' + # migration tests cannot be run with Python 3.13 now - currently we have no FAB and no FABDBManager - + # and airflow (correctly) refuses to migrate things to Airflow 2 when there is no "ab_user" + # table created.
So migration tests for now will have to be excluded for Python 3.13 until + # we start working on 3.2 (with migration to 3.1) or until FAB is supported in 3.13 (FAB 5) + # TODO(potiuk) bring migration tests back for Python 3.13 when one of the two conditions is fulfilled + if: env.BACKEND != 'sqlite' && inputs.python-version != '3.13' - name: "Bring composer down" shell: bash run: breeze down env: COMPOSE_PROJECT_NAME: "docker-compose" + if: inputs.python-version != '3.13' - name: "Test ORM migration 2 to 3: ${{env.BACKEND}}" shell: bash run: > - breeze shell "${{ env.AIRFLOW_2_CMD }}" --use-airflow-version 2.10.5 --answer y && - breeze shell "${{ env.AIRFLOW_3_CMD }}" --no-db-cleanup + breeze shell "${AIRFLOW_2_CMD}" --use-airflow-version 2.11.0 --answer y && + breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANAGERS} + ${AIRFLOW_3_CMD}" --no-db-cleanup env: COMPOSE_PROJECT_NAME: "docker-compose" - AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" DB_RESET: "false" + DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" AIRFLOW_2_CMD: >- airflow db reset -y AIRFLOW_3_CMD: >- airflow db migrate --to-revision heads && airflow db downgrade -n 2.7.0 -y && airflow db migrate - if: env.BACKEND != 'sqlite' + if: env.BACKEND != 'sqlite' && inputs.python-version != '3.13' - name: "Bring compose down again" shell: bash run: breeze down env: COMPOSE_PROJECT_NAME: "docker-compose" + if: inputs.python-version != '3.13' - name: "Test ORM migration ${{env.BACKEND}}" shell: bash run: > - breeze shell "airflow db reset -y && + breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANAGERS} && + airflow db reset -y && airflow db migrate --to-revision heads && airflow db downgrade -n 2.7.0 -y && airflow db migrate" env: COMPOSE_PROJECT_NAME: "docker-compose" - AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" + DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" + if: inputs.python-version != '3.13' - name: "Bring compose down again" shell: bash run: breeze down env: COMPOSE_PROJECT_NAME: "docker-compose" + if: inputs.python-version != '3.13' - name: "Test offline migration ${{env.BACKEND}}" shell: bash run: > - breeze shell "airflow db reset -y && + breeze shell + "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANAGERS} && + airflow db reset -y && airflow db downgrade -n 2.7.0 -y && airflow db migrate -s" env: COMPOSE_PROJECT_NAME: "docker-compose" - AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" - if: env.BACKEND != 'sqlite' + DB_MANAGERS: "airflow.providers.fab.auth_manager.models.db.FABDBManager" + if: env.BACKEND != 'sqlite' && inputs.python-version != '3.13' - name: "Bring any containers left down" shell: bash run: breeze down env: COMPOSE_PROJECT_NAME: "docker-compose" + if: inputs.python-version != '3.13' - name: "Dump logs on failure ${{env.BACKEND}}" shell: bash run: docker ps -q | xargs docker logs diff --git a/.github/actions/post_tests_success/action.yml b/.github/actions/post_tests_success/action.yml index b7b00a6fc0df3..234cde900e4d8 100644 --- a/.github/actions/post_tests_success/action.yml +++ b/.github/actions/post_tests_success/action.yml @@ -44,10 +44,12 @@ runs: mkdir ./files/coverage-reports mv ./files/coverage*.xml ./files/coverage-reports/ || true - name: "Upload all coverage reports to codecov" - uses: codecov/codecov-action@v4 + uses:
codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 env: CODECOV_TOKEN: ${{ inputs.codecov-token }} - if: env.ENABLE_COVERAGE == 'true' && env.TEST_TYPES != 'Helm' && inputs.python-version != '3.12' + if: > + env.ENABLE_COVERAGE == 'true' && env.TEST_TYPES != 'Helm' && inputs.python-version != '3.12' + && inputs.python-version != '3.13' with: name: coverage-${{env.JOB_ID}} flags: python-${{ env.PYTHON_MAJOR_MINOR_VERSION }},${{ env.BACKEND }}-${{ env.BACKEND_VERSION }} diff --git a/.github/actions/prepare_all_ci_images/action.yml b/.github/actions/prepare_all_ci_images/action.yml index 76c00a72a3998..7e2e8395636eb 100644 --- a/.github/actions/prepare_all_ci_images/action.yml +++ b/.github/actions/prepare_all_ci_images/action.yml @@ -34,14 +34,9 @@ runs: # TODO: Currently we cannot loop through the list of python versions and have dynamic list of # tasks. Instead we hardcode all possible python versions and they - but # this should be implemented in stash action as list of keys to download. - # That includes 3.8 - 3.12 as we are backporting it to v2-10-test branch + # That includes 3.9 - 3.12 as we are backporting it to v3-0-test branch # This is captured in https://github.com/apache/airflow/issues/45268 - - name: "Restore CI docker image ${{ inputs.platform }}:3.8" - uses: ./.github/actions/prepare_single_ci_image - with: - platform: ${{ inputs.platform }} - python: "3.8" - python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + # So we actually need 3.9 even if 3.9 support on main is dropped! - name: "Restore CI docker image ${{ inputs.platform }}:3.9" uses: ./.github/actions/prepare_single_ci_image with: @@ -66,3 +61,9 @@ runs: platform: ${{ inputs.platform }} python: "3.12" python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker image ${{ inputs.platform }}:3.13" + uses: ./.github/actions/prepare_single_ci_image + with: + platform: ${{ inputs.platform }} + python: "3.13" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} diff --git a/.github/actions/prepare_breeze_and_image/action.yml b/.github/actions/prepare_breeze_and_image/action.yml index 3254254a86516..0724167cda519 100644 --- a/.github/actions/prepare_breeze_and_image/action.yml +++ b/.github/actions/prepare_breeze_and_image/action.yml @@ -38,14 +38,17 @@ outputs: runs: using: "composite" steps: - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh shell: bash - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} id: breeze + - name: "Check free space" + shell: bash + run: | + echo "Checking free space!" 
+ df -H - name: "Restore ${{ inputs.image-type }} docker image ${{ inputs.platform }}:${{ inputs.python }}" uses: apache/infrastructure-actions/stash/restore@1c35b5ccf8fba5d4c3fdf25a045ca91aa0cbc468 with: diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index cff760ad9dc38..1bfdfafef6c41 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -72,6 +72,9 @@ labelPRBasedOnFilePath: provider:apache-spark: - providers/apache/spark/** + provider:apache-tinkerpop: + - providers/apache/tinkerpop/** + provider:apprise: - providers/apprise/** @@ -108,9 +111,6 @@ labelPRBasedOnFilePath: provider:common-sql: - providers/common/sql/** - provider:standard: - - providers/standard/** - provider:databricks: - providers/databricks/** @@ -177,6 +177,9 @@ labelPRBasedOnFilePath: provider:jenkins: - providers/jenkins/** + provider:keycloak: + - providers/keycloak/** + provider:microsoft-azure: - providers/microsoft/azure/** @@ -216,7 +219,7 @@ labelPRBasedOnFilePath: provider:opsgenie: - providers/opsgenie/** - provider:Oracle: + provider:oracle: - providers/oracle/** provider:pagerduty: @@ -276,6 +279,9 @@ labelPRBasedOnFilePath: provider:ssh: - providers/ssh/** + provider:standard: + - providers/standard/** + provider:tableau: - providers/tableau/** @@ -326,6 +332,24 @@ labelPRBasedOnFilePath: - .rat-excludes - .readthedocs.yml + # This should be copy of the "area:dev-tools" above and should be updated when we switch maintenance branch + backport-to-v3-0-test: + - scripts/**/* + - dev/**/* + - .github/**/* + - Dockerfile.ci + - CONTRIBUTING.rst + - contributing-docs/**/* + - yamllint-config.yml + - .asf.yaml + - .bash_completion + - .dockerignore + - .hadolint.yaml + - .pre-commit-config.yaml + - .rat-excludes + - .readthedocs.yml + + kind:documentation: - airflow-core/docs/**/* - chart/docs/**/* @@ -341,6 +365,39 @@ labelPRBasedOnFilePath: - airflow-core/docs/ui.rst - airflow-core/src/airflow/ui/**/* + area:translations: + - airflow-core/src/airflow/ui/public/i18n/**/* + + translation:default: + - airflow-core/src/airflow/ui/public/i18n/locales/en/* + + translation:ar: + - airflow-core/src/airflow/ui/public/i18n/locales/ar/* + + translation:de: + - airflow-core/src/airflow/ui/public/i18n/locales/de/* + + translation:es: + - airflow-core/src/airflow/ui/public/i18n/locales/es/* + + translation:fr: + - airflow-core/src/airflow/ui/public/i18n/locales/fr/* + + translation:he: + - airflow-core/src/airflow/ui/public/i18n/locales/he/* + + translation:ko: + - airflow-core/src/airflow/ui/public/i18n/locales/ko/* + + translation:nl: + - airflow-core/src/airflow/ui/public/i18n/locales/nl/* + + translation:pl: + - airflow-core/src/airflow/ui/public/i18n/locales/pl/* + + translation:zh-TW: + - airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/* + area:CLI: - airflow-core/src/airflow/cli/**/*.py - airflow-core/tests/unit/cli/**/*.py @@ -353,11 +410,16 @@ labelPRBasedOnFilePath: - airflow-core/docs/administration-and-deployment/lineage.rst area:Logging: + - airflow-core/src/airflow/config_templates/airflow_local_settings.py + - airflow-core/tests/unit/core/test_logging_config.py - airflow-core/src/airflow/utils/log/**/* - airflow-core/docs/administration-and-deployment/logging-monitoring/logging-*.rst - airflow-core/tests/unit/utils/log/**/* - providers/**/log/* + area:ConfigTemplates: + - airflow-core/src/airflow/config_templates/* + area:Plugins: - airflow-core/src/airflow/cli/commands/plugins_command.py - airflow-core/src/airflow/plugins_manager.py @@ -423,6 +485,9 @@ 
labelPRBasedOnFilePath: area:task-sdk: - task-sdk/**/* + area:go-sdk: + - go-sdk/**/* + area:db-migrations: - airflow-core/src/airflow/migrations/versions/* diff --git a/.github/dependabot.yml b/.github/dependabot.yml index adefbb9f478f7..a0404b811674e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -33,7 +33,6 @@ updates: - package-ecosystem: npm directories: - - /airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui - /airflow-core/src/airflow/ui schedule: interval: daily @@ -41,6 +40,16 @@ updates: core-ui-package-updates: patterns: - "*" + + - package-ecosystem: npm + directories: + - /airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui + schedule: + interval: daily + groups: + core-ui-package-updates: + patterns: + - "*" - package-ecosystem: npm directories: - /providers/fab/src/airflow/providers/fab/www @@ -51,7 +60,7 @@ updates: patterns: - "*" - # Repeat dependency updates on 2.10 branch as well + # Repeat dependency updates on 2.11 branch as well - package-ecosystem: pip directories: - /clients/python @@ -60,14 +69,14 @@ updates: - / schedule: interval: daily - target-branch: v2-10-test + target-branch: v2-11-test - package-ecosystem: npm directories: - /airflow/www/ schedule: interval: daily - target-branch: v2-10-test + target-branch: v2-11-test groups: core-ui-package-updates: patterns: diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index e9529bd0d08d2..f3982389301e6 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -20,16 +20,12 @@ name: Additional CI image checks on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build." + runners: + description: "The array of labels (in json form) determining runners." required: true type: string - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." - required: true - type: string - runs-on-as-json-self-hosted: - description: "The array of labels (in json form) determining self-hosted runners." + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string python-versions: @@ -37,7 +33,7 @@ on: # yamllint disable-line rule:truthy required: true type: string branch: - description: "Branch used to run the CI jobs in (main/v2_*_test)." + description: "Branch used to run the CI jobs in (main/v*_*_test)." required: true type: string constraints-branch: @@ -103,12 +99,11 @@ jobs: # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. 
packages: write with: - runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ inputs.runs-on-as-json-self-hosted }} + runners: ${{ inputs.runners }} cache-type: "Early" include-prod-images: "false" push-latest-images: "false" - platform: "linux/amd64" + platform: ${{ inputs.platform }} python-versions: ${{ inputs.python-versions }} branch: ${{ inputs.branch }} constraints-branch: ${{ inputs.constraints-branch }} @@ -116,66 +111,36 @@ jobs: include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} - if: inputs.branch == 'main' + if: > + inputs.canary-run == 'true' && + (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') # Check that after earlier cache push, breeze command will build quickly check-that-image-builds-quickly: - timeout-minutes: 11 + timeout-minutes: 17 name: Check that image builds quickly - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: UPGRADE_TO_NEWER_DEPENDENCIES: false - PYTHON_MAJOR_MINOR_VERSION: ${{ inputs.default-python-version }} - PYTHON_VERSION: ${{ inputs.default-python-version }} + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + PYTHON_VERSION: "${{ inputs.default-python-version }}" GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} VERBOSE: "true" + PLATFORM: ${{ inputs.platform }} if: inputs.branch == 'main' steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - - name: "Login to ghcr.io" - env: - actor: ${{ github.actor }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: echo "$GITHUB_TOKEN" | docker login ghcr.io -u "$actor" --password-stdin - name: "Check that image builds quickly" - run: breeze shell --max-time 600 --platform "linux/amd64" - -# # This is only a check if ARM images are successfully building when committer runs PR from -# # Apache repository. This is needed in case you want to fix failing cache job in "canary" run -# # There is no point in running this one in "canary" run, because the above step is doing the -# # same build anyway. 
-# build-ci-arm-images: -# name: Build CI ARM images -# uses: ./.github/workflows/ci-image-build.yml -# permissions: -# contents: read -# packages: write -# with: -# platform: "linux/arm64" -# push-image: "false" -# upload-image-artifact: "true" -# upload-mount-cache-artifact: ${{ inputs.canary-run }} -# runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} -# runs-on-as-json-self-hosted: ${{ inputs.runs-on-as-json-self-hosted }} -# python-versions: ${{ inputs.python-versions }} -# branch: ${{ inputs.branch }} -# constraints-branch: ${{ inputs.constraints-branch }} -# use-uv: ${{ inputs.use-uv }} -# upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} -# docker-cache: ${{ inputs.docker-cache }} -# disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} -# + run: breeze shell --max-time 900 --platform "${PLATFORM}" diff --git a/.github/workflows/additional-prod-image-tests.yml b/.github/workflows/additional-prod-image-tests.yml index e656f48a5a300..0820b1deb3664 100644 --- a/.github/workflows/additional-prod-image-tests.yml +++ b/.github/workflows/additional-prod-image-tests.yml @@ -20,8 +20,12 @@ name: Additional PROD image tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." + runners: + description: "The array of labels (in json form) determining runners." + required: true + type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string default-branch: @@ -36,10 +40,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to upgrade to newer dependencies (true/false)" required: true type: string - chicken-egg-providers: - description: "Whether to build chicken-egg provider distributions in the same run (true/false)" - required: true - type: string docker-cache: description: "Docker cache specification to build the image (registry, local, disabled)." 
required: true @@ -67,14 +67,12 @@ jobs: name: PROD image extra checks (main) uses: ./.github/workflows/prod-image-extra-checks.yml with: - runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} python-versions: "[ '${{ inputs.default-python-version }}' ]" - default-python-version: ${{ inputs.default-python-version }} + default-python-version: "${{ inputs.default-python-version }}" branch: ${{ inputs.default-branch }} - use-uv: "false" - build-provider-distributions: ${{ inputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} - chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} @@ -84,14 +82,12 @@ jobs: name: PROD image extra checks (release) uses: ./.github/workflows/prod-image-extra-checks.yml with: - runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} python-versions: "[ '${{ inputs.default-python-version }}' ]" - default-python-version: ${{ inputs.default-python-version }} + default-python-version: "${{ inputs.default-python-version }}" branch: ${{ inputs.default-branch }} - use-uv: "false" - build-provider-distributions: ${{ inputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} - chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} @@ -100,7 +96,7 @@ jobs: test-examples-of-prod-image-building: timeout-minutes: 60 name: "Test examples of PROD image building" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -111,24 +107,22 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 2 persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - name: "Prepare breeze & PROD image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" + platform: ${{ inputs.platform }} image-type: "prod" - python: ${{ inputs.default-python-version }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} - name: "Test examples of PROD image building" env: GITHUB_REPOSITORY: ${{ github.repository }} DEFAULT_BRANCH: ${{ inputs.default-branch }} - DEFAULT_PYTHON_VERSION: ${{ inputs.default-python-version }} + DEFAULT_PYTHON_VERSION: "${{ inputs.default-python-version }}" run: " cd ./docker-tests && \ TEST_IMAGE=\"ghcr.io/$GITHUB_REPOSITORY/$DEFAULT_BRANCH\ @@ -138,7 +132,7 @@ jobs: test-docker-compose-quick-start: timeout-minutes: 60 name: "Docker Compose quick start with PROD image verifying" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" GITHUB_REPOSITORY: ${{ github.repository }} @@ -150,17 +144,47 @@ 
jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 2 persist-credentials: false - name: "Prepare breeze & PROD image: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" + platform: ${{ inputs.platform }} image-type: "prod" python: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} use-uv: ${{ inputs.use-uv }} id: breeze - name: "Test docker-compose quick start" run: breeze testing docker-compose-tests + + task-sdk-integration-tests: + timeout-minutes: 60 + name: "Task SDK integration tests with PROD image" + runs-on: ${{ fromJSON(inputs.runners) }} + env: + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + fetch-depth: 2 + persist-credentials: false + - name: "Prepare breeze & PROD image: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: ${{ inputs.platform }} + image-type: "prod" + python: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} + use-uv: ${{ inputs.use-uv }} + id: breeze + - name: "Run Task SDK integration tests" + run: breeze testing task-sdk-integration-tests diff --git a/.github/workflows/airflow-distributions-tests.yml b/.github/workflows/airflow-distributions-tests.yml index c7071c5f34d7c..98203e47b3ebc 100644 --- a/.github/workflows/airflow-distributions-tests.yml +++ b/.github/workflows/airflow-distributions-tests.yml @@ -21,6 +21,14 @@ on: # yamllint disable-line rule:truthy workflow_call: inputs: # Static inputs defined to choose which distribution test to run + runners: + description: "The array of labels (in json form) determining runners." + required: true + type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" + required: true + type: string distribution-name: description: "The name of the distribution to test" required: true @@ -33,11 +41,6 @@ on: # yamllint disable-line rule:truthy description: "distribution test type" # e.g. task-sdk-tests required: true type: string - # Environment inputs - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build."
- required: true - type: string default-python-version: description: "Which version of python should be used by default" required: true @@ -54,13 +57,22 @@ on: # yamllint disable-line rule:truthy description: "Whether this is a canary run (true/false)" required: true type: string + use-local-venv: + description: "Whether local venv should be used for tests (true/false)" + required: true + type: string + test-timeout: + required: false + type: number + default: 60 + permissions: contents: read jobs: distributions-tests: - timeout-minutes: 80 + timeout-minutes: ${{ fromJSON(inputs.test-timeout) }} name: ${{ inputs.distribution-name }}:P${{ matrix.python-version }} tests - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: fail-fast: false matrix: @@ -77,27 +89,41 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" + platform: ${{ inputs.platform }} python: ${{ matrix.python-version }} use-uv: ${{ inputs.use-uv }} + if: ${{ inputs.use-local-venv != 'true' }} + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + shell: bash + if: ${{ inputs.use-local-venv == 'true' }} + - name: "Install Breeze" + uses: ./.github/actions/breeze + if: ${{ inputs.use-local-venv == 'true' }} - name: "Cleanup dist files" run: rm -fv ./dist/* + if: ${{ matrix.python-version == inputs.default-python-version }} # Conditional steps based on the distribution name - name: "Prepare Airflow ${{inputs.distribution-name}}: wheel" env: DISTRIBUTION_TYPE: "${{ inputs.distribution-cmd-format }}" - run: > + USE_LOCAL_HATCH: "${{ inputs.use-local-venv }}" + run: | + uv tool uninstall hatch || true + uv tool install hatch==1.14.1 breeze release-management "${DISTRIBUTION_TYPE}" --distribution-format wheel + if: ${{ matrix.python-version == inputs.default-python-version }} - name: "Verify wheel packages with twine" run: | uv tool uninstall twine || true uv tool install twine && twine check dist/*.whl + if: ${{ matrix.python-version == inputs.default-python-version }} - name: > Run unit tests for Airflow ${{inputs.distribution-name}}:Python ${{ matrix.python-version }} env: diff --git a/.github/workflows/automatic-backport.yml b/.github/workflows/automatic-backport.yml index 4c72401a5d317..4f861ddd58118 100644 --- a/.github/workflows/automatic-backport.yml +++ b/.github/workflows/automatic-backport.yml @@ -37,7 +37,7 @@ jobs: - name: Find PR information id: pr-info - uses: actions/github-script@v7 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/backport-cli.yml b/.github/workflows/backport-cli.yml index 673607027496d..42f8178868267 100644 --- a/.github/workflows/backport-cli.yml +++ b/.github/workflows/backport-cli.yml @@ -53,7 +53,7 @@ jobs: steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" id: checkout-for-backport - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: true fetch-depth: 0 @@ -70,8 +70,8 @@ jobs: TARGET_BRANCH: ${{ 
inputs.target-branch }} COMMIT_SHA: ${{ inputs.commit-sha }} run: | - git config --global user.email "name@example.com" - git config --global user.name "Your Name" + git config --global user.email "bot@airflow.apache.org" + git config --global user.name "Your friendly bot" set +e { echo 'cherry_picker_output< pre-commit run @@ -280,7 +280,7 @@ jobs: # For UV we do not fail the upgrade-installers check when a newer version is available because # UV is upgraded very frequently, so we want to upgrade it manually rather than # be notified about it - until it stabilizes in a 1.* version - - name: "Run automated upgrade for uv (open to see if new version is updated)" + - name: "Run automated upgrade for uv (not failing - just informational)" run: > pre-commit run --all-files --show-diff-on-failure --color always --verbose @@ -288,10 +288,13 @@ jobs: if: always() env: UPGRADE_UV: "true" + UPGRADE_PYTHON: "false" + UPGRADE_GOLANG: "true" UPGRADE_PIP: "false" UPGRADE_PRE_COMMIT: "false" UPGRADE_NODE_LTS: "false" - - name: "Run automated upgrade for pip, pre-commit and node" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Run automated upgrade for pip, pre-commit and node (failing if needed)" run: > pre-commit run --all-files --show-diff-on-failure --color always --verbose @@ -299,14 +302,17 @@ jobs: if: always() env: UPGRADE_UV: "false" + UPGRADE_PYTHON: "true" + UPGRADE_GOLANG: "false" UPGRADE_PIP: "true" UPGRADE_PRE_COMMIT: "true" UPGRADE_NODE_LTS: "true" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} test-airflow-release-commands: timeout-minutes: 80 name: "Test Airflow release commands" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" GITHUB_REPOSITORY: ${{ github.repository }} @@ -319,42 +325,43 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist files" run: rm -fv ./dist/* - name: Setup git for tagging run: | - git config --global user.email "name@example.com" - git config --global user.name "Your Name" + git config --global user.email "bot@airflow.apache.org" + git config --global user.name "Your friendly bot" - name: Install twine run: pip install twine - name: "Check Airflow create minor branch command" - run: | - ./scripts/ci/testing/run_breeze_command_with_retries.sh \ - release-management create-minor-branch --version-branch 2-8 --answer yes + run: > + breeze release-management create-minor-branch + --version-branch 3-1 --answer yes --dry-run - name: "Check Airflow RC process command" - run: | - ./scripts/ci/testing/run_breeze_command_with_retries.sh \ - release-management start-rc-process --version 2.8.3rc1 --previous-version 2.8.0 --answer yes + run: > + breeze release-management start-rc-process + --version 3.1.0rc1 --previous-version 3.0.0 --answer yes --dry-run - name: "Check Airflow release process command" - run: | - ./scripts/ci/testing/run_breeze_command_with_retries.sh \ - release-management start-release --release-candidate 2.8.3rc1
--previous-release 2.8.0 --answer yes + run: > + breeze release-management start-release --release-candidate 3.1.0rc1 + --previous-release 3.0.0 --answer yes --dry-run - name: "Test providers metadata generation" run: | - ./scripts/ci/testing/run_breeze_command_with_retries.sh \ - release-management generate-providers-metadata --refresh-constraints - - name: "Fetch all git tags" + git remote add apache https://github.com/apache/airflow.git + git fetch apache --tags + breeze release-management generate-providers-metadata --refresh-constraints-and-airflow-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Fetch all git tags for origin" run: git fetch --tags >/dev/null 2>&1 || true - name: "Test airflow core issue generation automatically" run: | - ./scripts/ci/testing/run_breeze_command_with_retries.sh \ - release-management generate-issue-content-core --limit-pr-count 25 --latest --verbose + breeze release-management generate-issue-content-core \ + --limit-pr-count 2 --previous-release 3.0.1 --current-release 3.0.2 --verbose diff --git a/.github/workflows/ci-amd.yml b/.github/workflows/ci-amd.yml new file mode 100644 index 0000000000000..13d45625c35dc --- /dev/null +++ b/.github/workflows/ci-amd.yml @@ -0,0 +1,986 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +--- +name: Tests AMD +on: # yamllint disable-line rule:truthy + schedule: + - cron: '28 1,7,13,19 * * *' + push: + branches: + - v[0-9]+-[0-9]+-test + - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ + pull_request: + branches: + - main + - v[0-9]+-[0-9]+-test + - v[0-9]+-[0-9]+-stable + - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ + types: [opened, reopened, synchronize, ready_for_review] + workflow_dispatch: +permissions: + # All other permissions are set to none by default + contents: read +env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + VERBOSE: "true" + +concurrency: + group: ci-amd-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + + build-info: + name: "Build info" + # At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners + runs-on: ["ubuntu-22.04"] + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + outputs: + all-python-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} + basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }} + canary-run: ${{ steps.source-run-info.outputs.canary-run }} + ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} + core-test-types-list-as-strings-in-json: >- + ${{ steps.selective-checks.outputs.core-test-types-list-as-strings-in-json }} + debug-resources: ${{ steps.selective-checks.outputs.debug-resources }} + default-branch: ${{ steps.selective-checks.outputs.default-branch }} + default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} + default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }} + default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }} + default-kubernetes-version: ${{ steps.selective-checks.outputs.default-kubernetes-version }} + default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }} + default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }} + default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} + disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} + docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} + docs-build: ${{ steps.selective-checks.outputs.docs-build }} + docs-list-as-string: ${{ steps.selective-checks.outputs.docs-list-as-string }} + excluded-providers-as-string: ${{ steps.selective-checks.outputs.excluded-providers-as-string }} + force-pip: ${{ steps.selective-checks.outputs.force-pip }} + full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }} + has-migrations: ${{ steps.selective-checks.outputs.has-migrations }} + helm-test-packages: ${{ steps.selective-checks.outputs.helm-test-packages }} + include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }} + individual-providers-test-types-list-as-strings-in-json: >- + ${{ steps.selective-checks.outputs.individual-providers-test-types-list-as-strings-in-json }} + kubernetes-combos: ${{ steps.selective-checks.outputs.kubernetes-combos }} + kubernetes-combos-list-as-string: >- + ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }} + kubernetes-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} + latest-versions-only: ${{ 
steps.selective-checks.outputs.latest-versions-only }} + mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }} + mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }} + mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }} + needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }} + needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} + needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }} + needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }} + only-new-ui-files: ${{ steps.selective-checks.outputs.only-new-ui-files }} + postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }} + postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }} + prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} + # yamllint disable rule:line-length + providers-compatibility-tests-matrix: > + ${{ steps.selective-checks.outputs.providers-compatibility-tests-matrix }} + providers-test-types-list-as-strings-in-json: >- + ${{ steps.selective-checks.outputs.providers-test-types-list-as-strings-in-json }} + pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }} + python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} + python-versions: ${{ steps.selective-checks.outputs.python-versions }} + run-amazon-tests: ${{ steps.selective-checks.outputs.run-amazon-tests }} + run-airflow-ctl-tests: ${{ steps.selective-checks.outputs.run-airflow-ctl-tests }} + run-coverage: ${{ steps.source-run-info.outputs.run-coverage }} + run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} + run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} + run-go-sdk-tests: ${{ steps.selective-checks.outputs.run-go-sdk-tests }} + run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }} + run-tests: ${{ steps.selective-checks.outputs.run-tests }} + run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} + run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }} + amd-runners: ${{ steps.selective-checks.outputs.amd-runners }} + arm-runners: ${{ steps.selective-checks.outputs.arm-runners }} + selected-providers-list-as-string: >- + ${{ steps.selective-checks.outputs.selected-providers-list-as-string }} + skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }} + skip-providers-tests: ${{ steps.selective-checks.outputs.skip-providers-tests }} + source-head-repo: ${{ steps.source-run-info.outputs.head-repo }} + source-head-ref: ${{ steps.source-run-info.outputs.head-ref }} + sqlite-exclude: ${{ steps.selective-checks.outputs.sqlite-exclude }} + testable-core-integrations: ${{ steps.selective-checks.outputs.testable-core-integrations }} + testable-providers-integrations: ${{ steps.selective-checks.outputs.testable-providers-integrations }} + use-uv: ${{ steps.selective-checks.outputs.force-pip == 'true' && 'false' || 'true' }} + upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + - name: Fetch incoming commit 
${{ github.sha }} with its parent + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + ref: ${{ github.sha }} + fetch-depth: 2 + persist-credentials: false + - name: "Install Breeze" + uses: ./.github/actions/breeze + id: breeze + - name: "Get information about the Workflow" + id: source-run-info + run: breeze ci get-workflow-info 2>> ${GITHUB_OUTPUT} + env: + SKIP_BREEZE_SELF_UPGRADE_CHECK: "true" + - name: Selective checks + id: selective-checks + env: + PR_LABELS: "${{ steps.source-run-info.outputs.pr-labels }}" + COMMIT_REF: "${{ github.sha }}" + VERBOSE: "false" + run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} + - name: env + run: printenv + env: + PR_LABELS: ${{ steps.source-run-info.outputs.pr-labels }} + GITHUB_CONTEXT: ${{ toJson(github) }} + + run-pin-versions-pre-commit: + name: "Run pin-versions pre-commit" + needs: [build-info] + runs-on: ${{ fromJSON(needs.build-info.outputs.amd-runners) }} + steps: + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: "Install Python 3.11 as 3.11+ is needed by pin-versions pre-commit" + uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c # v4.9.1 + with: + python-version: 3.11 + cache: "pip" + - name: Install pre-commit, uv, and pre-commit-uv + shell: bash + env: + UV_VERSION: "0.8.9" # Keep this comment to allow automatic replacement of uv version + PRE_COMMIT_VERSION: "4.3.0" # Keep this comment to allow automatic replacement of pre-commit version + PRE_COMMIT_UV_VERSION: "4.1.4" # Keep this comment to allow automatic replacement of pre-commit-uv version + run: | + pip install uv==${UV_VERSION} || true + uv tool install pre-commit==${PRE_COMMIT_VERSION} --with uv==${UV_VERSION} \ + --with pre-commit-uv==${PRE_COMMIT_UV_VERSION} + - name: "Run pin-versions pre-commit" + run: > + pre-commit run -c .github/.pre-commit-config.yaml --all-files --verbose --hook-stage manual + pin-versions + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + basic-tests: + name: "Basic tests" + needs: [build-info] + uses: ./.github/workflows/basic-tests.yml + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + run-ui-tests: ${{needs.build-info.outputs.run-ui-tests}} + run-www-tests: ${{needs.build-info.outputs.run-www-tests}} + needs-api-codegen: ${{needs.build-info.outputs.needs-api-codegen}} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + basic-checks-only: ${{needs.build-info.outputs.basic-checks-only}} + skip-pre-commits: ${{needs.build-info.outputs.skip-pre-commits}} + canary-run: ${{needs.build-info.outputs.canary-run}} + latest-versions-only: ${{needs.build-info.outputs.latest-versions-only}} + use-uv: ${{needs.build-info.outputs.use-uv}} + + build-ci-images: + name: Build CI images + needs: [build-info] + uses: ./.github/workflows/ci-image-build.yml + permissions: + contents: read + # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs + # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. 
+ packages: write + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + push-image: "false" + upload-image-artifact: "true" + upload-mount-cache-artifact: ${{ needs.build-info.outputs.canary-run }} + python-versions: ${{ needs.build-info.outputs.python-versions }} + branch: ${{ needs.build-info.outputs.default-branch }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + if: needs.build-info.outputs.ci-image-build == 'true' + + additional-ci-image-checks: + name: "Additional CI image checks" + needs: [build-info, build-ci-images] + uses: ./.github/workflows/additional-ci-image-checks.yml + permissions: + contents: read + packages: write + id-token: write + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + python-versions: ${{ needs.build-info.outputs.python-versions }} + branch: ${{ needs.build-info.outputs.default-branch }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + skip-pre-commits: ${{ needs.build-info.outputs.skip-pre-commits }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + canary-run: ${{ needs.build-info.outputs.canary-run }} + latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + + generate-constraints: + name: "Generate constraints" + needs: [build-info, build-ci-images] + uses: ./.github/workflows/generate-constraints.yml + if: needs.build-info.outputs.ci-image-build == 'true' + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} + python-versions: ${{ needs.build-info.outputs.python-versions }} + generate-pypi-constraints: "true" + # generate no providers constraints only in canary builds - they take quite some time to generate + # they are not needed for regular builds, they are only needed to update constraints in canaries + generate-no-providers-constraints: ${{ needs.build-info.outputs.canary-run }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + + ci-image-checks: + name: "CI image checks" + needs: [build-info, build-ci-images] + uses: ./.github/workflows/ci-image-checks.yml + permissions: + id-token: write + contents: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + needs-mypy: ${{ needs.build-info.outputs.needs-mypy }} + mypy-checks: ${{ needs.build-info.outputs.mypy-checks }} + python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} + branch: ${{ needs.build-info.outputs.default-branch }} + canary-run: ${{ needs.build-info.outputs.canary-run }} + 
default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + docs-list-as-string: ${{ needs.build-info.outputs.docs-list-as-string }} + latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} + basic-checks-only: ${{ needs.build-info.outputs.basic-checks-only }} + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + skip-pre-commits: ${{ needs.build-info.outputs.skip-pre-commits }} + ci-image-build: ${{ needs.build-info.outputs.ci-image-build }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + docs-build: ${{ needs.build-info.outputs.docs-build }} + needs-api-codegen: ${{ needs.build-info.outputs.needs-api-codegen }} + default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + source-head-repo: ${{ needs.build-info.outputs.source-head-repo }} + source-head-ref: ${{ needs.build-info.outputs.source-head-ref }} + secrets: + DOCS_AWS_ACCESS_KEY_ID: ${{ secrets.DOCS_AWS_ACCESS_KEY_ID }} + DOCS_AWS_SECRET_ACCESS_KEY: ${{ secrets.DOCS_AWS_SECRET_ACCESS_KEY }} + + providers: + name: "provider distributions tests" + uses: ./.github/workflows/test-providers.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + if: > + needs.build-info.outputs.skip-providers-tests != 'true' && + needs.build-info.outputs.latest-versions-only != 'true' && + needs.build-info.outputs.run-tests == 'true' + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + canary-run: ${{ needs.build-info.outputs.canary-run }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + selected-providers-list-as-string: ${{ needs.build-info.outputs.selected-providers-list-as-string }} + # yamllint disable rule:line-length + providers-compatibility-tests-matrix: > + ${{ needs.build-info.outputs.providers-compatibility-tests-matrix }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + python-versions: ${{ needs.build-info.outputs.python-versions }} + providers-test-types-list-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + + tests-helm: + name: "Helm tests" + uses: ./.github/workflows/helm-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + helm-test-packages: ${{ needs.build-info.outputs.helm-test-packages }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: > + needs.build-info.outputs.needs-helm-tests == 'true' && + needs.build-info.outputs.default-branch == 'main' && + needs.build-info.outputs.latest-versions-only != 'true' + + tests-postgres-core: + name: "Postgres tests: core" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "postgres" + test-name: "Postgres" + test-scope: "DB" + test-group: 
"core" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: ${{ needs.build-info.outputs.postgres-versions }} + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.postgres-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-migration-tests: "true" + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + + if: needs.build-info.outputs.run-tests == 'true' + + tests-postgres-providers: + name: "Postgres tests: providers" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "postgres" + test-name: "Postgres" + test-scope: "DB" + test-group: "providers" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: ${{ needs.build-info.outputs.postgres-versions }} + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.postgres-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-migration-tests: "true" + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-mysql-core: + name: "MySQL tests: core" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "mysql" + test-name: "MySQL" + test-scope: "DB" + test-group: "core" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: ${{ needs.build-info.outputs.mysql-versions }} + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.mysql-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + run-migration-tests: "true" + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-mysql-providers: + name: "MySQL tests: providers" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + 
contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "mysql" + test-name: "MySQL" + test-scope: "DB" + test-group: "providers" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: ${{ needs.build-info.outputs.mysql-versions }} + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.mysql-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + run-migration-tests: "true" + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + + tests-sqlite-core: + name: "Sqlite tests: core" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "sqlite" + test-name: "Sqlite" + test-name-separator: "" + test-scope: "DB" + test-group: "core" + python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for sqlite + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + run-migration-tests: "true" + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-sqlite-providers: + name: "Sqlite tests: providers" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "sqlite" + test-name: "Sqlite" + test-name-separator: "" + test-scope: "DB" + test-group: "providers" + python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for sqlite + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + run-migration-tests: "true" + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch 
}} + if: needs.build-info.outputs.run-tests == 'true' + + + tests-non-db-core: + name: "Non-DB tests: core" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "sqlite" + test-name: "" + test-name-separator: "" + test-scope: "Non-DB" + test-group: "core" + python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for non-db + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-non-db-providers: + name: "Non-DB tests: providers" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + backend: "sqlite" + test-name: "" + test-name-separator: "" + test-scope: "Non-DB" + test-group: "providers" + python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for non-db + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-special: + name: "Special tests" + uses: ./.github/workflows/special-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + if: > + needs.build-info.outputs.run-tests == 'true' && + (needs.build-info.outputs.canary-run == 'true' || + needs.build-info.outputs.upgrade-to-newer-dependencies != 'false' || + needs.build-info.outputs.full-tests-needed == 'true') + with: + default-branch: ${{ needs.build-info.outputs.default-branch }} + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + core-test-types-list-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + providers-test-types-list-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + python-versions: ${{ needs.build-info.outputs.python-versions }} + 
default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + canary-run: ${{ needs.build-info.outputs.canary-run }} + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + + tests-integration-system: + name: Integration and System Tests + needs: [build-info, build-ci-images] + uses: ./.github/workflows/integration-system-tests.yml + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + testable-core-integrations: ${{ needs.build-info.outputs.testable-core-integrations }} + testable-providers-integrations: ${{ needs.build-info.outputs.testable-providers-integrations }} + run-system-tests: ${{ needs.build-info.outputs.run-tests }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} + default-mysql-version: ${{ needs.build-info.outputs.default-mysql-version }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-with-lowest-direct-resolution-core: + name: "Low dep tests:core" + needs: [build-info, build-ci-images] + uses: ./.github/workflows/run-unit-tests.yml + permissions: + contents: read + packages: read + if: > + needs.build-info.outputs.run-tests == 'true' + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + test-name: "LowestDeps" + force-lowest-dependencies: "true" + test-scope: "All" + test-group: "core" + backend: "sqlite" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: "['${{ needs.build-info.outputs.default-postgres-version }}']" + excluded-providers-as-string: "" + excludes: "[]" + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + monitor-delay-time-in-seconds: 120 + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + + tests-with-lowest-direct-resolution-providers: + name: "Low dep tests: providers" + needs: [build-info, build-ci-images] + uses: ./.github/workflows/run-unit-tests.yml + permissions: + contents: read + packages: read + if: needs.build-info.outputs.run-tests == 'true' + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + test-name: "LowestDeps" + force-lowest-dependencies: "true" + test-scope: "All" + test-group: "providers" + backend: "sqlite" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: "['${{ 
needs.build-info.outputs.default-postgres-version }}']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: "[]" + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.individual-providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + monitor-delay-time-in-seconds: 120 + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + + build-prod-images: + name: Build PROD images + needs: [build-info, build-ci-images, generate-constraints] + uses: ./.github/workflows/prod-image-build.yml + permissions: + contents: read + # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs + # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. + packages: write + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + build-type: "Regular" + push-image: "false" + upload-image-artifact: "true" + upload-package-artifact: "true" + python-versions: ${{ needs.build-info.outputs.python-versions }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + branch: ${{ needs.build-info.outputs.default-branch }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + prod-image-build: ${{ needs.build-info.outputs.prod-image-build }} + + additional-prod-image-tests: + name: "Additional PROD image tests" + needs: [build-info, build-prod-images, generate-constraints] + uses: ./.github/workflows/additional-prod-image-tests.yml + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + default-branch: ${{ needs.build-info.outputs.default-branch }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + canary-run: ${{ needs.build-info.outputs.canary-run }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: needs.build-info.outputs.prod-image-build == 'true' + + tests-kubernetes: + name: "Kubernetes tests" + uses: ./.github/workflows/k8s-tests.yml + needs: [build-info, build-prod-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + kubernetes-combos: ${{ needs.build-info.outputs.kubernetes-combos }} + 
if: > + ( needs.build-info.outputs.run-kubernetes-tests == 'true' || + needs.build-info.outputs.needs-helm-tests == 'true') + + tests-task-sdk: + name: "Task SDK tests" + uses: ./.github/workflows/airflow-distributions-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + python-versions: ${{ needs.build-info.outputs.python-versions }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + canary-run: ${{ needs.build-info.outputs.canary-run }} + distribution-name: "task-sdk" + distribution-cmd-format: "prepare-task-sdk-distributions" + test-type: "task-sdk-tests" + use-local-venv: 'false' + test-timeout: 20 + if: > + ( needs.build-info.outputs.run-task-sdk-tests == 'true' || + needs.build-info.outputs.run-tests == 'true' && + needs.build-info.outputs.only-new-ui-files != 'true') + + tests-go-sdk: + name: "Go SDK tests" + needs: [build-info, build-ci-images] + runs-on: ${{ fromJSON(needs.build-info.outputs.amd-runners) }} + timeout-minutes: 15 + permissions: + contents: read + packages: read + if: > + ( needs.build-info.outputs.run-go-sdk-tests == 'true' || + needs.build-info.outputs.run-tests == 'true' && + needs.build-info.outputs.only-new-ui-files != 'true') + env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + + # keep this in sync with go.mod in go-sdk/ + - name: Setup Go + uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 + with: + go-version: 1.24 + cache-dependency-path: go-sdk/go.sum + + # keep this in sync with go.mod in go-sdk/ + - name: Setup Gotestsum + shell: bash + run: | + go install gotest.tools/gotestsum@ddd0b05a6878e2e8257a2abe6e7df66cebc53d0e # v1.12.3 + gotestsum --version + + - name: "Cleanup dist files" + run: rm -fv ./dist/* + + - name: Run Go tests + working-directory: ./go-sdk + run: gotestsum --format testname ./... 
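The `if:` guards on the Task SDK and Go SDK jobs above rely on operator precedence in GitHub Actions expressions: `&&` binds more tightly than `||`, so no extra parentheses are needed around the second and third clauses. A fully parenthesized equivalent of the Go SDK guard, shown for illustration only:

    if: >
      ( needs.build-info.outputs.run-go-sdk-tests == 'true' ||
        ( needs.build-info.outputs.run-tests == 'true' &&
          needs.build-info.outputs.only-new-ui-files != 'true' ) )

In other words, the job runs when Go SDK tests are explicitly selected, or when a general test run is requested and the change touches more than just new UI files.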
+ + tests-airflow-ctl: + name: "Airflow CTL tests" + uses: ./.github/workflows/airflow-distributions-tests.yml + needs: [build-info] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + python-versions: ${{ needs.build-info.outputs.python-versions }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + canary-run: ${{ needs.build-info.outputs.canary-run }} + distribution-name: "airflow-ctl" + distribution-cmd-format: "prepare-airflow-ctl-distributions" + test-type: "airflow-ctl-tests" + use-local-venv: 'true' + test-timeout: 20 + if: > + ( needs.build-info.outputs.run-airflow-ctl-tests == 'true' || + needs.build-info.outputs.run-tests == 'true' && + needs.build-info.outputs.only-new-ui-files != 'true') + + finalize-tests: + name: Finalize tests + permissions: + contents: write + packages: write + # This will fire when all the jobs from "needs" are either successful or skipped + if: always() && !failure() && !cancelled() + needs: + - additional-ci-image-checks + - additional-prod-image-tests + - basic-tests + - build-info + - build-prod-images + - ci-image-checks + - generate-constraints + - providers + - tests-helm + - tests-integration-system + - tests-kubernetes + - tests-mysql-core + - tests-mysql-providers + - tests-non-db-core + - tests-non-db-providers + - tests-postgres-core + - tests-postgres-providers + # - tests-special + - tests-sqlite-core + - tests-sqlite-providers + - tests-task-sdk + - tests-airflow-ctl + - tests-with-lowest-direct-resolution-core + - tests-with-lowest-direct-resolution-providers + uses: ./.github/workflows/finalize-tests.yml + with: + runners: ${{ needs.build-info.outputs.amd-runners }} + platform: "linux/amd64" + python-versions: ${{ needs.build-info.outputs.python-versions }} + python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} + branch: ${{ needs.build-info.outputs.default-branch }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + canary-run: ${{ needs.build-info.outputs.canary-run }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + + notify-slack-failure: + name: "Notify Slack on Failure" + needs: + - finalize-tests + if: github.event_name == 'schedule' && failure() && github.run_attempt == 1 + runs-on: ["ubuntu-22.04"] + steps: + - name: Notify Slack + id: slack + uses: slackapi/slack-github-action@485a9d42d3a73031f12ec201c457e2162c45d02d # v2.0.0 + with: + method: chat.postMessage + token: ${{ env.SLACK_BOT_TOKEN }} + # yamllint disable rule:line-length + payload: | + channel: "internal-airflow-ci-cd" + text: "🚨🕒 Failure Alert: Scheduled CI (AMD) on branch *${{ github.ref_name }}* 🕒🚨\n\n*Details:* " + blocks: + - type: "section" + text: + type: "mrkdwn" + text: "🚨🕒 Failure Alert: Scheduled CI (AMD) 🕒🚨\n\n*Details:* " + # yamllint enable rule:line-length + + summarize-warnings: + timeout-minutes: 15 + name: "Summarize warnings" + runs-on: 
${{ fromJSON(needs.build-info.outputs.amd-runners) }}
+    needs: [build-info]
+    if: needs.build-info.outputs.run-tests == 'true'
+    steps:
+      - name: "Cleanup repo"
+        shell: bash
+        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+      - name: "Prepare and cleanup runner"
+        run: ./scripts/ci/prepare_and_cleanup_runner.sh
+      - name: "Free up disk space"
+        shell: bash
+        run: ./scripts/tools/free_up_disk_space.sh
+      - name: "Download all test warning artifacts from the current build"
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          path: ./artifacts
+          pattern: test-warnings-*
+      - name: "Setup python"
+        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        with:
+          python-version: "${{ needs.build-info.outputs.default-python-version }}"
+      - name: "Summarize all warnings"
+        run: |
+          ./scripts/ci/testing/summarize_captured_warnings.py ./artifacts \
+            --pattern "**/warnings-*.txt" \
+            --output ./files
+      - name: "Upload artifact for summarized warnings"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: test-summarized-amd-runner-warnings
+          path: ./files/warn-summary-*.txt
+          retention-days: 7
+          if-no-files-found: ignore
+          overwrite: true
diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml
new file mode 100644
index 0000000000000..89014e80f6742
--- /dev/null
+++ b/.github/workflows/ci-arm.yml
@@ -0,0 +1,614 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# +--- +name: Tests ARM +on: # yamllint disable-line rule:truthy + schedule: + - cron: '28 3,9,15,21 * * *' + push: + branches: + - v[0-9]+-[0-9]+-test + - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ + workflow_dispatch: +permissions: + # All other permissions are set to none by default + contents: read +env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + VERBOSE: "true" + +concurrency: + group: ci-arm-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + + build-info: + name: "Build info" + # At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners + runs-on: ["ubuntu-22.04"] + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + outputs: + all-python-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} + basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }} + canary-run: ${{ steps.source-run-info.outputs.canary-run }} + ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} + core-test-types-list-as-strings-in-json: >- + ${{ steps.selective-checks.outputs.core-test-types-list-as-strings-in-json }} + debug-resources: ${{ steps.selective-checks.outputs.debug-resources }} + default-branch: ${{ steps.selective-checks.outputs.default-branch }} + default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} + default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }} + default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }} + default-kubernetes-version: ${{ steps.selective-checks.outputs.default-kubernetes-version }} + default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }} + default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }} + default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} + disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} + docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} + docs-build: ${{ steps.selective-checks.outputs.docs-build }} + docs-list-as-string: ${{ steps.selective-checks.outputs.docs-list-as-string }} + excluded-providers-as-string: ${{ steps.selective-checks.outputs.excluded-providers-as-string }} + force-pip: ${{ steps.selective-checks.outputs.force-pip }} + full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }} + has-migrations: ${{ steps.selective-checks.outputs.has-migrations }} + helm-test-packages: ${{ steps.selective-checks.outputs.helm-test-packages }} + include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }} + individual-providers-test-types-list-as-strings-in-json: >- + ${{ steps.selective-checks.outputs.individual-providers-test-types-list-as-strings-in-json }} + kubernetes-combos: ${{ steps.selective-checks.outputs.kubernetes-combos }} + kubernetes-combos-list-as-string: >- + ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }} + kubernetes-versions-list-as-string: >- + ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} + latest-versions-only: ${{ steps.selective-checks.outputs.latest-versions-only }} + mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }} + mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }} + mysql-versions: ${{ 
steps.selective-checks.outputs.mysql-versions }} + needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }} + needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} + needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }} + needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }} + only-new-ui-files: ${{ steps.selective-checks.outputs.only-new-ui-files }} + postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }} + postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }} + prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} + # yamllint disable rule:line-length + providers-compatibility-tests-matrix: > + ${{ steps.selective-checks.outputs.providers-compatibility-tests-matrix }} + providers-test-types-list-as-strings-in-json: >- + ${{ steps.selective-checks.outputs.providers-test-types-list-as-strings-in-json }} + pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }} + python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} + python-versions: ${{ steps.selective-checks.outputs.python-versions }} + run-amazon-tests: ${{ steps.selective-checks.outputs.run-amazon-tests }} + run-airflow-ctl-tests: ${{ steps.selective-checks.outputs.run-airflow-ctl-tests }} + run-coverage: ${{ steps.source-run-info.outputs.run-coverage }} + run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} + run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} + run-go-sdk-tests: ${{ steps.selective-checks.outputs.run-go-sdk-tests }} + run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }} + run-tests: ${{ steps.selective-checks.outputs.run-tests }} + run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} + run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }} + amd-runners: ${{ steps.selective-checks.outputs.amd-runners }} + arm-runners: ${{ steps.selective-checks.outputs.arm-runners }} + selected-providers-list-as-string: >- + ${{ steps.selective-checks.outputs.selected-providers-list-as-string }} + skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }} + skip-providers-tests: ${{ steps.selective-checks.outputs.skip-providers-tests }} + source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }} + sqlite-exclude: ${{ steps.selective-checks.outputs.sqlite-exclude }} + testable-core-integrations: ${{ steps.selective-checks.outputs.testable-core-integrations }} + testable-providers-integrations: ${{ steps.selective-checks.outputs.testable-providers-integrations }} + use-uv: ${{ steps.selective-checks.outputs.force-pip == 'true' && 'false' || 'true' }} + upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + - name: Fetch incoming commit ${{ github.sha }} with its parent + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + ref: ${{ github.sha }} + fetch-depth: 2 + persist-credentials: false + - name: "Install Breeze" + uses: ./.github/actions/breeze + id: breeze + - 
name: "Get information about the Workflow" + id: source-run-info + run: breeze ci get-workflow-info 2>> ${GITHUB_OUTPUT} + env: + SKIP_BREEZE_SELF_UPGRADE_CHECK: "true" + - name: Selective checks + id: selective-checks + env: + PR_LABELS: "${{ steps.source-run-info.outputs.pr-labels }}" + COMMIT_REF: "${{ github.sha }}" + VERBOSE: "false" + run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} + - name: env + run: printenv + env: + PR_LABELS: ${{ steps.source-run-info.outputs.pr-labels }} + GITHUB_CONTEXT: ${{ toJson(github) }} + + basic-tests: + name: "Basic tests" + needs: [build-info] + uses: ./.github/workflows/basic-tests.yml + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + run-ui-tests: ${{needs.build-info.outputs.run-ui-tests}} + run-www-tests: ${{needs.build-info.outputs.run-www-tests}} + needs-api-codegen: ${{needs.build-info.outputs.needs-api-codegen}} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + basic-checks-only: ${{needs.build-info.outputs.basic-checks-only}} + skip-pre-commits: ${{needs.build-info.outputs.skip-pre-commits}} + canary-run: ${{needs.build-info.outputs.canary-run}} + latest-versions-only: ${{needs.build-info.outputs.latest-versions-only}} + use-uv: ${{needs.build-info.outputs.use-uv}} + + build-ci-images: + name: Build CI images + needs: [build-info] + uses: ./.github/workflows/ci-image-build.yml + permissions: + contents: read + # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs + # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. + packages: write + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + push-image: "false" + upload-image-artifact: "true" + upload-mount-cache-artifact: ${{ needs.build-info.outputs.canary-run }} + python-versions: ${{ needs.build-info.outputs.python-versions }} + branch: ${{ needs.build-info.outputs.default-branch }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + if: needs.build-info.outputs.ci-image-build == 'true' + + additional-ci-image-checks: + name: "Additional CI image checks" + needs: [build-info, build-ci-images] + uses: ./.github/workflows/additional-ci-image-checks.yml + permissions: + contents: read + packages: write + id-token: write + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + python-versions: ${{ needs.build-info.outputs.python-versions }} + branch: ${{ needs.build-info.outputs.default-branch }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + skip-pre-commits: ${{ needs.build-info.outputs.skip-pre-commits }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + canary-run: ${{ needs.build-info.outputs.canary-run }} + latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} + include-success-outputs: ${{ 
needs.build-info.outputs.include-success-outputs }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + + generate-constraints: + name: "Generate constraints" + needs: [build-info, build-ci-images] + uses: ./.github/workflows/generate-constraints.yml + if: needs.build-info.outputs.ci-image-build == 'true' + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} + python-versions: ${{ needs.build-info.outputs.python-versions }} + generate-pypi-constraints: "true" + # generate no providers constraints only in canary builds - they take quite some time to generate + # they are not needed for regular builds, they are only needed to update constraints in canaries + generate-no-providers-constraints: ${{ needs.build-info.outputs.canary-run }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + + providers: + name: "provider distributions tests" + uses: ./.github/workflows/test-providers.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + if: > + needs.build-info.outputs.skip-providers-tests != 'true' && + needs.build-info.outputs.latest-versions-only != 'true' + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + canary-run: ${{ needs.build-info.outputs.canary-run }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + selected-providers-list-as-string: ${{ needs.build-info.outputs.selected-providers-list-as-string }} + # yamllint disable rule:line-length + providers-compatibility-tests-matrix: > + ${{ needs.build-info.outputs.providers-compatibility-tests-matrix }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + python-versions: ${{ needs.build-info.outputs.python-versions }} + providers-test-types-list-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + + tests-helm: + name: "Helm tests" + uses: ./.github/workflows/helm-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + helm-test-packages: ${{ needs.build-info.outputs.helm-test-packages }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: > + needs.build-info.outputs.needs-helm-tests == 'true' && + needs.build-info.outputs.default-branch == 'main' && + needs.build-info.outputs.latest-versions-only != 'true' + + tests-postgres-core: + name: "Postgres tests: core" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + backend: "postgres" + test-name: "Postgres" + test-scope: "DB" + test-group: "core" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: ${{ needs.build-info.outputs.postgres-versions }} + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ 
needs.build-info.outputs.postgres-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-migration-tests: "true" + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-postgres-providers: + name: "Postgres tests: providers" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + backend: "postgres" + test-name: "Postgres" + test-scope: "DB" + test-group: "providers" + python-versions: ${{ needs.build-info.outputs.python-versions }} + backend-versions: ${{ needs.build-info.outputs.postgres-versions }} + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.postgres-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-migration-tests: "true" + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-sqlite-core: + name: "Sqlite tests: core" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + backend: "sqlite" + test-name: "Sqlite" + test-name-separator: "" + test-scope: "DB" + test-group: "core" + python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for sqlite + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + run-migration-tests: "true" + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-sqlite-providers: + name: "Sqlite tests: providers" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + backend: "sqlite" + test-name: "Sqlite" + test-name-separator: "" + test-scope: "DB" + test-group: "providers" 
+ python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for sqlite + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + run-migration-tests: "true" + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + + tests-non-db-core: + name: "Non-DB tests: core" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + backend: "sqlite" + test-name: "" + test-name-separator: "" + test-scope: "Non-DB" + test-group: "core" + python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for non-db + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + tests-non-db-providers: + name: "Non-DB tests: providers" + uses: ./.github/workflows/run-unit-tests.yml + needs: [build-info, build-ci-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + backend: "sqlite" + test-name: "" + test-name-separator: "" + test-scope: "Non-DB" + test-group: "providers" + python-versions: ${{ needs.build-info.outputs.python-versions }} + # No versions for non-db + backend-versions: "['']" + excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} + excludes: ${{ needs.build-info.outputs.sqlite-exclude }} + test-types-as-strings-in-json: > + ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + run-coverage: ${{ needs.build-info.outputs.run-coverage }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + default-branch: ${{ needs.build-info.outputs.default-branch }} + if: needs.build-info.outputs.run-tests == 'true' + + build-prod-images: + name: Build PROD images + needs: [build-info, build-ci-images, generate-constraints] + uses: ./.github/workflows/prod-image-build.yml + permissions: + contents: read + # This write is only given here for 
`push` events from "apache/airflow" repo. It is not given for PRs + # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. + packages: write + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + build-type: "Regular" + push-image: "false" + upload-image-artifact: "true" + upload-package-artifact: "true" + python-versions: ${{ needs.build-info.outputs.python-versions }} + default-python-version: "${{ needs.build-info.outputs.default-python-version }}" + branch: ${{ needs.build-info.outputs.default-branch }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} + docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + prod-image-build: ${{ needs.build-info.outputs.prod-image-build }} + + tests-kubernetes: + name: "Kubernetes tests" + uses: ./.github/workflows/k8s-tests.yml + needs: [build-info, build-prod-images] + permissions: + contents: read + packages: read + with: + runners: ${{ needs.build-info.outputs.arm-runners }} + platform: "linux/arm64" + python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} + kubernetes-combos: ${{ needs.build-info.outputs.kubernetes-combos }} + if: > + ( needs.build-info.outputs.run-kubernetes-tests == 'true' || + needs.build-info.outputs.needs-helm-tests == 'true') + + tests-go-sdk: + name: "Go SDK tests" + needs: [build-info, build-ci-images] + runs-on: ${{ fromJSON(needs.build-info.outputs.arm-runners) }} + timeout-minutes: 15 + permissions: + contents: read + packages: read + if: > + ( needs.build-info.outputs.run-go-sdk-tests == 'true' || + needs.build-info.outputs.run-tests == 'true' && + needs.build-info.outputs.only-new-ui-files != 'true') + env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + + # keep this in sync with go.mod in go-sdk/ + - name: Setup Go + uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 + with: + go-version: 1.24 + cache-dependency-path: go-sdk/go.sum + + - name: "Cleanup dist files" + run: rm -fv ./dist/* + + - name: Run Go tests + working-directory: ./go-sdk + run: go test -v ./... 
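As in the AMD workflow, `runs-on` consumes a JSON array that the build-info job publishes as a plain string output. For illustration, assuming selective checks emitted arm-runners='["ubuntu-24.04-arm"]' (a hypothetical value; the real labels are computed per run):

    # hypothetical build-info output: arm-runners == '["ubuntu-24.04-arm"]'
    runs-on: ${{ fromJSON(needs.build-info.outputs.arm-runners) }}
    # ...would then behave like:
    runs-on: ["ubuntu-24.04-arm"]

This is also why build-info itself hard-codes its runs-on to a public runner: no job outputs exist yet at that stage.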
+
+  finalize-tests:
+    name: Finalize tests
+    permissions:
+      contents: write
+      packages: write
+    # This will fire when all the jobs from "needs" are either successful or skipped
+    if: always() && !failure() && !cancelled()
+    needs:
+      - additional-ci-image-checks
+      - basic-tests
+      - build-info
+      - generate-constraints
+      - build-prod-images
+      - providers
+      - tests-helm
+      - tests-kubernetes
+      - tests-non-db-core
+      - tests-non-db-providers
+      - tests-postgres-core
+      - tests-postgres-providers
+      - tests-sqlite-core
+      - tests-sqlite-providers
+    uses: ./.github/workflows/finalize-tests.yml
+    with:
+      runners: ${{ needs.build-info.outputs.arm-runners }}
+      platform: "linux/arm64"
+      python-versions: ${{ needs.build-info.outputs.python-versions }}
+      python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }}
+      branch: ${{ needs.build-info.outputs.default-branch }}
+      constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }}
+      default-python-version: "${{ needs.build-info.outputs.default-python-version }}"
+      upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }}
+      include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }}
+      docker-cache: ${{ needs.build-info.outputs.docker-cache }}
+      disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }}
+      canary-run: ${{ needs.build-info.outputs.canary-run }}
+      use-uv: ${{ needs.build-info.outputs.use-uv }}
+      debug-resources: ${{ needs.build-info.outputs.debug-resources }}
+
+  notify-slack-failure:
+    name: "Notify Slack on Failure"
+    needs:
+      - finalize-tests
+    if: github.event_name == 'schedule' && failure() && github.run_attempt == 1
+    runs-on: ["ubuntu-22.04"]
+    steps:
+      - name: Notify Slack
+        id: slack
+        uses: slackapi/slack-github-action@485a9d42d3a73031f12ec201c457e2162c45d02d # v2.0.0
+        with:
+          method: chat.postMessage
+          token: ${{ env.SLACK_BOT_TOKEN }}
+          # yamllint disable rule:line-length
+          payload: |
+            channel: "internal-airflow-ci-cd"
+            text: "🚨🕒 Failure Alert: Scheduled CI (ARM) on branch *${{ github.ref_name }}* 🕒🚨\n\n*Details:* "
+            blocks:
+              - type: "section"
+                text:
+                  type: "mrkdwn"
+                  text: "🚨🕒 Failure Alert: Scheduled CI (ARM) 🕒🚨\n\n*Details:* "
+          # yamllint enable rule:line-length
diff --git a/.github/workflows/ci-image-build.yml b/.github/workflows/ci-image-build.yml
index c695778b87b99..e716f85c5ad79 100644
--- a/.github/workflows/ci-image-build.yml
+++ b/.github/workflows/ci-image-build.yml
@@ -20,12 +20,8 @@ name: Build CI images
 on: # yamllint disable-line rule:truthy
   workflow_call:
     inputs:
-      runs-on-as-json-public:
-        description: "The array of labels (in json form) determining public runners."
-        required: true
-        type: string
-      runs-on-as-json-self-hosted:
-        description: "The array of labels (in json form) determining self-hosted runners."
+      runners:
+        description: "The array of labels (in json form) determining runners."
         required: true
         type: string
       target-commit-sha:
@@ -77,7 +73,7 @@ on: # yamllint disable-line rule:truthy
         required: true
         type: string
       branch:
-        description: "Branch used to run the CI jobs in (main/v2_*_test)."
+        description: "Branch used to run the CI jobs in (main/v*_*_test)."
         required: true
         type: string
       constraints-branch:
@@ -106,17 +102,12 @@ jobs:
         python-version: ${{ fromJSON(inputs.python-versions) || fromJSON('[""]') }}
     timeout-minutes: 110
     name: "Build CI ${{ inputs.platform }} image ${{ matrix.python-version }}"
-    # NOTE!!!!!
This has to be put in one line for runs-on to recognize the "fromJSON" properly !!!! - # adding space before (with >) apparently turns the `runs-on` processed line into a string "Array" - # instead of an array of strings. - # yamllint disable-line rule:line-length - runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: BACKEND: sqlite PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} DEFAULT_BRANCH: ${{ inputs.branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ inputs.constraints-branch }} - VERSION_SUFFIX_FOR_PYPI: "dev0" GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} @@ -126,15 +117,13 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout target branch" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - name: "Restore ci-cache mount image ${{ inputs.platform }}:${{ env.PYTHON_MAJOR_MINOR_VERSION }}" uses: apache/infrastructure-actions/stash/restore@1c35b5ccf8fba5d4c3fdf25a045ca91aa0cbc468 with: diff --git a/.github/workflows/ci-image-checks.yml b/.github/workflows/ci-image-checks.yml index 3e7a3bba9088a..7fd13495aa827 100644 --- a/.github/workflows/ci-image-checks.yml +++ b/.github/workflows/ci-image-checks.yml @@ -20,12 +20,12 @@ name: CI Image Checks on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build." + runners: + description: "The array of labels (in json form) determining runners." required: true type: string - runs-on-as-json-docs-build: - description: "The array of labels (in json form) determining the labels used for docs build." + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string needs-mypy: @@ -41,7 +41,7 @@ on: # yamllint disable-line rule:truthy required: true type: string branch: - description: "Branch used to run the CI jobs in (main/v2_*_test)." + description: "Branch used to run the CI jobs in (main/v*_*_test)." 
required: true type: string canary-run: @@ -60,10 +60,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to upgrade to newer dependencies (true/false)" required: true type: string - chicken-egg-providers: - description: "List of providers that should be prepared from sources" - required: false - type: string basic-checks-only: description: "Whether to run only basic checks (true/false)" required: true @@ -108,6 +104,14 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv to build the image (true/false)" required: true type: string + source-head-repo: + description: "The source head repository to use for back-references" + default: "apache/airflow" + type: string + source-head-ref: + description: "The source head ref to use for back-references" + default: "main" + type: string secrets: DOCS_AWS_ACCESS_KEY_ID: required: true @@ -121,7 +125,7 @@ jobs: install-pre-commit: timeout-minutes: 5 name: "Install pre-commit for cache (only canary runs)" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" if: inputs.basic-checks-only == 'false' @@ -131,14 +135,12 @@ jobs: run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" if: inputs.canary-run == 'true' - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false if: inputs.canary-run == 'true' - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} id: breeze if: inputs.canary-run == 'true' - name: "Install pre-commit" @@ -165,7 +167,7 @@ jobs: static-checks: timeout-minutes: 45 name: "Static checks" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} needs: install-pre-commit env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" @@ -177,14 +179,14 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} id: breeze - name: "Install pre-commit" @@ -197,7 +199,7 @@ jobs: env: VERBOSE: "false" SKIP: ${{ inputs.skip-pre-commits }} - COLUMNS: "250" + COLUMNS: "202" SKIP_GROUP_OUTPUT: "true" DEFAULT_BRANCH: ${{ inputs.branch }} RUFF_FORMAT: "github" @@ -205,7 +207,7 @@ jobs: mypy: timeout-minutes: 45 name: "MyPy checks" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} needs: install-pre-commit if: inputs.needs-mypy == 'true' strategy: @@ -220,14 +222,14 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version 
}}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} id: breeze - name: "Install pre-commit" @@ -239,7 +241,7 @@ jobs: run: pre-commit run --color always --verbose --hook-stage manual "$MYPY_CHECK" --all-files env: VERBOSE: "false" - COLUMNS: "250" + COLUMNS: "202" SKIP_GROUP_OUTPUT: "true" DEFAULT_BRANCH: ${{ inputs.branch }} RUFF_FORMAT: "github" @@ -249,7 +251,7 @@ jobs: build-docs: timeout-minutes: 150 name: "Build documentation" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} if: inputs.docs-build == 'true' strategy: fail-fast: false @@ -268,39 +270,38 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} - name: "Restore docs inventory cache" uses: apache/infrastructure-actions/stash/restore@1c35b5ccf8fba5d4c3fdf25a045ca91aa0cbc468 with: path: ./generated/_inventory_cache/ - # TODO(potiuk): do better with determining the key - key: cache-docs-inventory-v1-${{ hashFiles('pyproject.toml') }} + key: cache-docs-inventory-v1-${{ hashFiles('**/pyproject.toml') }} id: restore-docs-inventory-cache - name: "Building docs with ${{ matrix.flag }} flag" env: DOCS_LIST_AS_STRING: ${{ inputs.docs-list-as-string }} run: > - breeze build-docs ${DOCS_LIST_AS_STRING} ${{ matrix.flag }} + breeze build-docs ${DOCS_LIST_AS_STRING} ${{ matrix.flag }} --refresh-airflow-inventories - name: "Save docs inventory cache" uses: apache/infrastructure-actions/stash/save@1c35b5ccf8fba5d4c3fdf25a045ca91aa0cbc468 with: path: ./generated/_inventory_cache/ - key: cache-docs-inventory-v1-${{ hashFiles('pyproject.toml') }} + key: cache-docs-inventory-v1-${{ hashFiles('**/pyproject.toml') }} if-no-files-found: 'error' retention-days: '2' # If we upload from multiple matrix jobs we could end up with a race condition. so just pick one job # to be responsible for updating it. 
https://github.com/actions/upload-artifact/issues/506 if: steps.restore-docs-inventory-cache != 'true' && matrix.flag == '--docs-only' - name: "Upload build docs" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: airflow-docs path: './generated/_build' @@ -310,12 +311,12 @@ jobs: publish-docs: timeout-minutes: 150 - name: "Publish documentation" + name: "Publish documentation and validate versions" permissions: id-token: write contents: read needs: build-docs - runs-on: ${{ fromJSON(inputs.runs-on-as-json-docs-build) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -324,36 +325,35 @@ jobs: INCLUDE_SUCCESS_OUTPUTS: "${{ inputs.include-success-outputs }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" - if: inputs.canary-run == 'true' && inputs.branch == 'main' + HEAD_REPO: "${{ inputs.source-head-repo }}" + HEAD_REF: "${{ inputs.source-head-ref }}" steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" + use-uv: ${{ inputs.use-uv }} - name: "Download docs prepared as artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: airflow-docs path: './generated/_build' - name: Check disk space available - run: df -h + run: df -H - name: Create /mnt/airflow-site directory run: sudo mkdir -p /mnt/airflow-site && sudo chown -R "${USER}" /mnt/airflow-site - name: "Clone airflow-site" run: > git clone https://github.com/apache/airflow-site.git /mnt/airflow-site/airflow-site && echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/airflow-site" >> "$GITHUB_ENV" - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" - uses: ./.github/actions/prepare_breeze_and_image - with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} - use-uv: ${{ inputs.use-uv }} - name: "Publish docs" env: DOCS_LIST_AS_STRING: ${{ inputs.docs-list-as-string }} @@ -361,7 +361,7 @@ jobs: breeze release-management publish-docs --override-versioned --run-in-parallel ${DOCS_LIST_AS_STRING} - name: Check disk space available - run: df -h + run: df -H - name: "Generate back references for providers" run: breeze release-management add-back-references all-providers - name: "Generate back references for apache-airflow" @@ -370,6 +370,11 @@ jobs: run: breeze release-management add-back-references docker-stack - name: "Generate back references for helm-chart" run: breeze release-management add-back-references helm-chart + - name: "Validate published doc versions" + id: validate-docs-versions + run: cd ./dev/breeze && uv run ./src/airflow_breeze/utils/docs_version_validation.py + env: + AIRFLOW_SITE_DIRECTORY: /mnt/airflow-site/airflow-site - name: Install AWS CLI v2 run: | curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o /tmp/awscliv2.zip @@ -377,19 
+382,28 @@ jobs: rm /tmp/awscliv2.zip sudo /tmp/aws/install --update rm -rf /tmp/aws/ + if: > + inputs.canary-run == 'true' && + (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 with: aws-access-key-id: ${{ secrets.DOCS_AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.DOCS_AWS_SECRET_ACCESS_KEY }} aws-region: eu-central-1 + if: > + inputs.canary-run == 'true' && + (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') - name: "Upload documentation to AWS S3" run: aws s3 sync --delete ./generated/_build s3://apache-airflow-docs + if: > + inputs.canary-run == 'true' && + (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') test-python-api-client: timeout-minutes: 60 name: "Test Python API client" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} if: inputs.needs-api-codegen == 'true' env: BACKEND: "postgres" @@ -407,14 +421,12 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 2 persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: repository: "apache/airflow-client-python" fetch-depth: 1 @@ -423,13 +435,13 @@ jobs: - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} - name: "Generate airflow python client" run: > breeze release-management prepare-python-client --distribution-format both - --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python + --python-client-repo ./airflow-client-python - name: "Show diff" run: git diff --color HEAD working-directory: ./airflow-client-python diff --git a/.github/workflows/ci-notification.yml b/.github/workflows/ci-notification.yml new file mode 100644 index 0000000000000..60c2476c2dd81 --- /dev/null +++ b/.github/workflows/ci-notification.yml @@ -0,0 +1,82 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
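The canary/schedule guard added above is repeated on each of the three AWS steps because step-level `if:` conditions are evaluated independently per step. The guard matters most for the sync step, since `aws s3 sync --delete` removes remote objects that are absent locally. A sketch of the guarded step as introduced by this diff (annotated with comments for clarity):

    - name: "Upload documentation to AWS S3"
      run: aws s3 sync --delete ./generated/_build s3://apache-airflow-docs
      # --delete prunes anything in the bucket that is missing from ./generated/_build,
      # so this step must only run for canary (scheduled or manually dispatched) builds
      if: >
        inputs.canary-run == 'true' &&
        (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')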
+#
+---
+name: "CI Notification"
+on: # yamllint disable-line rule:truthy
+  schedule:
+    - cron: '0 6,17 * * *'
+  workflow_dispatch:
+permissions:
+  # All other permissions are set to none by default
+  contents: read
+env:
+  GITHUB_REPOSITORY: ${{ github.repository }}
+  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  GITHUB_USERNAME: ${{ github.actor }}
+  SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
+  VERBOSE: "true"
+
+jobs:
+
+  workflow-status:
+    strategy:
+      matrix:
+        branch: ["v3-0-test"]
+        workflow-id: ["ci-amd.yml", "ci-arm.yml"]
+    runs-on: ubuntu-latest
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - name: "Install Python 3.11 as 3.11+ is needed by the workflow status helper"
+        uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c # v4.9.1
+        with:
+          python-version: 3.11
+
+      - name: "Find workflow run status"
+        id: find-workflow-run-status
+        run: |
+          python3 -m pip install uv
+          uv run ./dev/breeze/src/airflow_breeze/utils/workflow_status.py
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          workflow_branch: ${{ matrix.branch }}
+          workflow_id: ${{ matrix.workflow-id }}
+
+      - name: "Send Slack notification"
+        if: steps.find-workflow-run-status.outputs.conclusion == 'failure'
+        id: slack
+        uses: slackapi/slack-github-action@485a9d42d3a73031f12ec201c457e2162c45d02d # v2.0.0
+        with:
+          method: chat.postMessage
+          token: ${{ env.SLACK_BOT_TOKEN }}
+          # yamllint disable rule:line-length
+          payload: |
+            channel: "internal-airflow-ci-cd"
+            text: "🚨🕒 Failure Alert: ${{ env.workflow_id }} on branch *${{ env.branch }}* 🕒🚨\n\n*Details:* <${{ env.run_url }}|View the failure log>"
+            blocks:
+              - type: "section"
+                text:
+                  type: "mrkdwn"
+                  text: "🚨🕒 Failure Alert: ${{ env.workflow_id }} ${{ env.branch }} 🕒🚨\n\n*Details:* <${{ env.run_url }}|View the failure log>"
+        env:
+          run_url: ${{ steps.find-workflow-run-status.outputs.run-url }}
+          branch: ${{ matrix.branch }}
+          workflow_id: ${{ matrix.workflow-id }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index 406132c3a0b73..0000000000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,851 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# ---- -name: Tests -on: # yamllint disable-line rule:truthy - schedule: - - cron: '28 1,7,13,19 * * *' - push: - branches: - - v[0-9]+-[0-9]+-test - - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ - pull_request: - branches: - - main - - v[0-9]+-[0-9]+-test - - v[0-9]+-[0-9]+-stable - - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ - types: [opened, reopened, synchronize, ready_for_review] - workflow_dispatch: -permissions: - # All other permissions are set to none by default - contents: read -env: - GITHUB_REPOSITORY: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_USERNAME: ${{ github.actor }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - VERBOSE: "true" - -concurrency: - group: ci-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - - build-info: - name: "Build info" - # At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners - runs-on: ["ubuntu-22.04"] - env: - GITHUB_CONTEXT: ${{ toJson(github) }} - outputs: - all-python-versions-list-as-string: >- - ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} - basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }} - canary-run: ${{ steps.source-run-info.outputs.canary-run }} - chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }} - ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} - core-test-types-list-as-strings-in-json: >- - ${{ steps.selective-checks.outputs.core-test-types-list-as-strings-in-json }} - debug-resources: ${{ steps.selective-checks.outputs.debug-resources }} - default-branch: ${{ steps.selective-checks.outputs.default-branch }} - default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} - default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }} - default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }} - default-kubernetes-version: ${{ steps.selective-checks.outputs.default-kubernetes-version }} - default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }} - default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }} - default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} - disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} - docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} - docs-build: ${{ steps.selective-checks.outputs.docs-build }} - docs-list-as-string: ${{ steps.selective-checks.outputs.docs-list-as-string }} - excluded-providers-as-string: ${{ steps.selective-checks.outputs.excluded-providers-as-string }} - force-pip: ${{ steps.selective-checks.outputs.force-pip }} - full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }} - has-migrations: ${{ steps.selective-checks.outputs.has-migrations }} - helm-test-packages: ${{ steps.selective-checks.outputs.helm-test-packages }} - include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }} - individual-providers-test-types-list-as-strings-in-json: >- - ${{ steps.selective-checks.outputs.individual-providers-test-types-list-as-strings-in-json }} - is-airflow-runner: ${{ steps.selective-checks.outputs.is-airflow-runner }} - is-amd-runner: ${{ steps.selective-checks.outputs.is-amd-runner }} - is-arm-runner: ${{ steps.selective-checks.outputs.is-arm-runner }} - is-k8s-runner: ${{ 
steps.selective-checks.outputs.is-k8s-runner }} - is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }} - is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }} - kubernetes-combos: ${{ steps.selective-checks.outputs.kubernetes-combos }} - kubernetes-combos-list-as-string: >- - ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }} - kubernetes-versions-list-as-string: >- - ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} - latest-versions-only: ${{ steps.selective-checks.outputs.latest-versions-only }} - mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }} - mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }} - mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }} - needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }} - needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} - needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }} - needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }} - only-new-ui-files: ${{ steps.selective-checks.outputs.only-new-ui-files }} - postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }} - postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }} - prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} - # yamllint disable rule:line-length - providers-compatibility-tests-matrix: > - ${{ steps.selective-checks.outputs.providers-compatibility-tests-matrix }} - providers-test-types-list-as-strings-in-json: >- - ${{ steps.selective-checks.outputs.providers-test-types-list-as-strings-in-json }} - pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }} - python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} - python-versions: ${{ steps.selective-checks.outputs.python-versions }} - run-amazon-tests: ${{ steps.selective-checks.outputs.run-amazon-tests }} - run-airflow-ctl-tests: ${{ steps.selective-checks.outputs.run-airflow-ctl-tests }} - run-coverage: ${{ steps.source-run-info.outputs.run-coverage }} - run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} - run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} - run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }} - run-tests: ${{ steps.selective-checks.outputs.run-tests }} - run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} - run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }} - runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }} - runs-on-as-json-docs-build: ${{ steps.selective-checks.outputs.runs-on-as-json-docs-build }} - runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted-asf: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted-asf }} - runs-on-as-json-self-hosted: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted }} - selected-providers-list-as-string: >- - ${{ steps.selective-checks.outputs.selected-providers-list-as-string }} - skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }} - skip-providers-tests: ${{ steps.selective-checks.outputs.skip-providers-tests }} - source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }} - sqlite-exclude: ${{ steps.selective-checks.outputs.sqlite-exclude }} - testable-core-integrations: ${{ 
steps.selective-checks.outputs.testable-core-integrations }} - testable-providers-integrations: ${{ steps.selective-checks.outputs.testable-providers-integrations }} - use-uv: ${{ steps.selective-checks.outputs.force-pip == 'true' && 'false' || 'true' }} - upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: Fetch incoming commit ${{ github.sha }} with its parent - uses: actions/checkout@v4 - with: - ref: ${{ github.sha }} - fetch-depth: 2 - persist-credentials: false - - name: "Install Breeze" - uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - id: breeze - - name: "Get information about the Workflow" - id: source-run-info - run: breeze ci get-workflow-info 2>> ${GITHUB_OUTPUT} - env: - SKIP_BREEZE_SELF_UPGRADE_CHECK: "true" - - name: Selective checks - id: selective-checks - env: - PR_LABELS: "${{ steps.source-run-info.outputs.pr-labels }}" - COMMIT_REF: "${{ github.sha }}" - VERBOSE: "false" - run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} - - name: env - run: printenv - env: - PR_LABELS: ${{ steps.source-run-info.outputs.pr-labels }} - GITHUB_CONTEXT: ${{ toJson(github) }} - - basic-tests: - name: "Basic tests" - needs: [build-info] - uses: ./.github/workflows/basic-tests.yml - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - run-ui-tests: ${{needs.build-info.outputs.run-ui-tests}} - run-www-tests: ${{needs.build-info.outputs.run-www-tests}} - needs-api-codegen: ${{needs.build-info.outputs.needs-api-codegen}} - default-python-version: ${{needs.build-info.outputs.default-python-version}} - basic-checks-only: ${{needs.build-info.outputs.basic-checks-only}} - skip-pre-commits: ${{needs.build-info.outputs.skip-pre-commits}} - canary-run: ${{needs.build-info.outputs.canary-run}} - latest-versions-only: ${{needs.build-info.outputs.latest-versions-only}} - use-uv: ${{needs.build-info.outputs.use-uv}} - - build-ci-images: - name: Build CI images - needs: [build-info] - uses: ./.github/workflows/ci-image-build.yml - permissions: - contents: read - # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs - # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. 
- packages: write - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - platform: "linux/amd64" - push-image: "false" - upload-image-artifact: "true" - upload-mount-cache-artifact: ${{ needs.build-info.outputs.canary-run }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - branch: ${{ needs.build-info.outputs.default-branch }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} - docker-cache: ${{ needs.build-info.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - if: needs.build-info.outputs.ci-image-build == 'true' - - additional-ci-image-checks: - name: "Additional CI image checks" - needs: [build-info, build-ci-images] - uses: ./.github/workflows/additional-ci-image-checks.yml - permissions: - contents: read - packages: write - id-token: write - if: needs.build-info.outputs.canary-run == 'true' - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - branch: ${{ needs.build-info.outputs.default-branch }} - constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - skip-pre-commits: ${{ needs.build-info.outputs.skip-pre-commits }} - docker-cache: ${{ needs.build-info.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - - generate-constraints: - name: "Generate constraints" - needs: [build-info, build-ci-images] - uses: ./.github/workflows/generate-constraints.yml - if: needs.build-info.outputs.ci-image-build == 'true' - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} - # generate no providers constraints only in canary builds - they take quite some time to generate - # they are not needed for regular builds, they are only needed to update constraints in canaries - generate-no-providers-constraints: ${{ needs.build-info.outputs.canary-run }} - chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - - ci-image-checks: - name: "CI image checks" - needs: [build-info, build-ci-images] - uses: ./.github/workflows/ci-image-checks.yml - permissions: - id-token: write - contents: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - 
runs-on-as-json-docs-build: ${{ needs.build-info.outputs.runs-on-as-json-docs-build }} - needs-mypy: ${{ needs.build-info.outputs.needs-mypy }} - mypy-checks: ${{ needs.build-info.outputs.mypy-checks }} - python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} - branch: ${{ needs.build-info.outputs.default-branch }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - docs-list-as-string: ${{ needs.build-info.outputs.docs-list-as-string }} - latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} - basic-checks-only: ${{ needs.build-info.outputs.basic-checks-only }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - skip-pre-commits: ${{ needs.build-info.outputs.skip-pre-commits }} - chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} - ci-image-build: ${{ needs.build-info.outputs.ci-image-build }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - docs-build: ${{ needs.build-info.outputs.docs-build }} - needs-api-codegen: ${{ needs.build-info.outputs.needs-api-codegen }} - default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - secrets: - DOCS_AWS_ACCESS_KEY_ID: ${{ secrets.DOCS_AWS_ACCESS_KEY_ID }} - DOCS_AWS_SECRET_ACCESS_KEY: ${{ secrets.DOCS_AWS_SECRET_ACCESS_KEY }} - - providers: - name: "provider distributions tests" - uses: ./.github/workflows/test-providers.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - if: > - needs.build-info.outputs.skip-providers-tests != 'true' && - needs.build-info.outputs.latest-versions-only != 'true' - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - selected-providers-list-as-string: ${{ needs.build-info.outputs.selected-providers-list-as-string }} - # yamllint disable rule:line-length - providers-compatibility-tests-matrix: > - ${{ needs.build-info.outputs.providers-compatibility-tests-matrix }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - providers-test-types-list-as-strings-in-json: > - ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - - tests-helm: - name: "Helm tests" - uses: ./.github/workflows/helm-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - helm-test-packages: ${{ needs.build-info.outputs.helm-test-packages }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: > - needs.build-info.outputs.needs-helm-tests == 'true' && - needs.build-info.outputs.default-branch == 'main' && - 
needs.build-info.outputs.latest-versions-only != 'true' - - tests-postgres-core: - name: "Postgres tests: core" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "postgres" - test-name: "Postgres" - test-scope: "DB" - test-group: "core" - python-versions: ${{ needs.build-info.outputs.python-versions }} - backend-versions: ${{ needs.build-info.outputs.postgres-versions }} - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.postgres-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-migration-tests: "true" - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - tests-postgres-providers: - name: "Postgres tests: providers" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "postgres" - test-name: "Postgres" - test-scope: "DB" - test-group: "providers" - python-versions: ${{ needs.build-info.outputs.python-versions }} - backend-versions: ${{ needs.build-info.outputs.postgres-versions }} - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.postgres-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-migration-tests: "true" - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - tests-mysql-core: - name: "MySQL tests: core" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "mysql" - test-name: "MySQL" - test-scope: "DB" - test-group: "core" - python-versions: ${{ needs.build-info.outputs.python-versions }} - backend-versions: ${{ needs.build-info.outputs.mysql-versions }} - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.mysql-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - run-migration-tests: "true" - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ 
needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - tests-mysql-providers: - name: "MySQL tests: providers" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "mysql" - test-name: "MySQL" - test-scope: "DB" - test-group: "providers" - python-versions: ${{ needs.build-info.outputs.python-versions }} - backend-versions: ${{ needs.build-info.outputs.mysql-versions }} - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.mysql-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - run-migration-tests: "true" - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - - tests-sqlite-core: - name: "Sqlite tests: core" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "sqlite" - test-name: "Sqlite" - test-name-separator: "" - test-scope: "DB" - test-group: "core" - python-versions: ${{ needs.build-info.outputs.python-versions }} - # No versions for sqlite - backend-versions: "['']" - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - run-migration-tests: "true" - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - tests-sqlite-providers: - name: "Sqlite tests: providers" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "sqlite" - test-name: "Sqlite" - test-name-separator: "" - test-scope: "DB" - test-group: "providers" - python-versions: ${{ needs.build-info.outputs.python-versions }} - # No versions for sqlite - backend-versions: "['']" - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - run-migration-tests: "true" - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ 
needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - - tests-non-db-core: - name: "Non-DB tests: core" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "sqlite" - test-name: "" - test-name-separator: "" - test-scope: "Non-DB" - test-group: "core" - python-versions: ${{ needs.build-info.outputs.python-versions }} - # No versions for non-db - backend-versions: "['']" - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - tests-non-db-providers: - name: "Non-DB tests: providers" - uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - backend: "sqlite" - test-name: "" - test-name-separator: "" - test-scope: "Non-DB" - test-group: "providers" - python-versions: ${{ needs.build-info.outputs.python-versions }} - # No versions for non-db - backend-versions: "['']" - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - tests-special: - name: "Special tests" - uses: ./.github/workflows/special-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - if: > - needs.build-info.outputs.run-tests == 'true' && - (needs.build-info.outputs.canary-run == 'true' || - needs.build-info.outputs.upgrade-to-newer-dependencies != 'false' || - needs.build-info.outputs.full-tests-needed == 'true') - with: - default-branch: ${{ needs.build-info.outputs.default-branch }} - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - core-test-types-list-as-strings-in-json: > - ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} - providers-test-types-list-as-strings-in-json: > - ${{ needs.build-info.outputs.providers-test-types-list-as-strings-in-json }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - default-postgres-version: ${{ 
needs.build-info.outputs.default-postgres-version }} - excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - - tests-integration-system: - name: Integration and System Tests - needs: [build-info, build-ci-images] - uses: ./.github/workflows/integration-system-tests.yml - permissions: - contents: read - packages: read - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - testable-core-integrations: ${{ needs.build-info.outputs.testable-core-integrations }} - testable-providers-integrations: ${{ needs.build-info.outputs.testable-providers-integrations }} - run-system-tests: ${{ needs.build-info.outputs.run-tests }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} - default-mysql-version: ${{ needs.build-info.outputs.default-mysql-version }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.run-tests == 'true' - - tests-with-lowest-direct-resolution-core: - name: "Low dep tests:core" - needs: [build-info, build-ci-images] - uses: ./.github/workflows/run-unit-tests.yml - permissions: - contents: read - packages: read - if: > - needs.build-info.outputs.run-tests == 'true' - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - test-name: "LowestDeps" - force-lowest-dependencies: "true" - test-scope: "All" - test-group: "core" - backend: "sqlite" - python-versions: ${{ needs.build-info.outputs.python-versions }} - backend-versions: "['${{ needs.build-info.outputs.default-postgres-version }}']" - excluded-providers-as-string: "" - excludes: "[]" - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.core-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - monitor-delay-time-in-seconds: 120 - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - - tests-with-lowest-direct-resolution-providers: - name: "Low dep tests: providers" - needs: [build-info, build-ci-images] - uses: ./.github/workflows/run-unit-tests.yml - permissions: - contents: read - packages: read - if: needs.build-info.outputs.run-tests == 'true' - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - test-name: "LowestDeps" - force-lowest-dependencies: "true" - test-scope: "All" - test-group: "providers" - backend: "sqlite" - python-versions: ${{ needs.build-info.outputs.python-versions }} - backend-versions: "['${{ needs.build-info.outputs.default-postgres-version }}']" - excluded-providers-as-string: ${{ 
needs.build-info.outputs.excluded-providers-as-string }} - excludes: "[]" - test-types-as-strings-in-json: > - ${{ needs.build-info.outputs.individual-providers-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ needs.build-info.outputs.run-coverage }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - monitor-delay-time-in-seconds: 120 - skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - - build-prod-images: - name: Build PROD images - needs: [build-info, build-ci-images, generate-constraints] - uses: ./.github/workflows/prod-image-build.yml - permissions: - contents: read - # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs - # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. - packages: write - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - build-type: "Regular" - platform: "linux/amd64" - push-image: "false" - upload-image-artifact: "true" - upload-package-artifact: "true" - python-versions: ${{ needs.build-info.outputs.python-versions }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - branch: ${{ needs.build-info.outputs.default-branch }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - build-provider-distributions: ${{ needs.build-info.outputs.default-branch == 'main' }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} - constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} - docker-cache: ${{ needs.build-info.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - prod-image-build: ${{ needs.build-info.outputs.prod-image-build }} - - additional-prod-image-tests: - name: "Additional PROD image tests" - needs: [build-info, build-prod-images, generate-constraints] - uses: ./.github/workflows/additional-prod-image-tests.yml - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - default-branch: ${{ needs.build-info.outputs.default-branch }} - constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} - docker-cache: ${{ needs.build-info.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - if: needs.build-info.outputs.prod-image-build == 'true' - - tests-kubernetes: - name: "Kubernetes tests" - uses: ./.github/workflows/k8s-tests.yml - needs: [build-info, build-prod-images] - permissions: - contents: read - packages: read - with: - platform: "linux/amd64" - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - use-uv: ${{ needs.build-info.outputs.use-uv }} 
- debug-resources: ${{ needs.build-info.outputs.debug-resources }} - kubernetes-combos: ${{ needs.build-info.outputs.kubernetes-combos }} - if: > - ( needs.build-info.outputs.run-kubernetes-tests == 'true' || - needs.build-info.outputs.needs-helm-tests == 'true') - - tests-task-sdk: - name: "Task SDK tests" - uses: ./.github/workflows/airflow-distributions-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - distribution-name: "task-sdk" - distribution-cmd-format: "prepare-task-sdk-distributions" - test-type: "task-sdk-tests" - if: > - ( needs.build-info.outputs.run-task-sdk-tests == 'true' || - needs.build-info.outputs.run-tests == 'true' && - needs.build-info.outputs.only-new-ui-files != 'true') - - tests-airflow-ctl: - name: "Airflow CTL tests" - uses: ./.github/workflows/airflow-distributions-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - distribution-name: "airflow-ctl" - distribution-cmd-format: "prepare-airflow-ctl-distributions" - test-type: "airflow-ctl-tests" - if: > - ( needs.build-info.outputs.run-airflow-ctl-tests == 'true' || - needs.build-info.outputs.run-tests == 'true' && - needs.build-info.outputs.only-new-ui-files != 'true') - - finalize-tests: - name: Finalize tests - permissions: - contents: write - packages: write - # This will fire when all the jobs from "needs" are either successful or skipped - if: always() && !failure() && !cancelled() - needs: - - build-info - - generate-constraints - - ci-image-checks - - tests-sqlite-core - - tests-sqlite-providers - - tests-mysql-core - - tests-mysql-providers - - tests-postgres-core - - tests-postgres-providers - - tests-non-db-core - - tests-non-db-providers - - tests-integration-system - - build-prod-images - uses: ./.github/workflows/finalize-tests.yml - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} - branch: ${{ needs.build-info.outputs.default-branch }} - constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - docker-cache: ${{ needs.build-info.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - debug-resources: 
${{ needs.build-info.outputs.debug-resources }} - - notify-slack-failure: - name: "Notify Slack on Failure" - needs: - - basic-tests - - additional-ci-image-checks - - providers - - tests-helm - - tests-special - - tests-with-lowest-direct-resolution-core - - tests-with-lowest-direct-resolution-providers - - additional-prod-image-tests - - tests-kubernetes - - tests-task-sdk - - tests-airflow-ctl - - finalize-tests - if: github.event_name == 'schedule' && failure() && github.run_attempt == 1 - runs-on: ["ubuntu-22.04"] - steps: - - name: Notify Slack - id: slack - uses: slackapi/slack-github-action@485a9d42d3a73031f12ec201c457e2162c45d02d # v2.0.0 - with: - method: chat.postMessage - token: ${{ env.SLACK_BOT_TOKEN }} - # yamllint disable rule:line-length - payload: | - channel: "internal-airflow-ci-cd" - text: "🚨🕒 Scheduled CI Failure Alert 🕒🚨\n\n*Details:* " - blocks: - - type: "section" - text: - type: "mrkdwn" - text: "🚨🕒 Scheduled CI Failure Alert 🕒🚨\n\n*Details:* " - # yamllint enable rule:line-length diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 1fcf81a84fd5b..28c8cfae81a07 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -47,17 +47,17 @@ jobs: security-events: write steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 with: languages: ${{ matrix.language }} - name: Autobuild - uses: github/codeql-action/autobuild@v3 + uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 diff --git a/.github/workflows/finalize-tests.yml b/.github/workflows/finalize-tests.yml index 47db38269b35f..4fdbc777f22c8 100644 --- a/.github/workflows/finalize-tests.yml +++ b/.github/workflows/finalize-tests.yml @@ -20,12 +20,12 @@ name: Finalize tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." + runners: + description: "The array of labels (in json form) determining runners." required: true type: string - runs-on-as-json-self-hosted: - description: "The array of labels (in json form) determining self-hosted runners." 
+ platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string python-versions: @@ -80,7 +80,7 @@ permissions: contents: read jobs: update-constraints: - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} timeout-minutes: 80 name: "Update constraints" permissions: @@ -93,32 +93,32 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} VERBOSE: "true" - if: inputs.upgrade-to-newer-dependencies != 'false' + if: inputs.upgrade-to-newer-dependencies != 'false' && inputs.platform == 'linux/amd64' steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: # Needed to perform push action persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Set constraints branch name" id: constraints-branch run: ./scripts/ci/constraints/ci_branch_constraints.sh >> ${GITHUB_OUTPUT} - name: Checkout ${{ steps.constraints-branch.outputs.branch }} - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: path: "constraints" ref: ${{ steps.constraints-branch.outputs.branch }} persist-credentials: true fetch-depth: 0 - name: "Download constraints from the constraints generated by build CI image" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: - name: constraints + pattern: constraints-* path: ./files - name: "Diff in constraints for Python: ${{ inputs.python-versions-list-as-string }}" run: ./scripts/ci/constraints/ci_diff_constraints.sh @@ -127,25 +127,62 @@ jobs: run: ./scripts/ci/constraints/ci_commit_constraints.sh if: inputs.canary-run == 'true' - name: "Push changes" - if: inputs.canary-run == 'true' + if: inputs.canary-run == 'true' && github.event_name != 'pull_request' working-directory: "constraints" run: git push - push-buildx-cache-to-github-registry-amd: - name: Push Regular AMD Image Cache + dependency-upgrade-summary: + runs-on: ${{ fromJSON(inputs.runners) }} + needs: [update-constraints] + if: inputs.upgrade-to-newer-dependencies == 'true' && inputs.platform == 'linux/amd64' + name: "Deps ${{ matrix.python-version }}:${{ matrix.constraints-mode }}" + strategy: + matrix: + python-version: ${{ fromJson(inputs.python-versions) }} + constraints-mode: ["constraints", "constraints-source-providers", "constraints-no-providers"] + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: ${{ inputs.platform }} + python: ${{ matrix.python-version }} + use-uv: ${{ inputs.use-uv }} + - name: "Deps: ${{ matrix.python-version }}:${{ matrix.constraints-mode }}" + shell: bash + run: > + breeze release-management constraints-version-check + --python 
"${MATRIX_PYTHON_VERSION}" + --airflow-constraints-mode "${MATRIX_CONSTRAINTS_MODE}" --explain-why + env: + MATRIX_PYTHON_VERSION: "${{ matrix.python-version }}" + MATRIX_CONSTRAINTS_MODE: "${{ matrix.constraints-mode }}" + VERBOSE: "false" + + push-buildx-cache-to-github-registry: + name: Push Regular Image Cache ${{ inputs.platform }} needs: [update-constraints] uses: ./.github/workflows/push-image-cache.yml permissions: contents: read + # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs + # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. packages: write with: - runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ inputs.runs-on-as-json-self-hosted }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} cache-type: "Regular AMD" include-prod-images: "true" push-latest-images: "true" - platform: "linux/amd64" python-versions: ${{ inputs.python-versions }} branch: ${{ inputs.branch }} constraints-branch: ${{ inputs.constraints-branch }} @@ -153,66 +190,4 @@ jobs: include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} - if: inputs.canary-run == 'true' - - # push-buildx-cache-to-github-registry-arm: - # name: Push Regular ARM Image Cache - # needs: [update-constraints] - # uses: ./.github/workflows/push-image-cache.yml - # permissions: - # contents: read - # packages: write - # with: - # runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} - # runs-on-as-json-self-hosted: ${{ inputs.runs-on-as-json-self-hosted }} - # cache-type: "Regular ARM" - # include-prod-images: "true" - # push-latest-images: "true" - # platform: "linux/arm64" - # python-versions: ${{ inputs.python-versions }} - # branch: ${{ inputs.branch }} - # constraints-branch: ${{ inputs.constraints-branch }} - # use-uv: "true" - # include-success-outputs: ${{ inputs.include-success-outputs }} - # docker-cache: ${{ inputs.docker-cache }} - # if: inputs.canary-run == 'true' - - summarize-warnings: - timeout-minutes: 15 - name: "Summarize warnings" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Free up disk space" - shell: bash - run: ./scripts/tools/free_up_disk_space.sh - - name: "Download all test warning artifacts from the current build" - uses: actions/download-artifact@v4 - with: - path: ./artifacts - pattern: test-warnings-* - - name: "Setup python" - uses: actions/setup-python@v5 - with: - python-version: ${{ inputs.default-python-version }} - - name: "Summarize all warnings" - run: | - ./scripts/ci/testing/summarize_captured_warnings.py ./artifacts \ - --pattern "**/warnings-*.txt" \ - --output ./files - - name: "Upload artifact for summarized warnings" - uses: actions/upload-artifact@v4 - with: - name: test-summarized-warnings - path: ./files/warn-summary-*.txt - retention-days: 7 - if-no-files-found: ignore - overwrite: true + if: inputs.canary-run == 'true' && github.event_name != 'pull_request' diff --git a/.github/workflows/generate-constraints.yml b/.github/workflows/generate-constraints.yml 
index 0f74081686316..d2d0db0685a58 100644 --- a/.github/workflows/generate-constraints.yml +++ b/.github/workflows/generate-constraints.yml @@ -20,20 +20,28 @@ name: Generate constraints on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." + runners: + description: "The array of labels (in json form) determining runners." + required: true + type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string python-versions-list-as-string: description: "Stringified array of all Python versions to test - separated by spaces." required: true type: string + python-versions: + description: "JSON-formatted array of Python versions to generate constraints for" + required: true + type: string generate-no-providers-constraints: description: "Whether to generate constraints without providers (true/false)" required: true type: string - chicken-egg-providers: - description: "Space-separated list of providers that should be installed from context files" + generate-pypi-constraints: + description: "Whether to generate PyPI constraints (true/false)" required: true type: string debug-resources: @@ -45,107 +53,86 @@ on: # yamllint disable-line rule:truthy required: true type: string jobs: - generate-constraints: + generate-constraints-matrix: permissions: contents: read timeout-minutes: 70 - name: Generate constraints ${{ inputs.python-versions-list-as-string }} - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + name: Generate constraints for ${{ matrix.python-version }} on ${{ inputs.platform }} + runs-on: ${{ fromJSON(inputs.runners) }} + strategy: + matrix: + python-version: ${{ fromJson(inputs.python-versions) }} env: DEBUG_RESOURCES: ${{ inputs.debug-resources }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} INCLUDE_SUCCESS_OUTPUTS: "true" - PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} + PYTHON_VERSION: ${{ matrix.python-version }} VERBOSE: "true" - VERSION_SUFFIX_FOR_PYPI: "dev0" steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - shell: bash - - name: "Install Breeze" - uses: ./.github/actions/breeze + - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" + uses: ./.github/actions/prepare_breeze_and_image with: + platform: ${{ inputs.platform }} + python: ${{ matrix.python-version }} use-uv: ${{ inputs.use-uv }} - id: breeze - - name: "Prepare all CI images: ${{ inputs.python-versions-list-as-string}}" - uses: ./.github/actions/prepare_all_ci_images - with: - platform: "linux/amd64" - python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} - docker-volume-location: "" # TODO(jscheffl): Understand why it fails here and fix it - - name: "Verify all CI images ${{ inputs.python-versions-list-as-string }}" - run: breeze ci-image verify --run-in-parallel - name: "Source constraints" shell: bash run: > - breeze release-management generate-constraints --run-in-parallel + breeze release-management generate-constraints --airflow-constraints-mode 
constraints-source-providers --answer yes + --python "${PYTHON_VERSION}" - name: "No providers constraints" shell: bash timeout-minutes: 25 run: > - breeze release-management generate-constraints --run-in-parallel - --airflow-constraints-mode constraints-no-providers --answer yes --parallelism 3 - # The no providers constraints are only needed when we want to update constraints (in canary builds) - # They slow down the start of PROD image builds so we want to only run them when needed. + breeze release-management generate-constraints + --airflow-constraints-mode constraints-no-providers --answer yes + --python "${PYTHON_VERSION}" if: inputs.generate-no-providers-constraints == 'true' - - name: "Prepare chicken-eggs provider distributions" - # In case of provider distributions which use latest dev0 version of providers, we should prepare them - # from the source code, not from the PyPI because they have apache-airflow>=X.Y.Z dependency - # And when we prepare them from sources they will have apache-airflow>=X.Y.Z.dev0 + - name: "Prepare updated provider distributions" shell: bash - env: - CHICKEN_EGG_PROVIDERS: ${{ inputs.chicken-egg-providers }} run: > - breeze release-management prepare-provider-distributions --include-not-ready-providers - --distribution-format wheel --version-suffix-for-pypi dev0 - ${CHICKEN_EGG_PROVIDERS} - if: inputs.chicken-egg-providers != '' + breeze release-management prepare-provider-distributions + --include-not-ready-providers --distribution-format wheel + if: inputs.generate-pypi-constraints == 'true' - name: "Prepare airflow distributions" shell: bash run: > - breeze release-management prepare-airflow-distributions - --distribution-format wheel --version-suffix-for-pypi dev0 + breeze release-management prepare-airflow-distributions --distribution-format wheel + if: inputs.generate-pypi-constraints == 'true' - name: "Prepare task-sdk distribution" shell: bash run: > - breeze release-management prepare-task-sdk-distributions - --distribution-format wheel --version-suffix-for-pypi dev0 + breeze release-management prepare-task-sdk-distributions --distribution-format wheel + if: inputs.generate-pypi-constraints == 'true' - name: "PyPI constraints" shell: bash timeout-minutes: 25 - env: - CHICKEN_EGG_PROVIDERS: ${{ inputs.chicken-egg-providers }} run: | - for PYTHON in $PYTHON_VERSIONS; do - breeze release-management generate-constraints \ - --airflow-constraints-mode constraints --answer yes \ - --chicken-egg-providers "${CHICKEN_EGG_PROVIDERS}" \ - --python "${PYTHON}" - done - - name: "Dependency upgrade summary" - shell: bash - env: - PYTHON_VERSIONS: ${{ env.PYTHON_VERSIONS }} - run: | - for PYTHON_VERSION in $PYTHON_VERSIONS; do - echo "Summarizing Python $PYTHON_VERSION" - cat "files/constraints-${PYTHON_VERSION}"/*.md >> $GITHUB_STEP_SUMMARY || true - df -H - done + breeze release-management generate-constraints --airflow-constraints-mode constraints \ + --answer yes --python "${PYTHON_VERSION}" + if: inputs.generate-pypi-constraints == 'true' - name: "Upload constraint artifacts" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: - name: constraints - path: ./files/constraints-*/constraints-*.txt + name: constraints-${{ matrix.python-version }} + path: ./files/constraints-${{ matrix.python-version }}/constraints-*.txt retention-days: 7 if-no-files-found: error + - name: "Dependency upgrade summary" + shell: bash + env: + PYTHON_VERSION: ${{ matrix.python-version }} + run: | + echo 
"Summarizing Python $PYTHON_VERSION" + cat "files/constraints-${PYTHON_VERSION}"/*.md >> $GITHUB_STEP_SUMMARY || true + df -H diff --git a/.github/workflows/helm-tests.yml b/.github/workflows/helm-tests.yml index 1b4aa19cbe595..46e91cf9cf1db 100644 --- a/.github/workflows/helm-tests.yml +++ b/.github/workflows/helm-tests.yml @@ -20,12 +20,12 @@ name: Helm tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build." + runners: + description: "The array of labels (in json form) determining runners." required: true type: string - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string helm-test-packages: @@ -46,7 +46,7 @@ jobs: tests-helm: timeout-minutes: 80 name: "Unit tests Helm: ${{ matrix.helm-test-package }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: fail-fast: false matrix: @@ -68,14 +68,14 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} - name: "Helm Unit Tests: ${{ matrix.helm-test-package }}" env: @@ -85,27 +85,26 @@ jobs: tests-helm-release: timeout-minutes: 80 name: "Release Helm" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{inputs.default-python-version}}" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - name: Setup git for tagging run: | - git config --global user.email "name@example.com" - git config --global user.name "Your Name" + git config --global user.email "bot@airflow.apache.org" + git config --global user.name "Your friendly bot" - name: "Remove old artifacts" run: rm -rf dist/* - name: "Setup k8s/helm environment" @@ -130,13 +129,12 @@ jobs: SIGN_WITH: dev@airflow.apache.org - name: "Fetch Git Tags" run: git fetch --tags - - name: "Test helm chart issue generation automatically" - # Adding same tags for now, will address in a follow-up + - name: "Test helm chart issue generation" run: > - breeze release-management generate-issue-content-helm-chart --limit-pr-count 10 - --latest --verbose + breeze release-management generate-issue-content-helm-chart --limit-pr-count 2 + --previous-release 
helm-chart/1.15.0 --current-release helm-chart/1.16.0 --verbose - name: "Upload Helm artifacts" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: Helm artifacts path: ./dist/airflow-* diff --git a/.github/workflows/integration-system-tests.yml b/.github/workflows/integration-system-tests.yml index fc3159223e8fb..0a728a351ed35 100644 --- a/.github/workflows/integration-system-tests.yml +++ b/.github/workflows/integration-system-tests.yml @@ -20,10 +20,14 @@ name: Integration and system tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-public: + runners: description: "The array of labels (in json form) determining public runners." required: true type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" + required: true + type: string testable-core-integrations: description: "The list of testable core integrations as JSON array." required: true @@ -71,7 +75,7 @@ jobs: timeout-minutes: 30 if: inputs.testable-core-integrations != '[]' name: "Integration core ${{ matrix.integration }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: fail-fast: false matrix: @@ -93,14 +97,14 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} - name: "Integration: core ${{ matrix.integration }}" env: @@ -111,7 +115,7 @@ jobs: uses: ./.github/actions/post_tests_success with: codecov-token: ${{ secrets.CODECOV_TOKEN }} - python-version: ${{ inputs.default-python-version }} + python-version: "${{ inputs.default-python-version }}" - name: "Post Tests failure" uses: ./.github/actions/post_tests_failure if: failure() @@ -120,7 +124,7 @@ jobs: timeout-minutes: 30 if: inputs.testable-providers-integrations != '[]' && inputs.skip-providers-tests != 'true' name: "Integration: providers ${{ matrix.integration }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: fail-fast: false matrix: @@ -142,14 +146,14 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} - name: "Integration: providers ${{ matrix.integration }}" env: @@ -159,7 +163,7 @@ jobs: uses: ./.github/actions/post_tests_success with: codecov-token: ${{ secrets.CODECOV_TOKEN }} - python-version: ${{ inputs.default-python-version }} + python-version: "${{ 
inputs.default-python-version }}"
       - name: "Post Tests failure"
         uses: ./.github/actions/post_tests_failure
         if: failure()
@@ -168,7 +172,7 @@ jobs:
     timeout-minutes: 30
     if: inputs.run-system-tests == 'true'
     name: "System Tests"
-    runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }}
+    runs-on: ${{ fromJSON(inputs.runners) }}
     env:
       BACKEND: "postgres"
       BACKEND_VERSION: "${{ inputs.default-postgres-version }}"
@@ -186,14 +190,14 @@ jobs:
         shell: bash
         run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
       - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}"
         uses: ./.github/actions/prepare_breeze_and_image
         with:
-          platform: "linux/amd64"
-          python: ${{ inputs.default-python-version }}
+          platform: ${{ inputs.platform }}
+          python: "${{ inputs.default-python-version }}"
           use-uv: ${{ inputs.use-uv }}
       - name: "System Tests"
         run: >
@@ -202,7 +206,7 @@ jobs:
         uses: ./.github/actions/post_tests_success
         with:
           codecov-token: ${{ secrets.CODECOV_TOKEN }}
-          python-version: ${{ inputs.default-python-version }}
+          python-version: "${{ inputs.default-python-version }}"
       - name: "Post Tests failure"
         uses: ./.github/actions/post_tests_failure
         if: failure()
diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml
index 40f73e3c59c66..37aa3aa703ce1 100644
--- a/.github/workflows/k8s-tests.yml
+++ b/.github/workflows/k8s-tests.yml
@@ -20,12 +20,12 @@ name: K8s tests
 on: # yamllint disable-line rule:truthy
   workflow_call:
     inputs:
-      platform:
-        description: "Platform for the build - 'linux/amd64' or 'linux/arm64'"
+      runners:
+        description: "The array of labels (in json form) determining runners."
         required: true
         type: string
-      runs-on-as-json-default:
-        description: "The array of labels (in json form) determining default runner used for the build."
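+      # Example value (illustrative, not upstream text): '["ubuntu-22.04"]';
+      # the jobs below consume this array via fromJSON(inputs.runners).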
+ platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string python-versions-list-as-string: @@ -54,7 +54,7 @@ jobs: tests-kubernetes: timeout-minutes: 60 name: "K8S System:${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-${{ matrix.use-standard-naming }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: matrix: executor: [KubernetesExecutor, CeleryExecutor, LocalExecutor] @@ -80,7 +80,7 @@ jobs: echo "PYTHON_MAJOR_MINOR_VERSION=${KUBERNETES_COMBO}" | sed 's/-.*//' >> $GITHUB_ENV echo "KUBERNETES_VERSION=${KUBERNETES_COMBO}" | sed 's/=[^-]*-/=/' >> $GITHUB_ENV - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false # env.PYTHON_MAJOR_MINOR_VERSION, env.KUBERNETES_VERSION are set in the previous @@ -103,27 +103,26 @@ jobs: USE_STANDARD_NAMING: ${{ matrix.use-standard-naming }} VERBOSE: "false" - name: "\ - Upload KinD logs on failure ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ + Print logs ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ ${{ matrix.use-standard-naming }}" - uses: actions/upload-artifact@v4 - if: failure() || cancelled() - with: - name: "\ - kind-logs-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ - ${{ matrix.use-standard-naming }}" - path: /tmp/kind_logs_* - retention-days: '7' + run: | + for file in `find /tmp/kind_logs_*/ -type f` ; do + echo "::group::${file}" + cat $file + echo "::endgroup::" + done + if: failure() || cancelled() || inputs.include-success-outputs == 'true' - name: "\ - Upload test resource logs on failure ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ + Upload KinD logs ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ ${{ matrix.use-standard-naming }}" - uses: actions/upload-artifact@v4 - if: failure() || cancelled() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: "\ - k8s-test-resources-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ + kind-logs-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ ${{ matrix.use-standard-naming }}" - path: /tmp/k8s_test_resources_* + path: /tmp/kind_logs_* retention-days: '7' + if: failure() || cancelled() || inputs.include-success-outputs == 'true' - name: "Delete clusters just in case they are left" run: breeze k8s delete-cluster --all if: always() diff --git a/.github/workflows/news-fragment.yml b/.github/workflows/news-fragment.yml index f6f68d1288a35..04e308c306138 100644 --- a/.github/workflows/news-fragment.yml +++ b/.github/workflows/news-fragment.yml @@ -30,7 +30,7 @@ jobs: if: "contains(github.event.pull_request.labels.*.name, 'airflow3.0:breaking')" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false # `towncrier check` runs `git diff --name-only origin/main...`, which diff --git a/.github/workflows/prod-image-build.yml b/.github/workflows/prod-image-build.yml index a335576d4bcf5..6a075c9d52169 100644 --- a/.github/workflows/prod-image-build.yml +++ b/.github/workflows/prod-image-build.yml @@ -20,8 +20,8 @@ name: Build PROD images on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining default runner used for the build." 
+ runners: + description: "The array of labels (in json form) determining runners." required: true type: string build-type: @@ -85,25 +85,17 @@ on: # yamllint disable-line rule:truthy required: true type: string branch: - description: "Branch used to run the CI jobs in (main/v2_*_test)." + description: "Branch used to run the CI jobs in (main/v*_*_test)." required: true type: string constraints-branch: description: "Branch used to construct constraints URL from." required: true type: string - build-provider-distributions: - description: "Whether to build provider distributions (true/false). If false providers are from PyPI" - required: true - type: string upgrade-to-newer-dependencies: description: "Whether to attempt to upgrade image to newer dependencies (true/false)" required: true type: string - chicken-egg-providers: - description: "Space-separated list of providers that should be installed from context files" - required: true - type: string docker-cache: description: "Docker cache specification to build the image (registry, local, disabled)." required: true @@ -122,22 +114,21 @@ jobs: build-prod-packages: name: "Build Airflow and provider distributions" timeout-minutes: 10 - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} if: inputs.prod-image-build == 'true' env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - VERSION_SUFFIX_FOR_PYPI: ${{ inputs.branch == 'main' && 'dev0' || '' }} steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" if: inputs.upload-package-artifact == 'true' - name: "Checkout target branch" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh if: inputs.upload-package-artifact == 'true' - name: "Cleanup dist and context file" shell: bash @@ -145,28 +136,23 @@ jobs: if: inputs.upload-package-artifact == 'true' - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} if: inputs.upload-package-artifact == 'true' - - name: "Prepare providers packages" + - name: "Prepare providers packages - all providers built from sources" shell: bash run: > breeze release-management prepare-provider-distributions --distributions-list-file ./prod_image_installed_providers.txt - --distribution-format wheel --include-not-ready-providers + --distribution-format wheel --include-not-ready-providers --skip-tag-check if: > - inputs.upload-package-artifact == 'true' && - inputs.build-provider-distributions == 'true' - - name: "Prepare chicken-eggs provider distributions" + inputs.upload-package-artifact == 'true' && inputs.branch == 'main' + - name: "Prepare providers packages with only new versions of providers" shell: bash - env: - CHICKEN_EGG_PROVIDERS: ${{ inputs.chicken-egg-providers }} run: > breeze release-management prepare-provider-distributions - --distribution-format wheel ${CHICKEN_EGG_PROVIDERS} + --distributions-list-file ./prod_image_installed_providers.txt + --distribution-format wheel --include-not-ready-providers if: > - inputs.upload-package-artifact == 'true' && - inputs.chicken-egg-providers != '' + inputs.upload-package-artifact == 'true' && inputs.branch != 'main' - name: "Prepare airflow package" shell: bash run: > @@ -183,7 +169,7 @@ jobs: breeze 
release-management prepare-airflow-ctl-distributions --distribution-format wheel if: inputs.upload-package-artifact == 'true' - name: "Upload prepared packages as artifacts" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: prod-packages path: ./dist @@ -198,7 +184,7 @@ jobs: python-version: ${{ fromJSON(inputs.python-versions) || fromJSON('[""]') }} timeout-minutes: 80 name: "Build PROD ${{ inputs.build-type }} image ${{ matrix.python-version }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + runs-on: ${{ fromJSON(inputs.runners) }} needs: - build-prod-packages env: @@ -206,7 +192,6 @@ jobs: PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}" DEFAULT_BRANCH: ${{ inputs.branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ inputs.constraints-branch }} - VERSION_SUFFIX_FOR_PYPI: ${{ inputs.branch == 'main' && 'dev0' || '' }} INCLUDE_NOT_READY_PROVIDERS: "true" # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the # Airflow one is going to be used @@ -217,34 +202,45 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} + PLATFORM: ${{ inputs.platform }} VERBOSE: "true" steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout target branch" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist and context file" shell: bash run: rm -fv ./dist/* ./docker-context-files/* - name: "Download packages prepared as artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: prod-packages path: ./docker-context-files + - name: "Show downloaded packages" + run: ls -la ./docker-context-files - name: "Download constraints" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: - name: constraints + pattern: constraints-* path: ./docker-context-files + - name: "Show constraints" + run: | + for file in ./docker-context-files/constraints*/constraints*.txt + do + echo "=== ${file} ===" + echo + cat ${file} + echo + echo "=== END ${file} ===" + done - name: "Login to ghcr.io" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -267,25 +263,7 @@ jobs: INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} INCLUDE_NOT_READY_PROVIDERS: "true" - if: inputs.build-provider-distributions == 'true' - - name: "Build PROD images with PyPi providers ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" - shell: bash - run: > - breeze prod-image build - --builder airflow_cache - --commit-sha "${{ github.sha }}" - --install-distributions-from-context - --airflow-constraints-mode constraints - --use-constraints-for-context-distributions - env: - PUSH: ${{ inputs.push-image }} - DOCKER_CACHE: ${{ inputs.docker-cache }} - DISABLE_AIRFLOW_REPO_CACHE: ${{ inputs.disable-airflow-repo-cache }} - DEBIAN_VERSION: ${{ inputs.debian-version }} - 
INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} - UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} - INCLUDE_NOT_READY_PROVIDERS: "true" - if: inputs.build-provider-distributions != 'true' + USE_UV: ${{ inputs.use-uv }} - name: "Verify PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" run: breeze prod-image verify - name: "Export PROD docker image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" diff --git a/.github/workflows/prod-image-extra-checks.yml b/.github/workflows/prod-image-extra-checks.yml index 0b208cb552059..2d8a950982f8e 100644 --- a/.github/workflows/prod-image-extra-checks.yml +++ b/.github/workflows/prod-image-extra-checks.yml @@ -20,8 +20,12 @@ name: PROD images extra checks on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." + runners: + description: "The array of labels (in json form) determining runners." + required: true + type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string python-versions: @@ -33,25 +37,13 @@ on: # yamllint disable-line rule:truthy required: true type: string branch: - description: "Branch used to run the CI jobs in (main/v2_*_test)." - required: true - type: string - use-uv: - description: "Whether to use uv to build the image (true/false)" - required: true - type: string - build-provider-distributions: - description: "Whether to build provider distributions (true/false). If false providers are from PyPI" + description: "Branch used to run the CI jobs in (main/v*_*_test)." required: true type: string upgrade-to-newer-dependencies: description: "Whether to attempt to upgrade image to newer dependencies (false/RANDOM_VALUE)" required: true type: string - chicken-egg-providers: - description: "Space-separated list of providers that should be installed from context files" - required: true - type: string constraints-branch: description: "Branch used to construct constraints URL from." 
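+        # Illustrative (assumed) example of the URL built from this branch:
+        # https://raw.githubusercontent.com/apache/airflow/constraints-3.0.1/constraints-3.10.txt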
required: true @@ -70,21 +62,19 @@ jobs: myssql-client-image: uses: ./.github/workflows/prod-image-build.yml with: - runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} build-type: "MySQL Client" upload-image-artifact: "false" upload-package-artifact: "false" install-mysql-client-type: "mysql" python-versions: ${{ inputs.python-versions }} - default-python-version: ${{ inputs.default-python-version }} - platform: "linux/amd64" + default-python-version: "${{ inputs.default-python-version }}" branch: ${{ inputs.branch }} # Always build images during the extra checks and never push them push-image: "false" - use-uv: ${{ inputs.use-uv }} - build-provider-distributions: ${{ inputs.build-provider-distributions }} + use-uv: "true" upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} - chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} @@ -92,24 +82,20 @@ jobs: pip-image: uses: ./.github/workflows/prod-image-build.yml - # Skip testing PIP image on release branches as all images there are built with pip - if: ${{ inputs.use-uv == 'true' }} with: - runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} build-type: "pip" upload-image-artifact: "false" upload-package-artifact: "false" install-mysql-client-type: "mysql" python-versions: ${{ inputs.python-versions }} - default-python-version: ${{ inputs.default-python-version }} - platform: "linux/amd64" + default-python-version: "${{ inputs.default-python-version }}" branch: ${{ inputs.branch }} # Always build images during the extra checks and never push them push-image: "false" use-uv: "false" - build-provider-distributions: ${{ inputs.build-provider-distributions }} upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} - chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} diff --git a/.github/workflows/publish-docs-to-s3.yml b/.github/workflows/publish-docs-to-s3.yml new file mode 100644 index 0000000000000..7d6a35cfbe52a --- /dev/null +++ b/.github/workflows/publish-docs-to-s3.yml @@ -0,0 +1,369 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+#
+---
+name: Publish Docs to S3
+on: # yamllint disable-line rule:truthy
+  workflow_dispatch:
+    inputs:
+      ref:
+        description: "The branch or tag to checkout for the docs publishing"
+        required: true
+        type: string
+      destination:
+        description: "The destination location in S3"
+        required: false
+        default: auto
+        type: choice
+        options:
+          - auto
+          - live
+          - staging
+      include-docs:
+        description: "Space separated list of packages to build"
+        required: true
+        type: string
+      exclude-docs:
+        description: "Comma separated list of docs to exclude"
+        required: false
+        default: "no-docs-excluded"
+        type: string
+      skip-write-to-stable-folder:
+        description: "Do not override stable version"
+        required: false
+        default: false
+        type: boolean
+      build-sboms:
+        description: "Build SBOMs"
+        required: false
+        default: false
+        type: boolean
+      airflow-base-version:
+        required: false
+        description: "Override the Airflow Base Version to use for the docs build"
+        type: string
+      airflow-version:
+        required: false
+        description: "Override the Airflow Version to use for the docs build"
+        type: string
+      apply-commits:
+        required: false
+        description: "Optionally apply commit hashes before building - to patch the docs (comma separated)"
+        type: string
+permissions:
+  contents: read
+jobs:
+  build-info:
+    timeout-minutes: 10
+    name: "Build Info"
+    runs-on: ["ubuntu-24.04"]
+    env:
+      GITHUB_CONTEXT: ${{ toJson(github) }}
+      VERBOSE: true
+      REF: ${{ inputs.ref }}
+      INCLUDE_DOCS: ${{ inputs.include-docs }}
+      EXCLUDE_DOCS: ${{ inputs.exclude-docs }}
+      DESTINATION: ${{ inputs.destination }}
+      SKIP_WRITE_TO_STABLE_FOLDER: ${{ inputs.skip-write-to-stable-folder }}
+      BUILD_SBOMS: ${{ inputs.build-sboms }}
+      AIRFLOW_BASE_VERSION: ${{ inputs.airflow-base-version || '' }}
+      AIRFLOW_VERSION: ${{ inputs.airflow-version || '' }}
+      APPLY_COMMITS: ${{ inputs.apply-commits || '' }}
+    outputs:
+      include-docs: ${{ inputs.include-docs == 'all' && '' || inputs.include-docs }}
+      destination-location: ${{ steps.parameters.outputs.destination-location }}
+      destination: ${{ steps.parameters.outputs.destination }}
+      extra-build-options: ${{ steps.parameters.outputs.extra-build-options }}
+      airflow-base-version: ${{ steps.parameters.outputs.airflow-base-version }}
+      airflow-version: ${{ steps.parameters.outputs.airflow-version }}
+      # yamllint disable rule:line-length
+      skip-write-to-stable-folder: ${{ inputs.skip-write-to-stable-folder && '--skip-write-to-stable-folder' || '' }}
+      default-python-version: "3.10"
+    if: contains(fromJSON('[
+      "ashb",
+      "eladkal",
+      "ephraimbuddy",
+      "jedcunningham",
+      "kaxil",
+      "pierrejeambrun",
+      "potiuk",
+      "utkarsharma2"
+      ]'), github.event.sender.login)
+    steps:
+      - name: "Input parameters summary"
+        shell: bash
+        id: parameters
+        run: |
+          echo "Input parameters summary"
+          echo "========================="
+          echo "Ref: '${REF}'"
+          echo "Included docs: '${INCLUDE_DOCS}'"
+          echo "Exclude docs: '${EXCLUDE_DOCS}'"
+          echo "Destination: '${DESTINATION}'"
+          echo "Skip write to stable folder: '${SKIP_WRITE_TO_STABLE_FOLDER}'"
+          echo "Build SBOMs: '${BUILD_SBOMS}'"
+          echo "Airflow Base Version: '${AIRFLOW_BASE_VERSION}'"
+          echo "Airflow Version: '${AIRFLOW_VERSION}'"
+          echo "Apply commits: '${APPLY_COMMITS}'"
+          if [[ "${DESTINATION}" == "auto" ]]; then
+            if [[ "${REF}" =~ ^.*[0-9]*\.[0-9]*\.[0-9]*$ ]]; then
+              echo "${REF} looks like a final release, using live destination"
+              DESTINATION="live"
+            else
+              echo "${REF} does not look like a final release, using staging destination"
+              DESTINATION="staging"
+            fi
+          fi
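+          # Illustrative outcomes of the check above (example values, not upstream text):
+          #   REF=3.0.2    ends in X.Y.Z        -> live
+          #   REF=3.0.2rc1 has a non-digit tail -> staging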
"destination=${DESTINATION}" >> ${GITHUB_OUTPUT} + if [[ "${DESTINATION}" == "live" ]]; then + echo "destination-location=s3://live-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT} + else + echo "destination-location=s3://staging-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT} + fi + if [[ " ${INCLUDE_DOCS} " =~ " apache-airflow " ]]; then + if [[ ${AIRFLOW_BASE_VERSION=} == "" && ${AIRFLOW_VERSION=} == "" ]]; then + echo "No Airflow Versions provided, using ${REF} to determine it." + AIRFLOW_VERSION="${REF}" + set +e + AIRFLOW_BASE_VERSION=$(echo "${REF}" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+') + set -e + if [[ ${AIRFLOW_BASE_VERSION=} == "" ]]; then + echo + echo "No Airflow Base Version found in ${REF}" + echo "You need to force airflow version and airflow base version in the workflow." + echo + exit 1 + fi + fi + echo "airflow-base-version=${AIRFLOW_BASE_VERSION}" >> ${GITHUB_OUTPUT} + echo "airflow-version=${AIRFLOW_VERSION}" >> ${GITHUB_OUTPUT} + else + echo "airflow-version=no-airflow" >> ${GITHUB_OUTPUT} + echo "airflow-base-version=no-airflow" >> ${GITHUB_OUTPUT} + fi + + build-docs: + needs: [build-info] + timeout-minutes: 150 + name: "Build documentation" + runs-on: ubuntu-latest + env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + INCLUDE_SUCCESS_OUTPUTS: false + VERBOSE: "true" + EXTRA_BUILD_OPTIONS: ${{ needs.build-info.outputs.extra-build-options }} + APPLY_COMMITS: ${{ inputs.apply-commits || '' }} + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout current version first to clean-up stuff" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + path: current-version + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + working-directory: current-version + # We are checking repo for both - breeze and docs from the ref provided as input + # This will take longer as we need to rebuild CI image and it will not use cache + # but it will build the CI image from the version of Airflow that is used to check out things + - name: "Checkout ${{ inputs.ref }} " + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + ref: ${{ inputs.ref }} + fetch-depth: 0 + fetch-tags: true + - name: "Apply patch commits if provided" + run: | + if [[ "${APPLY_COMMITS}" != "" ]]; then + git config --global user.email "bot@airflow.apache.org" + git config --global user.name "Your friendly bot" + echo "Applying commits ${APPLY_COMMITS} to the docs" + # Split APPLY_COMMITS by comma and apply each commit + IFS=',' read -ra COMMIT_ARRAY <<< "${APPLY_COMMITS}" + for APPLY_COMMIT in "${COMMIT_ARRAY[@]}"; do + echo "Applying commit ${APPLY_COMMIT}" + git fetch origin "${APPLY_COMMIT}" + git cherry-pick "${APPLY_COMMIT}" + done + else + echo "No commits provided to apply, skipping." 
+ fi + - name: "Install Breeze from the ${{ inputs.ref }} reference" + uses: ./.github/actions/breeze + with: + python-version: "${{ needs.build-info.outputs.default-python-version }}" + - name: "Building image from the ${{ inputs.ref }} reference" + env: + INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }} + INCLUDE_COMMITS: ${{ startsWith(inputs.ref, 'providers') && 'true' || 'false' }} + # if the regular breeze ci-image build fails, we will try to build the image using docker buildx + # This is needed for the case when we are building an old image which tries to use main as + # a cache and it fails because the main branch has changed and does not have the same pyproject.toml + # Structure as the one we are trying to build. + run: > + breeze ci-image build || + docker buildx build --load --builder default --progress=auto --pull + --build-arg AIRFLOW_EXTRAS=devel-ci --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES=false + --build-arg AIRFLOW_USE_UV=true --build-arg UV_HTTP_TIMEOUT=300 + --build-arg BUILD_PROGRESS=auto --build-arg INSTALL_MYSQL_CLIENT_TYPE=mariadb + --build-arg VERSION_SUFFIX_FOR_PYPI=dev0 + -t ghcr.io/apache/airflow/main/ci/python3.9:latest --target main . + -f Dockerfile.ci --platform linux/amd64 + - name: "Building docs with --docs-only flag using ${{ inputs.ref }} reference breeze" + env: + INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }} + INCLUDE_COMMITS: ${{ startsWith(inputs.ref, 'providers') && 'true' || 'false' }} + run: > + breeze build-docs ${INCLUDE_DOCS} --docs-only + - name: "Checkout current version to run SBOM generation" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + fetch-depth: 0 + fetch-tags: true + path: current-version + if: inputs.build-sboms + - name: "Reinstall breeze from the current version" + run: | + breeze setup self-upgrade --use-current-airflow-sources + if: inputs.build-sboms + working-directory: current-version + - name: "Make sure SBOM dir exists and has the right permissions" + run: | + sudo mkdir -vp ./files/sbom + sudo chown -R "${USER}" . 
+        working-directory: current-version
+        if: inputs.build-sboms
+      - name: "Prepare SBOMs using current version of Breeze"
+        env:
+          AIRFLOW_VERSION: ${{ needs.build-info.outputs.airflow-version }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          PYTHON_VERSION: "${{ needs.build-info.outputs.default-python-version }}"
+          FORCE: "true"
+        run: >
+          breeze sbom update-sbom-information
+          --airflow-version ${AIRFLOW_VERSION} --remote-name origin --force
+          --all-combinations --run-in-parallel --airflow-root-path "${GITHUB_WORKSPACE}"
+        working-directory: current-version
+        if: inputs.build-sboms
+      - name: "Generated SBOM files"
+        run: |
+          echo "Generated SBOM files:"
+          find ./generated/_build/docs/apache-airflow/stable/sbom/ -type f | sort
+        if: inputs.build-sboms
+      - name: "Reinstall breeze from ${{ inputs.ref }} reference"
+        run:
+          breeze setup self-upgrade --use-current-airflow-sources
+        if: inputs.build-sboms
+      - name: Check disk space available
+        run: df -H
+      # Here we create a temp airflow-site dir to publish docs
+      - name: Create /mnt/airflow-site directory
+        run: |
+          sudo mkdir -p /mnt/airflow-site && sudo chown -R "${USER}" /mnt/airflow-site
+          echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/" >> "$GITHUB_ENV"
+      - name: "Publish docs to /mnt/airflow-site directory using ${{ inputs.ref }} reference breeze"
+        env:
+          INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }}
+        run: >
+          breeze release-management publish-docs --override-versioned --run-in-parallel ${INCLUDE_DOCS}
+      - name: "Upload build docs"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: airflow-docs
+          path: /mnt/airflow-site
+          retention-days: '7'
+          if-no-files-found: 'error'
+          overwrite: 'true'
+
+  publish-docs-to-s3:
+    needs: [build-docs, build-info]
+    name: "Publish documentation to S3"
+    permissions:
+      id-token: write
+      contents: read
+    runs-on: ubuntu-latest
+    env:
+      GITHUB_REPOSITORY: ${{ github.repository }}
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      GITHUB_USERNAME: ${{ github.actor }}
+      INCLUDE_SUCCESS_OUTPUTS: false
+      PYTHON_MAJOR_MINOR_VERSION: 3.10
+      VERBOSE: "true"
+    steps:
+      - name: "Cleanup repo"
+        shell: bash
+        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
+      # We check out the repo for both breeze and docs from the "workflow" branch.
+      # This will take longer as we need to rebuild the CI image and it will not use cache,
+      # but it will build the CI image from the version of Airflow that is checked out.
+      - name: "Checkout ${{ inputs.ref }}"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+      - name: "Prepare and cleanup runner"
+        run: ./scripts/ci/prepare_and_cleanup_runner.sh
+      - name: "Install Breeze"
+        uses: ./.github/actions/breeze
+      - name: "Download docs prepared as artifacts"
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          name: airflow-docs
+          path: /mnt/airflow-site
+      - name: Check disk space available
+        run: df -H
+      - name: "Update watermarks"
+        env:
+          SOURCE_DIR_PATH: "/mnt/airflow-site/docs-archive/"
+        # yamllint disable rule:line-length
+        run: |
+          curl -sSf -o add_watermark.py https://raw.githubusercontent.com/apache/airflow-site/refs/heads/main/.github/scripts/add_watermark.py \
+            --header "Authorization: Bearer ${{ github.token }} " --header "X-GitHub-Api-Version: 2022-11-28"
+          chmod a+x add_watermark.py
+          mkdir -p images
+          curl -sSf -o images/staging.png \
https://raw.githubusercontent.com/apache/airflow-site/refs/heads/main/.github/scripts/images/staging.png + uv run add_watermark.py --pattern 'main.min*css' --folder ${SOURCE_DIR_PATH} \ + --image-directory images --url-prefix /images + if: needs.build-info.outputs.destination == 'staging' + - name: Install AWS CLI v2 + run: | + curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o /tmp/awscliv2.zip + unzip -q /tmp/awscliv2.zip -d /tmp + rm /tmp/awscliv2.zip + sudo /tmp/aws/install --update + rm -rf /tmp/aws/ + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 + with: + aws-access-key-id: ${{ secrets.DOCS_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.DOCS_AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-2 + - name: "Syncing docs to S3" + env: + DESTINATION_LOCATION: "${{ needs.build-info.outputs.destination-location }}" + SOURCE_DIR_PATH: "/mnt/airflow-site/docs-archive/" + EXCLUDE_DOCS: "${{ inputs.exclude-docs }}" + SKIP_WRITE_TO_STABLE_FOLDER: "${{ needs.build-info.outputs.skip-write-to-stable-folder }}" + run: | + breeze release-management publish-docs-to-s3 --source-dir-path ${SOURCE_DIR_PATH} \ + --destination-location ${DESTINATION_LOCATION} --stable-versions \ + --exclude-docs ${EXCLUDE_DOCS} --overwrite ${SKIP_WRITE_TO_STABLE_FOLDER} diff --git a/.github/workflows/push-image-cache.yml b/.github/workflows/push-image-cache.yml index cf8097cebd768..eccd176096137 100644 --- a/.github/workflows/push-image-cache.yml +++ b/.github/workflows/push-image-cache.yml @@ -20,12 +20,8 @@ name: Push image cache on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." - required: true - type: string - runs-on-as-json-self-hosted: - description: "The array of labels (in json form) determining self-hosted runners." + runners: + description: "The array of labels (in json form) determining runners." required: true type: string cache-type: @@ -57,7 +53,7 @@ on: # yamllint disable-line rule:truthy required: true type: string branch: - description: "Branch used to run the CI jobs in (main/v2_*_test)." + description: "Branch used to run the CI jobs in (main/v*_*_test)." required: true type: string constraints-branch: @@ -83,11 +79,7 @@ on: # yamllint disable-line rule:truthy jobs: push-ci-image-cache: name: "Push CI ${{ inputs.cache-type }}:${{ matrix.python }} image cache " - # NOTE!!!!! This has to be put in one line for runs-on to recognize the "fromJSON" properly !!!! - # adding space before (with >) apparently turns the `runs-on` processed line into a string "Array" - # instead of an array of strings. 
- # yamllint disable-line rule:line-length - runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + runs-on: ${{ fromJSON(inputs.runners) }} permissions: contents: read packages: write @@ -116,21 +108,18 @@ jobs: PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python }}" UPGRADE_TO_NEWER_DEPENDENCIES: "false" VERBOSE: "true" - VERSION_SUFFIX_FOR_PYPI: "dev0" steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - name: Login to ghcr.io env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -159,11 +148,7 @@ jobs: push-prod-image-cache: name: "Push PROD ${{ inputs.cache-type }}:${{ matrix.python }} image cache" - # NOTE!!!!! This has to be put in one line for runs-on to recognize the "fromJSON" properly !!!! - # adding space before (with >) apparently turns the `runs-on` processed line into a string "Array" - # instead of an array of strings. - # yamllint disable-line rule:line-length - runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + runs-on: ${{ fromJSON(inputs.runners) }} permissions: contents: read packages: write @@ -191,26 +176,23 @@ jobs: PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python }}" UPGRADE_TO_NEWER_DEPENDENCIES: "false" VERBOSE: "true" - VERSION_SUFFIX_FOR_PYPI: "dev0" if: inputs.include-prod-images == 'true' steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist and context file" run: rm -fv ./dist/* ./docker-context-files/* - name: "Download packages prepared as artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: prod-packages path: ./docker-context-files diff --git a/.github/workflows/recheck-old-bug-report.yml b/.github/workflows/recheck-old-bug-report.yml index 217092b86f87e..c245f73923216 100644 --- a/.github/workflows/recheck-old-bug-report.yml +++ b/.github/workflows/recheck-old-bug-report.yml @@ -28,7 +28,7 @@ jobs: recheck-old-bug-report: runs-on: ["ubuntu-22.04"] steps: - - uses: actions/stale@v9 + - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 with: only-issue-labels: 'kind:bug' stale-issue-label: 'Stale Bug Report' diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index 0afeb8e143e90..dbe38567c5cb0 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ 
b/.github/workflows/release_dockerhub_image.yml @@ -21,12 +21,16 @@ on: # yamllint disable-line rule:truthy workflow_dispatch: inputs: airflowVersion: - description: 'Airflow version' + description: 'Airflow version (e.g. 3.0.1, 3.0.1rc1, 3.0.1b1)' required: true - skipLatest: - description: 'Skip Latest: Set to true if not latest.' + amdOnly: + type: boolean + description: 'Limit to amd64 images' + default: false + limitPythonVersions: + type: string + description: 'Force python versions (e.g. "3.10 3.11")' default: '' - required: false permissions: contents: read packages: read @@ -40,46 +44,20 @@ jobs: build-info: timeout-minutes: 10 name: "Build Info" - runs-on: ["ubuntu-22.04"] + runs-on: ["ubuntu-24.04"] outputs: - pythonVersions: ${{ steps.selective-checks.outputs.python-versions }} - allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }} - defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }} - chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }} - skipLatest: ${{ github.event.inputs.skipLatest == '' && ' ' || '--skip-latest' }} - limitPlatform: ${{ github.repository == 'apache/airflow' && ' ' || '--limit-platform linux/amd64' }} + pythonVersions: ${{ steps.determine-python-versions.outputs.python-versions }} + platformMatrix: ${{ steps.determine-matrix.outputs.platformMatrix }} + airflowVersion: ${{ steps.check-airflow-version.outputs.airflowVersion }} + skipLatest: ${{ steps.check-airflow-version.outputs.skip-latest }} + amd-runners: ${{ steps.selective-checks.outputs.amd-runners }} + arm-runners: ${{ steps.selective-checks.outputs.arm-runners }} env: GITHUB_CONTEXT: ${{ toJson(github) }} VERBOSE: true - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Install Breeze" - uses: ./.github/actions/breeze - with: - use-uv: "false" - - name: Selective checks - id: selective-checks - env: - VERBOSE: "false" - run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} - - release-images: - timeout-minutes: 120 - name: "Release images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }}" - runs-on: ["ubuntu-22.04"] - needs: [build-info] - strategy: - fail-fast: false - matrix: - python-version: ${{ fromJSON(needs.build-info.outputs.pythonVersions) }} + AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} + AMD_ONLY: ${{ github.event.inputs.amdOnly }} + LIMIT_PYTHON_VERSIONS: ${{ github.event.inputs.limitPythonVersions }} if: contains(fromJSON('[ "ashb", "eladkal", @@ -91,132 +69,80 @@ jobs: "utkarsharma2" ]'), github.event.sender.login) steps: + - name: "Input parameters summary" + shell: bash + run: | + echo "Input parameters summary" + echo "=========================" + echo "Airflow version: '${AIRFLOW_VERSION}'" + echo "AMD only: '${AMD_ONLY}'" + echo "Limit python versions: '${LIMIT_PYTHON_VERSIONS}'" - name: "Cleanup repo" shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + run: > + docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: 
persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + - name: "Install uv" + run: curl -LsSf https://astral.sh/uv/install.sh | sh + - name: "Check airflow version" + id: check-airflow-version + shell: bash + run: uv run scripts/ci/airflow_version_check.py "${AIRFLOW_VERSION}" >> "${GITHUB_OUTPUT}" - name: "Install Breeze" uses: ./.github/actions/breeze - with: - use-uv: "false" - - name: Free space - run: breeze ci free-space --answer yes - - name: "Cleanup dist and context file" - run: rm -fv ./dist/* ./docker-context-files/* - - name: "Login to hub.docker.com" - run: > - echo ${{ secrets.DOCKERHUB_TOKEN }} | - docker login --password-stdin --username ${{ secrets.DOCKERHUB_USER }} - - name: Login to ghcr.io + - name: Selective checks + id: selective-checks env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - ACTOR: ${{ github.actor }} - run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin - - name: "Install buildx plugin" - # yamllint disable rule:line-length - run: | - sudo apt-get update - sudo apt-get install ca-certificates curl - sudo install -m 0755 -d /etc/apt/keyrings - sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc - sudo chmod a+r /etc/apt/keyrings/docker.asc - - # Add the repository to Apt sources: - echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ - $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ - sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - sudo apt-get update - sudo apt install docker-buildx-plugin - - name: "Install regctl" - # yamllint disable rule:line-length - run: | - mkdir -p ~/bin - curl -L https://github.com/regclient/regclient/releases/latest/download/regctl-linux-amd64 >${HOME}/bin/regctl - chmod 755 ${HOME}/bin/regctl - echo "${HOME}/bin" >>${GITHUB_PATH} - - name: "Install emulation support" - run: docker run --privileged --rm tonistiigi/binfmt --install all - - name: "Create airflow_cache builder" - run: docker buildx create --name airflow_cache - - name: "Prepare chicken-eggs provider distributions" - # In case of provider distributions which use latest r00 version of providers, we should prepare them - # from the source code, not from the PyPI because they have apache-airflow>=X.Y.Z dependency - # And when we prepare them from sources they will have apache-airflow>=X.Y.Z.rc0 + VERBOSE: "false" + run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} + - name: "Determine build matrix" shell: bash - env: - CHICKEN_EGG_PROVIDERS: ${{ needs.build-info.outputs.chicken-egg-providers }} - run: > - breeze release-management prepare-provider-distributions - --distribution-format wheel - --version-suffix-for-pypi rc0 ${CHICKEN_EGG_PROVIDERS} - if: needs.build-info.outputs.chicken-egg-providers != '' - - name: "Copy dist packages to docker-context files" + id: determine-matrix + run: | + if [[ "${AMD_ONLY}" = "true" ]]; then + echo 'platformMatrix=["linux/amd64"]' >> "${GITHUB_OUTPUT}" + else + echo 'platformMatrix=["linux/amd64", "linux/arm64"]' >> "${GITHUB_OUTPUT}" + fi + - name: "Determine python versions" shell: bash - run: cp -v --no-preserve=mode,ownership ./dist/*.whl ./docker-context-files - if: needs.build-info.outputs.chicken-egg-providers != '' - - name: > - Release regular images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} + 
id: determine-python-versions env: - COMMIT_SHA: ${{ github.sha }} - REPOSITORY: ${{ github.repository }} - PYTHON_VERSION: ${{ matrix.python-version }} - AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} - SKIP_LATEST: ${{ needs.build-info.outputs.skipLatest }} - LIMIT_PLATFORM: ${{ needs.build-info.outputs.limitPlatform }} - CHICKEN_EGG_PROVIDERS: ${{ needs.build-info.outputs.chicken-egg-providers }} - run: > - breeze release-management release-prod-images - --dockerhub-repo "${REPOSITORY}" - --airflow-version "${AIRFLOW_VERSION}" - ${SKIP_LATEST} - ${LIMIT_PLATFORM} - --limit-python ${PYTHON_VERSION} - --chicken-egg-providers "${CHICKEN_EGG_PROVIDERS}" - - name: > - Release slim images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} - env: - COMMIT_SHA: ${{ github.sha }} - REPOSITORY: ${{ github.repository }} - PYTHON_VERSION: ${{ matrix.python-version }} - AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} - SKIP_LATEST: ${{ needs.build-info.outputs.skipLatest }} - LIMIT_PLATFORM: ${{ needs.build-info.outputs.limitPlatform }} - run: > - breeze release-management release-prod-images - --dockerhub-repo "${REPOSITORY}" - --airflow-version "${AIRFLOW_VERSION}" - ${SKIP_LATEST} - ${LIMIT_PLATFORM} - --limit-python ${PYTHON_VERSION} --slim-images - - name: > - Verify regular AMD64 image: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} - env: - PYTHON_VERSION: ${{ matrix.python-version }} - AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} - REPOSITORY: ${{ github.repository }} - run: > - breeze prod-image verify - --pull - --image-name - ${REPOSITORY}:${AIRFLOW_VERSION}-python${PYTHON_VERSION} - - name: > - Verify slim AMD64 image: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} - env: - PYTHON_VERSION: ${{ matrix.python-version }} - AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} - REPOSITORY: ${{ github.repository }} - run: > - breeze prod-image verify - --pull - --slim-image - --image-name - ${REPOSITORY}:slim-${AIRFLOW_VERSION}-python${PYTHON_VERSION} - - name: "Docker logout" - run: docker logout - if: always() + ALL_PYTHON_VERSIONS: ${{ steps.selective-checks.outputs.all-python-versions }} + # yamllint disable rule:line-length + run: | + # override python versions if specified + if [[ "${LIMIT_PYTHON_VERSIONS}" != "" ]]; then + PYTHON_VERSIONS=$(python3 -c "import json; print(json.dumps('${LIMIT_PYTHON_VERSIONS}'.split(' ')))") + else + PYTHON_VERSIONS=${ALL_PYTHON_VERSIONS} + fi + echo "python-versions=${PYTHON_VERSIONS}" >> "${GITHUB_OUTPUT}" + + + release-images: + name: "Release images" + needs: [build-info] + strategy: + fail-fast: false + matrix: + python: ${{ fromJSON(needs.build-info.outputs.pythonVersions) }} + uses: ./.github/workflows/release_single_dockerhub_image.yml + secrets: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + permissions: + contents: read + with: + pythonVersion: ${{ matrix.python }} + airflowVersion: ${{ needs.build-info.outputs.airflowVersion }} + platformMatrix: ${{ needs.build-info.outputs.platformMatrix }} + skipLatest: ${{ needs.build-info.outputs.skipLatest }} + armRunners: ${{ needs.build-info.outputs.arm-runners }} + amdRunners: ${{ needs.build-info.outputs.amd-runners }} diff --git a/.github/workflows/release_single_dockerhub_image.yml b/.github/workflows/release_single_dockerhub_image.yml new file mode 100644 index 0000000000000..991ac35b13924 --- /dev/null +++ 
b/.github/workflows/release_single_dockerhub_image.yml @@ -0,0 +1,238 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: "Release single PROD image" +on: # yamllint disable-line rule:truthy + workflow_call: + inputs: + airflowVersion: + description: 'Airflow version (e.g. 3.0.1, 3.0.1rc1, 3.0.1b1)' + type: string + required: true + platformMatrix: + description: 'Platform matrix formatted as json (e.g. ["linux/amd64", "linux/arm64"])' + type: string + required: true + pythonVersion: + description: 'Python version (e.g. 3.10, 3.11)' + type: string + required: true + skipLatest: + description: "Skip tagging latest release (true/false)" + type: string + required: true + amdRunners: + description: "Amd64 runners (e.g. [\"ubuntu-22.04\", \"ubuntu-24.04\"])" + type: string + required: true + armRunners: + description: "Arm64 runners (e.g. [\"ubuntu-22.04\", \"ubuntu-24.04\"])" + type: string + required: true + secrets: + DOCKERHUB_USER: + required: true + DOCKERHUB_TOKEN: + required: true +permissions: + contents: read +env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + VERBOSE: true +jobs: + build-images: + timeout-minutes: 50 + # yamllint disable rule:line-length + name: "Build: ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}, ${{ matrix.platform }}" + runs-on: ${{ (matrix.platform == 'linux/amd64') && fromJSON(inputs.amdRunners) || fromJSON(inputs.armRunners) }} + strategy: + fail-fast: false + matrix: + platform: ${{ fromJSON(inputs.platformMatrix) }} + env: + AIRFLOW_VERSION: ${{ inputs.airflowVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ inputs.pythonVersion }} + PLATFORM: ${{ matrix.platform }} + SKIP_LATEST: ${{ inputs.skipLatest == 'true' && '--skip-latest' || '' }} + COMMIT_SHA: ${{ github.sha }} + REPOSITORY: ${{ github.repository }} + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + - name: "Install Breeze" + uses: ./.github/actions/breeze + - name: Free space + run: breeze ci free-space --answer yes + - name: "Cleanup dist and context file" + run: rm -fv ./dist/* ./docker-context-files/* + - name: "Login to hub.docker.com" + run: > + echo ${{ secrets.DOCKERHUB_TOKEN }} | + docker login --password-stdin --username ${{ secrets.DOCKERHUB_USER }} + - name: "Get env vars for metadata" + shell: bash + run: | + echo "ARTIFACT_NAME=metadata-${PYTHON_MAJOR_MINOR_VERSION}-${PLATFORM/\//_}" >> "${GITHUB_ENV}" + echo 
"MANIFEST_FILE_NAME=metadata-${AIRFLOW_VERSION}-${PLATFORM/\//_}-${PYTHON_MAJOR_MINOR_VERSION}.json" >> "${GITHUB_ENV}" + echo "MANIFEST_SLIM_FILE_NAME=metadata-${AIRFLOW_VERSION}-slim-${PLATFORM/\//_}-${PYTHON_MAJOR_MINOR_VERSION}.json" >> "${GITHUB_ENV}" + - name: Login to ghcr.io + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ACTOR: ${{ github.actor }} + run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin + - name: "Install buildx plugin" + # yamllint disable rule:line-length + run: | + sudo apt-get update + sudo apt-get install ca-certificates curl + sudo install -m 0755 -d /etc/apt/keyrings + sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc + sudo chmod a+r /etc/apt/keyrings/docker.asc + + # Add the repository to Apt sources: + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ + $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + sudo apt-get update + sudo apt install docker-buildx-plugin + - name: "Create airflow_cache builder" + run: docker buildx create --name airflow_cache --driver docker-container + - name: > + Build regular images: ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}, ${{ matrix.platform }} + run: > + breeze release-management release-prod-images --dockerhub-repo "${REPOSITORY}" + --airflow-version "${AIRFLOW_VERSION}" ${SKIP_LATEST} + --python ${PYTHON_MAJOR_MINOR_VERSION} + --metadata-folder dist + - name: > + Verify regular image: ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}, ${{ matrix.platform }} + run: > + breeze prod-image verify --pull --manifest-file dist/${MANIFEST_FILE_NAME} + - name: > + Release slim images: ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}, ${{ matrix.platform }} + run: > + breeze release-management release-prod-images --dockerhub-repo "${REPOSITORY}" + --airflow-version "${AIRFLOW_VERSION}" ${SKIP_LATEST} + --python ${PYTHON_MAJOR_MINOR_VERSION} --slim-images + --metadata-folder dist + - name: > + Verify slim image: ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}, ${{ matrix.platform }} + run: > + breeze prod-image verify --pull --slim-image --manifest-file dist/${MANIFEST_SLIM_FILE_NAME} + - name: "List upload-able artifacts" + shell: bash + run: find ./dist -name '*.json' + - name: "Upload metadata artifact ${{ env.ARTIFACT_NAME }}" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: ${{ env.ARTIFACT_NAME }} + path: ./dist/metadata-* + retention-days: 7 + if-no-files-found: error + - name: "Docker logout" + run: docker logout + if: always() + + merge-images: + timeout-minutes: 5 + name: "Merge: ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}" + runs-on: ["ubuntu-22.04"] + needs: [build-images] + env: + AIRFLOW_VERSION: ${{ inputs.airflowVersion }} + PYTHON_MAJOR_MINOR_VERSION: ${{ inputs.pythonVersion }} + SKIP_LATEST: ${{ inputs.skipLatest == 'true' && '--skip-latest' || '' }} + COMMIT_SHA: ${{ github.sha }} + REPOSITORY: ${{ github.repository }} + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: "Prepare and cleanup runner" + run: 
./scripts/ci/prepare_and_cleanup_runner.sh + - name: "Install Breeze" + uses: ./.github/actions/breeze + - name: Free space + run: breeze ci free-space --answer yes + - name: "Cleanup dist and context file" + run: rm -fv ./dist/* ./docker-context-files/* + - name: "Login to hub.docker.com" + run: > + echo ${{ secrets.DOCKERHUB_TOKEN }} | + docker login --password-stdin --username ${{ secrets.DOCKERHUB_USER }} + - name: Login to ghcr.io + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ACTOR: ${{ github.actor }} + run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin + - name: "Download metadata artifacts" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + path: ./dist + pattern: metadata-${{ inputs.pythonVersion }}-* + - name: "List downloaded artifacts" + shell: bash + run: find ./dist -name '*.json' + - name: "Install buildx plugin" + # yamllint disable rule:line-length + run: | + sudo apt-get update + sudo apt-get install ca-certificates curl + sudo install -m 0755 -d /etc/apt/keyrings + sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc + sudo chmod a+r /etc/apt/keyrings/docker.asc + + # Add the repository to Apt sources: + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ + $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + sudo apt-get update + sudo apt install docker-buildx-plugin + - name: "Install regctl" + # yamllint disable rule:line-length + run: | + mkdir -p ~/bin + curl -L https://github.com/regclient/regclient/releases/latest/download/regctl-linux-amd64 >${HOME}/bin/regctl + chmod 755 ${HOME}/bin/regctl + echo "${HOME}/bin" >>${GITHUB_PATH} + - name: "Merge regular images ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}" + run: > + breeze release-management merge-prod-images --dockerhub-repo "${REPOSITORY}" + --airflow-version "${AIRFLOW_VERSION}" ${SKIP_LATEST} + --python ${PYTHON_MAJOR_MINOR_VERSION} --metadata-folder dist + - name: "Merge slim images ${{ inputs.airflowVersion }}, ${{ inputs.pythonVersion }}" + run: > + breeze release-management merge-prod-images --dockerhub-repo "${REPOSITORY}" + --airflow-version "${AIRFLOW_VERSION}" ${SKIP_LATEST} + --python ${PYTHON_MAJOR_MINOR_VERSION} --metadata-folder dist --slim-images + - name: "Docker logout" + run: docker logout + if: always() diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml index 939fa2b634179..bde6c8fbb7945 100644 --- a/.github/workflows/run-unit-tests.yml +++ b/.github/workflows/run-unit-tests.yml @@ -20,8 +20,12 @@ name: Unit tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build." + runners: + description: "The array of labels (in json form) determining public AMD runners." 
+ required: true + type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string test-group: @@ -37,7 +41,7 @@ on: # yamllint disable-line rule:truthy required: true type: string test-scope: - description: "The scope of the test to run: ('DB', 'Non-DB', 'All', 'ARM collection')" + description: "The scope of the test to run: ('DB', 'Non-DB', 'All')" required: true type: string test-name: @@ -88,6 +92,11 @@ on: # yamllint disable-line rule:truthy required: false default: "false" type: string + upgrade-sqlalchemy: + description: "Whether to upgrade SQLAlchemy or not (true/false)" + required: false + default: "false" + type: string upgrade-boto: description: "Whether to upgrade boto or not (true/false)" required: false @@ -116,16 +125,23 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv" required: true type: string + default-branch: + description: "The default branch of the repository" + required: true + type: string permissions: contents: read jobs: tests: - timeout-minutes: 120 + timeout-minutes: 65 + # yamllint disable rule:line-length name: "\ - ${{ inputs.test-scope }}-${{ inputs.test-group }}:\ + ${{ inputs.test-scope == 'All' && '' || inputs.test-scope == 'Quarantined' && 'Qrnt' || inputs.test-scope }}\ + ${{ inputs.test-scope == 'All' && '' || '-' }}\ + ${{ inputs.test-group == 'providers' && 'prov' || inputs.test-group}}:\ ${{ inputs.test-name }}${{ inputs.test-name-separator }}${{ matrix.backend-version }}:\ ${{ matrix.python-version}}:${{ matrix.test-types.description }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: fail-fast: false matrix: @@ -147,6 +163,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} INCLUDE_SUCCESS_OUTPUTS: ${{ inputs.include-success-outputs }} + PLATFORM: "${{ inputs.platform }}" # yamllint disable rule:line-length JOB_ID: "${{ inputs.test-group }}-${{ matrix.test-types.description }}-${{ inputs.test-scope }}-${{ inputs.test-name }}-${{inputs.backend}}-${{ matrix.backend-version }}-${{ matrix.python-version }}" MOUNT_SOURCES: "skip" @@ -154,26 +171,31 @@ jobs: PARALLEL_TEST_TYPES: ${{ matrix.test-types.test_types }} PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}" UPGRADE_BOTO: "${{ inputs.upgrade-boto }}" + UPGRADE_SQLALCHEMY: "${{ inputs.upgrade-sqlalchemy }}" AIRFLOW_MONITOR_DELAY_TIME_IN_SECONDS: "${{inputs.monitor-delay-time-in-seconds}}" VERBOSE: "true" + DEFAULT_BRANCH: "${{ inputs.default-branch }}" + TOTAL_TEST_TIMEOUT: "3600" # 60 minutes in seconds if: inputs.test-group == 'core' || inputs.skip-providers-tests != 'true' steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" + platform: ${{ inputs.platform }} python: ${{ matrix.python-version }} use-uv: ${{ inputs.use-uv }} - name: > Migration Tests: ${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }} uses: ./.github/actions/migration_tests + with: + python-version: ${{ matrix.python-version }} if: inputs.run-migration-tests == 'true' && inputs.test-group == 'core' - name: > ${{ 
inputs.test-group }}:${{ inputs.test-scope }} Tests ${{ inputs.test-name }} ${{ matrix.backend-version }} diff --git a/.github/workflows/special-tests.yml b/.github/workflows/special-tests.yml index d47907f923f54..d410375f4054d 100644 --- a/.github/workflows/special-tests.yml +++ b/.github/workflows/special-tests.yml @@ -20,8 +20,12 @@ name: Special tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build." + runners: + description: "The array of labels (in json form) determining runners." + required: true + type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string default-branch: @@ -90,7 +94,8 @@ jobs: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} downgrade-sqlalchemy: "true" test-name: "MinSQLAlchemy-Postgres" test-scope: "DB" @@ -105,6 +110,7 @@ jobs: debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} tests-min-sqlalchemy-providers: name: "Min SQLAlchemy test: providers" @@ -113,7 +119,8 @@ jobs: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} downgrade-sqlalchemy: "true" test-name: "MinSQLAlchemy-Postgres" test-scope: "DB" @@ -128,66 +135,77 @@ jobs: debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} - tests-boto-core: - name: "Latest Boto test: core" + tests-latest-sqlalchemy: + name: "Latest SQLAlchemy test: core" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - upgrade-boto: "true" - test-name: "LatestBoto-Postgres" - test-scope: "All" + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} + upgrade-sqlalchemy: "true" + test-name: "LatestSQLAlchemy-Postgres" + test-scope: "DB" test-group: "core" backend: "postgres" - python-versions: "['${{ inputs.default-python-version }}']" + # The python version constraint is a TEMPORARY WORKAROUND to exclude all FAB tests. It should be + # removed after upgrading FAB to v5 (PR #50960). 
The setting below should be: + # "['${{ inputs.default-python-version }}']" + python-versions: "['3.13']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" test-types-as-strings-in-json: ${{ inputs.core-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ inputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} + if: contains(fromJSON(inputs.python-versions), '3.13') # Remove this line after upgrading FAB to v5 - tests-boto-providers: - name: "Latest Boto test: providers" + tests-latest-sqlalchemy-providers: + name: "Latest SQLAlchemy test: providers" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - upgrade-boto: "true" - test-name: "LatestBoto-Postgres" - test-scope: "All" + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} + upgrade-sqlalchemy: "true" + test-name: "LatestSQLAlchemy-Postgres" + test-scope: "DB" test-group: "providers" backend: "postgres" - python-versions: "['${{ inputs.default-python-version }}']" + # The python version constraint is a TEMPORARY WORKAROUND to exclude all FAB tests. It should be + # removed after upgrading FAB to v5 (PR #50960). The setting below should be: + # "['${{ inputs.default-python-version }}']" + python-versions: "['3.13']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" test-types-as-strings-in-json: ${{ inputs.providers-test-types-list-as-strings-in-json }} - include-success-outputs: ${{ inputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} + if: contains(fromJSON(inputs.python-versions), '3.13') # Remove this line after upgrading FAB to v5 - - tests-pendulum-2-core: - name: "Pendulum2 test: core" + tests-boto-core: + name: "Latest Boto test: core" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - downgrade-pendulum: "true" - test-name: "Pendulum2-Postgres" + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} + upgrade-boto: "true" + test-name: "LatestBoto-Postgres" test-scope: "All" test-group: "core" backend: "postgres" @@ -201,17 +219,19 @@ jobs: debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} - tests-pendulum-2-providers: - name: "Pendulum2 test: providers" + tests-boto-providers: + name: "Latest Boto test: providers" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - downgrade-pendulum: "true" - test-name: "Pendulum2-Postgres" + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} + upgrade-boto: "true" + test-name: "LatestBoto-Postgres" test-scope: "All" test-group: "providers" backend: "postgres" @@ -225,17 +245,20 @@ jobs: 
debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} - tests-quarantined-core: - name: "Quarantined test: core" + tests-pendulum-2-core: + name: "Pendulum2 test: core" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - test-name: "Postgres" - test-scope: "Quarantined" + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} + downgrade-pendulum: "true" + test-name: "Pendulum2-Postgres" + test-scope: "All" test-group: "core" backend: "postgres" python-versions: "['${{ inputs.default-python-version }}']" @@ -248,17 +271,20 @@ jobs: debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} - tests-quarantined-providers: - name: "Quarantined test: providers" + tests-pendulum-2-providers: + name: "Pendulum2 test: providers" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - test-name: "Postgres" - test-scope: "Quarantined" + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} + downgrade-pendulum: "true" + test-name: "Pendulum2-Postgres" + test-scope: "All" test-group: "providers" backend: "postgres" python-versions: "['${{ inputs.default-python-version }}']" @@ -271,18 +297,19 @@ jobs: debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} - - tests-arm-collection-core: - name: "ARM Collection test: core" + tests-quarantined-core: + name: "Quarantined test: core" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} test-name: "Postgres" - test-scope: "ARM collection" + test-scope: "Quarantined" test-group: "core" backend: "postgres" python-versions: "['${{ inputs.default-python-version }}']" @@ -295,32 +322,32 @@ jobs: debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} - if: ${{ inputs.default-branch == 'main' }} + default-branch: ${{ inputs.default-branch }} - tests-arm-collection-providers: - name: "ARM Collection test: providers" + tests-quarantined-providers: + name: "Quarantined test: providers" uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} test-name: "Postgres" - test-scope: "ARM collection" + test-scope: "Quarantined" test-group: "providers" backend: "postgres" python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - test-types-as-strings-in-json: ${{ inputs.core-test-types-list-as-strings-in-json }} + test-types-as-strings-in-json: ${{ inputs.providers-test-types-list-as-strings-in-json }} include-success-outputs: ${{ inputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} 
debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} - if: ${{ inputs.default-branch == 'main' }} - + default-branch: ${{ inputs.default-branch }} tests-system-core: name: "System test: ${{ matrix.test-group }}" @@ -329,7 +356,8 @@ jobs: contents: read packages: read with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} + runners: ${{ inputs.runners }} + platform: ${{ inputs.platform }} test-name: "SystemTest" test-scope: "System" test-group: "core" @@ -344,3 +372,4 @@ jobs: debug-resources: ${{ inputs.debug-resources }} skip-providers-tests: ${{ inputs.skip-providers-tests }} use-uv: ${{ inputs.use-uv }} + default-branch: ${{ inputs.default-branch }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 2e03e9f33b120..5724a17314aec 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -29,7 +29,7 @@ jobs: stale: runs-on: ["ubuntu-22.04"] steps: - - uses: actions/stale@v9 + - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 with: stale-pr-message: > This pull request has been automatically marked as stale because it has not had diff --git a/.github/workflows/test-providers.yml b/.github/workflows/test-providers.yml index 39f96b2c89afc..3c187c42a0746 100644 --- a/.github/workflows/test-providers.yml +++ b/.github/workflows/test-providers.yml @@ -20,8 +20,12 @@ name: Provider tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build." + runners: + description: "The array of labels (in json form) determining public AMD runners." + required: true + type: string + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string canary-run: @@ -68,7 +72,7 @@ jobs: prepare-install-verify-provider-distributions: timeout-minutes: 80 name: "Providers ${{ matrix.package-format }} tests" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: fail-fast: false matrix: @@ -85,14 +89,14 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" - python: ${{ inputs.default-python-version }} + platform: ${{ inputs.platform }} + python: "${{ inputs.default-python-version }}" use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist files" run: rm -fv ./dist/* @@ -104,14 +108,14 @@ jobs: - name: "Prepare provider distributions: ${{ matrix.package-format }}" run: > breeze release-management prepare-provider-distributions --include-not-ready-providers - --version-suffix-for-pypi dev0 --distribution-format ${{ matrix.package-format }} + --skip-tag-check --distribution-format ${{ matrix.package-format }} - name: "Prepare airflow package: ${{ matrix.package-format }}" run: > - breeze release-management prepare-airflow-distributions --version-suffix-for-pypi dev0 + breeze release-management prepare-airflow-distributions --distribution-format ${{ matrix.package-format }} - name: "Prepare task-sdk package: ${{ matrix.package-format 
}}" run: > - breeze release-management prepare-task-sdk-distributions --version-suffix-for-pypi dev0 + breeze release-management prepare-task-sdk-distributions --distribution-format ${{ matrix.package-format }} - name: "Verify ${{ matrix.package-format }} packages with twine" run: | @@ -122,10 +126,6 @@ jobs: breeze release-management generate-issue-content-providers --only-available-in-dist --disable-progress if: matrix.package-format == 'wheel' - - name: Remove Python 3.9-incompatible provider distributions - run: | - echo "Removing Python 3.9-incompatible provider: cloudant" - rm -vf dist/*cloudant* - name: "Generate source constraints from CI image" shell: bash run: > @@ -134,7 +134,8 @@ jobs: - name: "Install and verify wheel provider distributions" env: DISTRIBUTION_FORMAT: ${{ matrix.package-format }} - AIRFLOW_SKIP_CONSTRAINTS: "${{ inputs.upgrade-to-newer-dependencies }}" + # yamllint disable rule:line-length + INSTALL_AIRFLOW_WITH_CONSTRAINTS: "${{ inputs.upgrade-to-newer-dependencies == 'true' && 'false' || 'true' }}" run: > breeze release-management verify-provider-distributions --use-distributions-from-dist @@ -163,7 +164,7 @@ jobs: timeout-minutes: 80 # yamllint disable rule:line-length name: Compat ${{ matrix.compat.airflow-version }}:P${{ matrix.compat.python-version }}:${{ matrix.compat.test-types.description }} - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + runs-on: ${{ fromJSON(inputs.runners) }} strategy: fail-fast: false matrix: @@ -175,7 +176,6 @@ jobs: GITHUB_USERNAME: ${{ github.actor }} INCLUDE_NOT_READY_PROVIDERS: "true" PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.compat.python-version }}" - VERSION_SUFFIX_FOR_PYPI: "dev0" VERBOSE: "true" CLEAN_AIRFLOW_INSTALLATION: "true" if: inputs.skip-providers-tests != 'true' @@ -184,13 +184,13 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ matrix.compat.python-version }}" uses: ./.github/actions/prepare_breeze_and_image with: - platform: "linux/amd64" + platform: ${{ inputs.platform }} python: ${{ matrix.compat.python-version }} use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist files" @@ -198,7 +198,7 @@ jobs: - name: "Prepare provider distributions: wheel" run: > breeze release-management prepare-provider-distributions --include-not-ready-providers - --distribution-format wheel + --distribution-format wheel --skip-tag-check # yamllint disable rule:line-length - name: Remove incompatible Airflow ${{ matrix.compat.airflow-version }}:Python ${{ matrix.compat.python-version }} provider distributions env: diff --git a/.gitignore b/.gitignore index 1b0a579167847..f7358b05f30e9 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ airflow/git_version airflow/ui/coverage/ logs/ airflow-webserver.pid +airflow-api-server.pid standalone_admin_password.txt warnings.txt warn-summary-*.txt @@ -127,6 +128,9 @@ ENV/ .idea/ *.iml +# Cursor +.cursor/ + # vim *.swp @@ -267,3 +271,6 @@ airflow-build-dockerfile* *.zip _api/ + +#while running go tests inside the go-sdk, it can generate log files for dags, ignore all logs +go-sdk/**/*.log diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 503814fbf5c77..e41cd56e95879 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,8 @@ default_stages: 
[pre-commit, pre-push] default_language_version: python: python3 - node: 22.14.0 + node: 22.18.0 + golang: 1.24.0 minimum_pre_commit_version: '3.2.0' exclude: ^.*/.*_vendor/ repos: @@ -30,17 +31,26 @@ repos: - id: check-hooks-apply name: Check if all hooks apply to the repository - repo: https://github.com/thlorenz/doctoc.git - rev: v2.2.0 + rev: 70fdcd39ef919754011a827bd25f23a0b141c3c3 # frozen: v2.2.0 hooks: - id: doctoc name: Add TOC for Markdown and RST files files: - ^README\.md$|^UPDATING.*\.md$|^chart/UPDATING.*\.md$|^dev/.*\.md$|^dev/.*\.rst$|^\.github/.*\.md|^airflow-core/tests/system/README\.md$ + (?x) + ^README\.md$| + ^UPDATING.*\.md$| + ^chart/UPDATING.*\.md$| + ^dev/.*\.md$| + ^dev/.*\.rst$| + ^docs/README\.md$| + ^\.github/.*\.md$| + ^airflow-core/tests/system/README\.md$ args: - "--maxlevel" - "2" - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.5.5 + # Replace the hash with a version once merged PR #103 is included in a release + rev: abdd8b62891099da34162217ecb3872d22184a51 hooks: - id: insert-license name: Add license for all SQL files @@ -96,7 +106,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all Python files - exclude: ^\.github/.*$|^.*/_vendor/.*$ + exclude: ^\.github/.*$|^.*/_vendor/.*$|^airflow-ctl/.*/.*generated\.py$ files: \.py$|\.pyi$ args: - --comment-style @@ -125,7 +135,14 @@ repos: - id: insert-license name: Add license for all YAML files except Helm templates - exclude: ^\.github/.*$|^chart/templates/.*|.*reproducible_build\.yaml$|^.*/v1.*\.yaml$|^.*/openapi/_private_ui.*\.yaml$|^.*/pnpm-lock\.yaml$ + exclude: > + (?x) + ^\.github/.*$|^chart/templates/.*| + .*reproducible_build\.yaml$| + ^.*/v2.*\.yaml$| + ^.*/openapi/_private_ui.*\.yaml$| + ^.*/pnpm-lock\.yaml$| + .*-generated\.yaml$ types: [yaml] files: \.ya?ml$ args: @@ -155,6 +172,18 @@ repos: - --fuzzy-match-generates-todo files: > \.cfg$|\.conf$|\.ini$|\.ldif$|\.properties$|\.service$|\.tf$|Dockerfile.*$ + - id: insert-license + name: Add license for all Go files + types: [go] + exclude: mocks/.*\.go$ + args: + - --comment-style + - "|//|" + - --license-filepath + - scripts/ci/license-templates/LICENSE.txt + - --insert-license-after-regex + # We need this 'generated by' line at the top for `golines` to not format it + - '// Code generated by .*' - repo: local hooks: - id: check-min-python-version name: Check min python version language: python additional_dependencies: ['rich>=12.4.4'] require_serial: true - - id: check-imports-in-providers - name: Check imports in providers - entry: ./scripts/ci/pre_commit/check_imports_in_providers.py - language: python - additional_dependencies: ['rich>=12.4.4', 'ruff==0.11.2'] - files: ^providers/.*/src/airflow/providers/.*\.py$ - require_serial: true - id: update-black-version name: Update black versions everywhere (manual) entry: ./scripts/ci/pre_commit/update_black_version.py @@ -184,10 +206,14 @@ repos: entry: ./scripts/ci/pre_commit/update_installers_and_pre_commit.py stages: ['manual'] language: python - files: ^\.pre-commit-config\.yaml$|^scripts/ci/pre_commit/update_installers_and_pre_commit\.py$ + files: > + (?x) + ^\.pre-commit-config\.yaml$| + ^\.github/\.pre-commit-config\.yaml$| + ^scripts/ci/pre_commit/update_installers_and_pre_commit\.py$ pass_filenames: false require_serial: true - additional_dependencies: ['pyyaml>=6.0.2', 'rich>=12.4.4', 'requests>=2.31.0'] + additional_dependencies: ['pyyaml>=6.0.2', 'rich>=12.4.4', 'requests>=2.31.0', 'packaging>=25'] - id:
update-chart-dependencies name: Update chart dependencies to latest (manual) entry: ./scripts/ci/pre_commit/update_chart_dependencies.py @@ -210,23 +236,23 @@ repos: language: python entry: ./scripts/ci/pre_commit/check_deferrable_default.py pass_filenames: false - additional_dependencies: ['libcst>=1.1.0'] + # libcst doesn't have source wheels for all Python versions except 3.12, so it is excluded + additional_dependencies: ['libcst>=1.8.1'] files: ^(providers/.*/)?airflow/.*/(sensors|operators)/.*\.py$ - repo: https://github.com/asottile/blacken-docs - rev: 1.19.1 + rev: 78a9dcbecf4f755f65d1f3dec556bc249d723600 # frozen: 1.19.1 hooks: - id: blacken-docs name: Run black on docs args: - --line-length=110 - - --target-version=py39 - --target-version=py310 - --target-version=py311 - --target-version=py312 alias: blacken-docs additional_dependencies: ['black==24.10.0'] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0 hooks: - id: check-merge-conflict name: Check that merge conflicts are not being committed @@ -239,20 +265,30 @@ repos: exclude: ^providers/ssh/docs/connections/ssh\.rst$ - id: end-of-file-fixer name: Make sure that there is an empty line at the end - exclude: ^airflow-core/docs/img/.*\.dot|^airflow-core/docs/img/.*\.sha256|.*/dist/.*|LICENSES-ui\.txt$ + exclude: > + (?x) + ^airflow-core/docs/img/.*\.dot| + ^airflow-core/docs/img/.*\.sha256| + .*/dist/.*| + LICENSES-ui\.txt$| + .*/openapi-gen/.* - id: mixed-line-ending name: Detect if mixed line ending is used (\r vs. \r\n) - id: check-executables-have-shebangs name: Check that executables have shebang - id: check-xml name: Check XML files with xmllint + exclude: > + (?x) + ^scripts/ci/docker-compose/gremlin/. - id: trailing-whitespace name: Remove trailing whitespace at end of line - exclude: ^airflow-core/docs/img/.*\.dot$|^dev/breeze/doc/images/output.*$ - - id: fix-encoding-pragma - name: Remove encoding header from Python files - args: - - --remove + exclude: > + (?x) + ^airflow-core/docs/img/.*\.dot$| + ^dev/breeze/doc/images/output.*$| + ^.*/openapi-gen/.*$| + ^airflow-ctl/docs/images/.*\.svg$ - id: pretty-format-json name: Format JSON files args: @@ -263,14 +299,14 @@ repos: files: ^chart/values\.schema\.json$|^chart/values_schema\.schema\.json$ pass_filenames: true - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.10.0 + rev: 3a6eb0fadf60b3cccfd80bad9dbb6fae7e47b316 # frozen: v1.10.0 hooks: - id: rst-backticks name: Check if RST files use double backticks for code - id: python-no-log-warn name: Check if there are no deprecated log.warn usages - repo: https://github.com/adrienverge/yamllint - rev: v1.36.2 + rev: 79a6b2b1392eaf49cdd32ac4f14be1a809bbd8f7 # frozen: v1.37.1 hooks: - id: yamllint name: Check YAML files with yamllint @@ -281,13 +317,14 @@ repos: ^.*airflow\.template\.yaml$| ^.*init_git_sync\.template\.yaml$| ^chart/(?:templates|files)/.*\.yaml$| + ^helm-tests/tests/chart_utils/keda.sh_scaledobjects\.yaml$| .*/v1.*\.yaml$| ^.*openapi.*\.yaml$| ^\.pre-commit-config\.yaml$| ^.*reproducible_build\.yaml$| ^.*pnpm-lock\.yaml$ - repo: https://github.com/ikamensh/flynt - rev: '1.0.1' + rev: '97be693bf18bc2f050667dd282d243e2824b81e2' # frozen: 1.0.6 hooks: - id: flynt name: Run flynt string format converter for Python @@ -298,7 +335,7 @@ repos: - --line-length - '99999' - repo: https://github.com/codespell-project/codespell - rev: v2.4.1 + rev: 63c8f8312b7559622c0d82815639671ae42132ac # frozen: v2.4.1 hooks: - id: codespell name: Run codespell @@
-308,13 +345,22 @@ repos: The word(s) should be in lowercase." && exec codespell "$@"' -- language: python types: [text] - exclude: material-icons\.css$|^images/.*$|^RELEASE_NOTES\.txt$|^.*package-lock\.json$|^.*/kinglear\.txt$|^.*pnpm-lock\.yaml$|.*/dist/.* + exclude: > + (?x) + material-icons\.css$| + ^images/.*$| + ^RELEASE_NOTES\.txt$| + ^.*package-lock\.json$| + ^.*/kinglear\.txt$| + ^.*pnpm-lock\.yaml$| + .*/dist/.*| + ^airflow-core/src/airflow/ui/public/i18n/locales/(?!en/).+/ args: - --ignore-words=docs/spelling_wordlist.txt - - --skip=providers/.*/src/airflow/providers/*/*.rst,providers/*/docs/changelog.rst,docs/*/commits.rst,providers/*/docs/commits.rst,providers/*/*/docs/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md + - --skip=providers/.*/src/airflow/providers/*/*.rst,providers/*/docs/changelog.rst,docs/*/commits.rst,providers/*/docs/commits.rst,providers/*/*/docs/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md,*.svg - --exclude-file=.codespellignorelines - repo: https://github.com/woodruffw/zizmor-pre-commit - rev: v1.5.1 + rev: 807e9d4c61778a1c3082413cf4ff50629483d8bb # frozen: v1.12.0 hooks: - id: zizmor name: Run zizmor to check for github workflow syntax errors @@ -338,6 +384,20 @@ repos: always_run: true pass_filenames: false additional_dependencies: ['rich>=12.4.4', 'pyyaml>=6.0.2', 'tomli>=2.0.1'] + - id: check-shared-distributions-structure + name: Check shared distributions structure + entry: ./scripts/ci/pre_commit/check_shared_distributions_structure.py + language: python + additional_dependencies: ['rich>=12.4.4', 'tomli>=2.0.1'] + pass_filenames: false + files: ^shared/.*$ + - id: check-shared-distributions-usage + name: Check shared distributions usage + entry: ./scripts/ci/pre_commit/check_shared_distributions_usage.py + language: python + additional_dependencies: ['rich>=12.4.4', 'tomli>=2.0.1'] + pass_filenames: false + files: ^shared/.*$|^.*/pyproject.toml$|^.*/_shared/.*$ - id: validate-operators-init name: No templated field logic checks in operator __init__ description: Prevent templated field logic checks in operators' __init__ @@ -362,6 +422,13 @@ repos: ^airflow_breeze/templates/PROVIDER_README_TEMPLATE\.rst\.jinja2$ additional_dependencies: ['rich>=12.4.4','requests>=2.31.0'] require_serial: true + - id: check-airflow-v-imports-in-tests + name: Check AIRFLOW_V imports in tests + language: python + entry: ./scripts/ci/pre_commit/check_airflow_v_imports_in_tests.py + pass_filenames: true + files: ^providers/.*/tests/.+\.py$ + additional_dependencies: ['rich>=12.4.4'] - id: ruff name: Run 'ruff' for extremely fast Python linting description: "Run 'ruff' for extremely fast Python linting" @@ -370,7 +437,7 @@ repos: types_or: [python, pyi] args: [--fix] require_serial: true - additional_dependencies: ['ruff==0.11.2'] + additional_dependencies: ['ruff==0.12.8'] exclude: ^airflow-core/tests/unit/dags/test_imports\.py$|^performance/tests/test_.*\.py$ - id: ruff-format name: Run 'ruff format' @@ -380,14 +447,14 @@ repos: types_or: [python, pyi] args: [] require_serial: true - additional_dependencies: ['ruff==0.11.2'] + additional_dependencies: ['ruff==0.12.8'] exclude: ^airflow-core/tests/unit/dags/test_imports\.py$ - id: replace-bad-characters name: Replace bad characters entry: ./scripts/ci/pre_commit/replace_bad_characters.py language: python types: [file, text] - exclude: ^clients/gen/go\.sh$|^\.gitmodules$|.*/dist/.* + exclude: 
^clients/gen/go\.sh$|^\.gitmodules$|^airflow-core/src/airflow/ui/openapi-gen/|.*/dist/.*|\.go$|/go\.(mod|sum)$ additional_dependencies: ['rich>=12.4.4'] - id: lint-dockerfile name: Lint Dockerfile @@ -422,7 +489,7 @@ repos: - id: check-airflow-providers-bug-report-template name: Sort airflow-bug-report provider list language: python - files: ^\.github/ISSUE_TEMPLATE/airflow_providers_bug_report\.yml$ + files: ^\.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report\.yml$ require_serial: true entry: ./scripts/ci/pre_commit/check_airflow_bug_report_template.py additional_dependencies: ['rich>=12.4.4', 'pyyaml>=6.0.2'] @@ -456,7 +523,7 @@ repos: entry: ./scripts/ci/pre_commit/check_common_sql_dependency.py language: python files: ^providers/.*/src/airflow/providers/.*/hooks/.*\.py$ - additional_dependencies: ['rich>=12.4.4', 'pyyaml>=6.0.2', 'packaging>=23.2'] + additional_dependencies: ['rich>=12.4.4', 'pyyaml>=6.0.2', 'packaging>=25'] - id: check-extra-packages-references name: Checks setup extra packages description: Checks if all the extras defined in hatch_build.py are listed in extra-packages-ref.rst file @@ -476,7 +543,10 @@ repos: name: Generate airflow diagrams entry: ./scripts/ci/pre_commit/generate_airflow_diagrams.py language: python - files: ^airflow-core/docs/.*/diagram_[^/]*\.py$ + files: > + (?x) + ^airflow-core/docs/.*/diagram_[^/]*\.py$| + ^docs/images/.*\.py$ pass_filenames: true additional_dependencies: ['rich>=12.4.4', 'diagrams>=0.23.4'] - id: generate-volumes-for-sources @@ -487,6 +557,16 @@ repos: pass_filenames: false require_serial: true additional_dependencies: ['rich>=12.4.4'] + - id: prevent-deprecated-sqlalchemy-usage + name: Prevent deprecated sqlalchemy usage + entry: ./scripts/ci/pre_commit/prevent_deprecated_sqlalchemy_usage.py + language: python + additional_dependencies: ['rich>=12.4.4'] + files: > + (?x) + ^airflow-ctl.*\.py$| + ^task_sdk.*\.py$ + pass_filenames: true - id: update-supported-versions name: Updates supported versions in documentation entry: ./scripts/ci/pre_commit/supported_versions.py @@ -506,12 +586,12 @@ repos: ^providers/fab/src/airflow/providers/fab/migrations/versions/.*$|^providers/fab/src/airflow/providers/fab/migrations/versions| ^airflow-core/src/airflow/utils/db\.py$| ^providers/fab/src/airflow/providers/fab/auth_manager/models/db\.py$ - additional_dependencies: ['packaging>=23.2', 'rich>=12.4.4'] + additional_dependencies: ['packaging>=25', 'rich>=12.4.4'] - id: update-version name: Update versions in docs entry: ./scripts/ci/pre_commit/update_versions.py language: python - files: ^docs|^airflow-core/src/airflow/__init__\.py$ + files: ^docs|^airflow-core/src/airflow/__init__\.py$|.*/pyproject\.toml$ pass_filenames: false additional_dependencies: ['rich>=12.4.4'] - id: check-pydevd-left-in-code @@ -520,6 +600,60 @@ repos: entry: "pydevd.*settrace\\(" pass_filenames: true files: \.py$ + - id: check-pytest-mark-db-test-in-providers + language: pygrep + name: Check pytest.mark.db_test use in providers + entry: pytest\.mark\.db_test + pass_filenames: true + # Here we should add providers that are already free from the pytest.mark.db_test + # and we want to keep them clean and only use non-db-tests + files: > + (?x) + ^providers/airbyte/.*\.py$| + ^providers/apache/beam/.*\.py$| + ^providers/apache/flink/.*\.py$| + ^providers/apache/iceberg/.*\.py$| + ^providers/apache/kafka/.*\.py$| + ^providers/arangodb/.*\.py$| + ^providers/asana/.*\.py$| + ^providers/cloudant/.*\.py$| + ^providers/cohere/.*\.py$| + ^providers/common/compat/.*\.py$| 
+ ^providers/common/messaging/.*\.py$| + ^providers/datadog/.*\.py$| + ^providers/dingding/.*\.py$| + ^providers/discord/.*\.py$| + ^providers/exasol/.*\.py$| + ^providers/facebook/.*\.py$| + ^providers/ftp/.*\.py$| + ^providers/grpc/.*\.py$| + ^providers/hashicorp/.*\.py$| + ^providers/imap/.*\.py$| + ^providers/influxdb/.*\.py$| + ^providers/jdbc/.*\.py$| + ^providers/jenkins/.*\.py$| + ^providers/mongo/.*\.py$| + ^providers/microsoft/psrp/.*\.py$| + ^providers/microsoft/winrm/.*\.py$| + ^providers/neo4j/.*\.py$| + ^providers/odbc/.*\.py$| + ^providers/openai/.*\.py$| + ^providers/openfaas/.*\.py$| + ^providers/oracle/.*\.py$| + ^providers/pagerduty/.*\.py$| + ^providers/pgvector/.*\.py$| + ^providers/pinecone/.*\.py$| + ^providers/postgres/.*\.py$| + ^providers/presto/.*\.py$| + ^providers/segment/.*\.py$| + ^providers/sendgrid/.*\.py$| + ^providers/singularity/.*\.py$| + ^providers/tableau/.*\.py$| + ^providers/teradata/.*\.py$| + ^providers/trino/.*\.py$| + ^providers/vertica/.*\.py$| + ^providers/yandex/.*\.py$| + ^providers/zendesk/.*\.py$ - id: check-links-to-example-dags-do-not-use-hardcoded-versions name: Verify no hard-coded version in example dags description: The links to example dags should use |version| as version specification @@ -583,8 +717,9 @@ repos: pass_filenames: true exclude: > (?x) - ^clients/python/openapi_v1.yaml$| + ^airflow-core/src/airflow/ui/src/i18n/config\.ts$| ^airflow-core/src/airflow/ui/openapi-gen/| + ^airflow-core/src/airflow/ui/public/i18n/locales/de/README\.md$| ^airflow-core/src/airflow/cli/commands/local_commands/fastapi_api_command\.py$| ^airflow-core/src/airflow/config_templates/| ^airflow-core/src/airflow/models/baseoperator\.py$| @@ -606,6 +741,7 @@ repos: ^providers/google/src/airflow/providers/google/cloud/operators/cloud_build\.py$| ^providers/google/src/airflow/providers/google/cloud/operators/dataproc\.py$| ^providers/google/src/airflow/providers/google/cloud/operators/mlengine\.py$| + ^providers/keycloak/src/airflow/providers/keycloak/auth_manager/cli/definition.py| ^providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/cosmos\.py$| ^providers/microsoft/winrm/src/airflow/providers/microsoft/winrm/hooks/winrm\.py$| ^airflow-core/docs/.*commits\.rst$| @@ -619,6 +755,7 @@ repos: ^airflow-core/src/airflow/utils/trigger_rule\.py$| ^chart/values.schema\.json$| ^helm-tests/tests/chart_utils/helm_template_generator\.py$| + ^helm-tests/tests/chart_utils/ingress-networking-v1beta1\.json$| ^dev/| ^devel-common/src/docs/README\.rst$| ^docs/apache-airflow-providers-amazon/secrets-backends/aws-ssm-parameter-store\.rst$| @@ -633,6 +770,7 @@ repos: ^.*/conf_constants\.py$| ^.*/provider_conf\.py$| ^devel-common/src/sphinx_exts/removemarktransform\.py| + ^devel-common/src/tests_common/test_utils/db\.py| ^airflow-core/newsfragments/41761.significant\.rst$| ^scripts/ci/pre_commit/vendor_k8s_json_schema\.py$| ^scripts/ci/docker-compose/integration-keycloak\.yml$| @@ -645,7 +783,7 @@ repos: ^.*commits\.(rst|txt)$| ^.*RELEASE_NOTES\.rst$| ^contributing-docs/03_contributors_quick_start\.rst$| - ^.*\.(png|gif|jp[e]?g|tgz|lock)$| + ^.*\.(png|gif|jp[e]?g|svg|tgz|lock)$| git| ^airflow-core/newsfragments/43349\.significant\.rst$| ^airflow-core/newsfragments/41368\.significant\.rst$| @@ -707,7 +845,7 @@ repos: files: > (?x) ^providers/.*/src/airflow/providers/.*\.py$ - exclude: providers/standard/.*/.*\.py$ + exclude: ^providers/standard/.*/.*\.py$ - id: check-get-lineage-collector-providers language: python name: Check providers import hook lineage 
code from compat @@ -731,7 +869,7 @@ repos: name: Verify usage of Airflow deprecation classes in core entry: category=DeprecationWarning|category=PendingDeprecationWarning files: \.py$ - exclude: ^airflow-core/src/airflow/configuration\.py$|airflow-core/tests/.*$|^providers/.*/src/airflow/providers/|^scripts/in_container/verify_providers\.py$|providers/.*/tests/.*$|^devel-common/ + exclude: ^airflow-core/src/airflow/configuration\.py$|^airflow-core/tests/.*$|^providers/.*/src/airflow/providers/|^scripts/in_container/verify_providers\.py$|^providers/.*/tests/.*$|^devel-common/ pass_filenames: true - id: check-provide-create-sessions-imports language: pygrep @@ -746,12 +884,6 @@ repos: entry: "LoggingMixin\\(\\)" files: \.py$ pass_filenames: true - - id: check-daysago-import-from-utils - language: pygrep - name: days_ago imported from airflow.utils.dates - entry: "(airflow\\.){0,1}utils\\.dates\\.days_ago" - files: \.py$ - pass_filenames: true - id: check-start-date-not-used-in-defaults language: pygrep name: start_date not in default_args @@ -808,6 +940,13 @@ repos: files: ^chart require_serial: true additional_dependencies: ['rich>=12.4.4','requests>=2.31.0'] + - id: validate-chart-annotations + name: Validate chart annotations + entry: ./scripts/ci/pre_commit/validate_chart_annotations.py + language: python + pass_filenames: false + files: ^chart/Chart\.yaml$ + additional_dependencies: ['pyyaml>=6.0.2', 'rich>=12.4.4'] - id: kubeconform name: Kubeconform check on our helm chart entry: ./scripts/ci/pre_commit/check_kubeconform.py @@ -834,9 +973,8 @@ repos: - id: compile-fab-assets name: Compile FAB provider assets language: node - 'types_or': [javascript, ts, tsx] files: ^providers/fab/.*/www/ - entry: ./scripts/ci/pre_commit/compile_fab_assets.py + entry: ./scripts/ci/pre_commit/compile_provider_assets.py fab pass_filenames: false additional_dependencies: ['yarn@1.22.21'] - id: compile-ui-assets-dev @@ -886,10 +1024,15 @@ repos: name: Update Airflow's meta-package pyproject.toml language: python entry: ./scripts/ci/pre_commit/update_airflow_pyproject_toml.py - files: ^pyproject\.toml$ + files: > + (?x) + ^.*/pyproject\.toml$| + ^scripts/ci/pre_commit/update_airflow_pyproject_toml\.py$| + ^providers/.*/pyproject\.toml$| + ^providers/.*/provider\.yaml$ pass_filenames: false require_serial: true - additional_dependencies: ['rich>=12.4.4', 'tomli>=2.0.1', 'packaging>=23.2' ] + additional_dependencies: ['rich>=12.4.4', 'tomli>=2.0.1', 'packaging>=25'] - id: update-reproducible-source-date-epoch name: Update Source Date Epoch for reproducible builds language: python @@ -970,6 +1113,7 @@ repos: exclude: > (?x) ^scripts/ci/docker-compose/grafana/.| + ^scripts/ci/docker-compose/gremlin/.| ^scripts/ci/docker-compose/.+-config\.ya?ml$ require_serial: true additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'pyyaml>=6.0.2', 'requests==2.32.3', 'rich>=12.4.4'] @@ -1084,7 +1228,7 @@ repos: name: Check significant newsfragments are valid # Significant newsfragments follow a special format so that we can group information easily.
language: python - files: airflow-core/newsfragments/.*\.rst$ + files: ^airflow-core/newsfragments/.*\.rst$ entry: ./scripts/ci/pre_commit/significant_newsfragments_checker.py pass_filenames: false additional_dependencies: ['docutils>=0.21.2', 'pygments>=2.19.1', 'jinja2>=3.1.5'] @@ -1207,7 +1351,71 @@ repos: pass_filenames: true files: ^airflow-core/src/airflow/migrations/versions/.*\.py$ exclude: - airflow-core/src/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py + ^airflow-core/src/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py$ + - id: go-mockery + name: Generate mocks for go + entry: -w /src/go-sdk vektra/mockery:3 + files: ^go-sdk/ + exclude: mocks/.*\.go$ + types: [go] + pass_filenames: false + language: docker_image + - id: go-mod-tidy + name: Run go mod tidy + entry: bash -c "cd go-sdk && go mod tidy" + files: ^go-sdk/ + exclude: mocks/.*\.go$ + pass_filenames: false + language: system + - id: gofmt + name: Format go code + entry: golines --base-formatter=gofumpt --write-output --max-len=100 --chain-split-dots + additional_dependencies: [github.com/segmentio/golines@latest, mvdan.cc/gofumpt@v0.8.0] + files: ^go-sdk/ + types: [go] + language: golang + - id: gci + name: Consistent import ordering for Go files + # Since this is invoked from the root folder, not go-sdk/, gci can't auto-detect the prefix + entry: gci write --skip-generated -s standard -s default -s "prefix(github.com/apache/airflow)" + additional_dependencies: [github.com/daixiang0/gci@v0.13.6] + files: ^go-sdk/ + types: [go] + language: golang + - id: ts-compile-lint-ui + name: Compile / format / lint UI + description: TS types generation / ESLint / Prettier new UI files + language: node + files: | + (?x) + ^airflow-core/src/airflow/ui/.*\.(js|ts|tsx|yaml|css|json)$| + ^airflow-core/src/airflow/api_fastapi/core_api/openapi/.*\.yaml$| + ^airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v1.*\.yaml$ + exclude: | + (?x) + ^airflow-core/src/airflow/ui/node-modules/.*| + ^airflow-core/src/airflow/ui/.pnpm-store + entry: ./scripts/ci/pre_commit/ts_compile_lint_ui.py + additional_dependencies: ['pnpm@9.7.1'] + pass_filenames: true + require_serial: true + - id: ts-compile-lint-simple-auth-manager-ui + name: Compile / format / lint simple auth manager UI + description: TS types generation / ESLint / Prettier new UI files + language: node + files: | + (?x) + ^airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/.*\.(js|ts|tsx|yaml|css|json)$| + ^airflow-core/src/airflow/api_fastapi/core_api/openapi/.*\.yaml$| + ^airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/.*\.yaml$ + exclude: | + (?x) + ^airflow-core/src/airflow/api_fastapi/node-modules/.*| + ^airflow-core/src/airflow/api_fastapi/.pnpm-store + entry: ./scripts/ci/pre_commit/ts_compile_lint_simple_auth_manager_ui.py + additional_dependencies: ['pnpm@9.7.1'] + pass_filenames: true + require_serial: true ## ADD MOST PRE-COMMITS ABOVE THAT LINE # The below pre-commits are those requiring CI image to be built - id: mypy-dev @@ -1230,7 +1438,7 @@ repos: name: Run mypy for airflow-core language: python entry: ./scripts/ci/pre_commit/mypy.py - files: airflow-core/.*\.py$ + files: ^airflow-core/.*\.py$ require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow-core @@ -1239,7 +1447,7 @@ repos: language: python entry: ./scripts/ci/pre_commit/mypy_folder.py airflow-core pass_filenames: false - files: airflow-core/.*\.py$ + files: ^airflow-core/.*\.py$ require_serial: true 
additional_dependencies: ['rich>=12.4.4'] - id: mypy-providers @@ -1295,6 +1503,7 @@ repos: language: python entry: ./scripts/ci/pre_commit/mypy.py files: ^airflow-ctl/src/airflowctl/.*\.py$|^airflow-ctl/tests/.*\.py$ + exclude: .*generated.py require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow-ctl @@ -1309,24 +1518,32 @@ repos: - id: generate-openapi-spec name: Generate the FastAPI API spec language: python - entry: ./scripts/ci/pre_commit/update_fastapi_api_spec.py + entry: ./scripts/ci/pre_commit/generate_openapi_spec.py pass_filenames: false files: ^airflow-core/src/airflow/api_fastapi/.*\.py$|^airflow-core/src/airflow/api_fastapi/auth/managers/simple/.*\.py$|^providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/.*\.py$ exclude: ^airflow-core/src/airflow/api_fastapi/execution_api/.* + additional_dependencies: ['rich>=12.4.4', 'openapi-spec-validator>=0.7.1'] + - id: generate-openapi-spec-fab + name: Generate the FastAPI API spec for FAB + language: python + entry: ./scripts/ci/pre_commit/generate_openapi_spec_fab.py + pass_filenames: false + files: ^providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/.*\.py$ + additional_dependencies: ['rich>=12.4.4', 'openapi-spec-validator>=0.7.1'] + - id: generate-openapi-spec-keycloak + name: Generate the FastAPI API spec for Keycloak + language: python + entry: ./scripts/ci/pre_commit/generate_openapi_spec_keycloak.py + pass_filenames: false + files: ^providers/keycloak/src/airflow/providers/keycloak/auth_manager/.*\.py$ + additional_dependencies: [ 'rich>=12.4.4', 'openapi-spec-validator>=0.7.1' ] + - id: check-i18n-json + name: Check i18n files validity + description: Check i18n files are valid json, have no TODOs, and auto-format them + language: python + files: ^airflow-core/src/airflow/ui/public/i18n/locales/.*\.json$ + entry: ./scripts/ci/pre_commit/check_i18n_json.py additional_dependencies: ['rich>=12.4.4'] - - id: ts-compile-format-lint-ui - name: Compile / format / lint UI - description: TS types generation / ESLint / Prettier new UI files - language: node - types_or: [javascript, ts, tsx, yaml, css, json] - files: | - (?x) - ^airflow-core/src/airflow/ui/| - ^airflow-core/src/airflow/api_fastapi/core_api/openapi/.*\.yaml$| - ^airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/| - ^airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v1.*\.yaml$ - entry: ./scripts/ci/pre_commit/compile_lint_ui.py - additional_dependencies: ['pnpm@9.7.1'] pass_filenames: false - id: check-provider-yaml-valid name: Validate provider.yaml files @@ -1353,10 +1570,20 @@ repos: - id: generate-tasksdk-datamodels name: Generate Datamodels for TaskSDK client language: python - entry: uv run -p 3.12 --no-dev --no-progress --active --group codegen --project apache-airflow-task-sdk --directory task-sdk -s dev/generate_task_sdk_models.py + entry: uv run -p 3.12 --no-progress --active --group codegen --project apache-airflow-task-sdk --directory task-sdk -s dev/generate_task_sdk_models.py pass_filenames: false files: ^airflow-core/src/airflow/api_fastapi/execution_api/.*\.py$ require_serial: true + - id: generate-airflowctl-datamodels + name: Generate Datamodels for AirflowCTL + language: python + entry: > + bash -c ' + uv run -p 3.12 --no-dev --no-progress --active --group codegen --project apache-airflow-ctl --directory airflow-ctl/ datamodel-codegen && + uv run -p 3.12 --no-dev --no-progress --active --group codegen --project apache-airflow-ctl --directory airflow-ctl/ datamodel-codegen 
--input="../airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v2-simple-auth-manager-generated.yaml" --output="src/airflowctl/api/datamodels/auth_generated.py"' + pass_filenames: false + files: ^airflow-core/src/airflow/api_fastapi/core_api/datamodels/.*\.py$|^airflow-core/src/airflow/api_fastapi/auth/managers/simple/(datamodels|routes|services|openapi)/.*\.py$ + require_serial: true - id: update-er-diagram name: Update ER diagram language: python @@ -1372,4 +1599,138 @@ repos: require_serial: true pass_filenames: false files: ^airflow-core/src/airflow/config_templates/config\.yml$ + - id: generate-airflowctl-help-images + name: Generate SVG from Airflow CTL Commands + entry: ./scripts/ci/pre_commit/capture_airflowctl_help.py + language: python + pass_filenames: false + files: + ^airflow-ctl/src/airflowctl/api/operations.py|airflow-ctl/src/airflowctl/ctl/commands/.*\.py$ + additional_dependencies: ['rich>=12.4.4', 'argcomplete>=1.10'] + - id: check-imports-in-providers + name: Check imports in providers + entry: ./scripts/ci/pre_commit/check_imports_in_providers.py + language: python + additional_dependencies: ['rich>=12.4.4', 'ruff==0.12.8'] + files: ^providers/.*/src/airflow/providers/.*version_compat.*\.py$ + require_serial: true + - id: provider-version-compat + name: Check for correct version_compat imports in providers + entry: ./scripts/ci/pre_commit/check_provider_version_compat.py + language: python + types: [python] + files: ^providers/.*/src/airflow/providers/.*\.py$ + require_serial: true + - id: check-airflow-version-checks-in-core + language: pygrep + name: No AIRFLOW_V_* imports in airflow-core + entry: "import AIRFLOW_V_" + files: ^airflow-core/.*\.py$ + pass_filenames: true + # TODO (@amoghrajesh): revisit last few in this list as they all rely on versioned secrets masker imports + exclude: > + (?x) + ^airflow-core/tests/integration/otel/dags/otel_test_dag_with_pause_between_tasks\.py$| + ^airflow-core/tests/integration/otel/dags/otel_test_dag_with_pause_in_task\.py$| + ^airflow-core/tests/integration/otel/test_otel\.py$| + ^airflow-core/tests/unit/core/test_configuration\.py$| + ^airflow-core/tests/unit/models/test_renderedtifields\.py$| + ^airflow-core/tests/unit/models/test_variable\.py$ + - id: check-sdk-imports + name: Check for SDK imports in core files + entry: ./scripts/ci/pre_commit/check_sdk_imports.py + language: python + types: [python] + files: ^airflow-core/src/airflow/ + exclude: | + (?x) + # Allow SDK imports in these legitimate locations + ^airflow-core/src/airflow/example_dags/.*\.py$| + + # TODO: These files need to be refactored to remove SDK coupling + ^airflow-core/src/airflow/__init__\.py$| + ^airflow-core/src/airflow/models/__init__\.py$| + ^airflow-core/src/airflow/api/common/mark_tasks\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/datamodels/assets\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/services/public/connections\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections\.py$| + ^airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid\.py$| + ^airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl\.py$| + 
^airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances\.py$| + ^airflow-core/src/airflow/api_fastapi/logging/decorators\.py$| + ^airflow-core/src/airflow/assets/evaluation\.py$| + ^airflow-core/src/airflow/assets/manager\.py$| + ^airflow-core/src/airflow/cli/commands/connection_command\.py$| + ^airflow-core/src/airflow/cli/commands/task_command\.py$| + ^airflow-core/src/airflow/configuration\.py$| + ^airflow-core/src/airflow/dag_processing/collection\.py$| + ^airflow-core/src/airflow/dag_processing/manager\.py$| + ^airflow-core/src/airflow/dag_processing/processor\.py$| + ^airflow-core/src/airflow/datasets/metadata\.py$| + ^airflow-core/src/airflow/exceptions\.py$| + ^airflow-core/src/airflow/executors/local_executor\.py$| + ^airflow-core/src/airflow/jobs/triggerer_job_runner\.py$| + ^airflow-core/src/airflow/lineage/hook\.py$| + ^airflow-core/src/airflow/listeners/spec/asset\.py$| + ^airflow-core/src/airflow/listeners/spec/taskinstance\.py$| + ^airflow-core/src/airflow/logging/remote\.py$| + ^airflow-core/src/airflow/models/asset\.py$| + ^airflow-core/src/airflow/models/baseoperator\.py$| + ^airflow-core/src/airflow/models/connection\.py$| + ^airflow-core/src/airflow/models/dag\.py$| + ^airflow-core/src/airflow/models/deadline\.py$| + ^airflow-core/src/airflow/models/dagbag\.py$| + ^airflow-core/src/airflow/models/dagrun\.py$| + ^airflow-core/src/airflow/models/mappedoperator\.py$| + ^airflow-core/src/airflow/models/operator\.py$| + ^airflow-core/src/airflow/models/param\.py$| + ^airflow-core/src/airflow/models/serialized_dag\.py$| + ^airflow-core/src/airflow/models/taskinstance\.py$| + ^airflow-core/src/airflow/models/taskinstancekey\.py$| + ^airflow-core/src/airflow/models/taskmap\.py$| + ^airflow-core/src/airflow/models/taskreschedule\.py$| + ^airflow-core/src/airflow/models/variable\.py$| + ^airflow-core/src/airflow/operators/subdag\.py$| + ^airflow-core/src/airflow/serialization/dag\.py$| + ^airflow-core/src/airflow/serialization/enums\.py$| + ^airflow-core/src/airflow/serialization/serialized_objects\.py$| + ^airflow-core/src/airflow/task/task_runner/bash_task_runner\.py$| + ^airflow-core/src/airflow/task/task_runner/standard_task_runner\.py$| + ^airflow-core/src/airflow/utils/dag_cycle_tester\.py$| + ^airflow-core/src/airflow/utils/dag_parsing_context\.py$| + ^airflow-core/src/airflow/utils/decorators\.py$| + ^airflow-core/src/airflow/utils/operator_helpers\.py$| + ^airflow-core/src/airflow/utils/session\.py$| + ^airflow-core/src/airflow/utils/task_group\.py$| + ^airflow-core/src/airflow/utils/trigger_rule\.py$| + ^airflow-core/src/airflow/utils/xcom\.py$| + ^airflow-core/src/airflow/providers_manager\.py$| + ^airflow-core/src/airflow/timetables/assets\.py$| + ^airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep\.py$| + ^airflow-core/src/airflow/utils/context\.py$| + ^airflow-core/src/airflow/models/taskmixin\.py$| + ^airflow-core/src/airflow/utils/edgemodifier\.py$| + ^airflow-core/src/airflow/utils/email\.py$| + ^airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep\.py$| + ^airflow-core/src/airflow/utils/helpers\.py$| + ^airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep\.py$| + ^airflow-core/src/airflow/utils/types\.py$| + ^airflow-core/src/airflow/utils/dag_edges\.py$| + ^airflow-core/src/airflow/utils/cli\.py$| + ^airflow-core/src/airflow/timetables/base\.py$| + ^airflow-core/src/airflow/utils/dot_renderer\.py$| + ^airflow-core/src/airflow/models/xcom_arg\.py$| + ^airflow-core/src/airflow/plugins_manager\.py$| + 
^airflow-core/src/airflow/models/xcom\.py$| + ^airflow-core/src/airflow/timetables/simple\.py$| + ^airflow-core/src/airflow/settings\.py$| + ^airflow-core/src/airflow/models/renderedtifields\.py$| + ^airflow-core/src/airflow/serialization/helpers\.py$| + ^airflow-core/src/airflow/models/expandinput\.py$ + additional_dependencies: ['rich>=12.4.4'] ## ONLY ADD PRE-COMMITS HERE THAT REQUIRE CI IMAGE diff --git a/.rat-excludes b/.rat-excludes index 75a85f9873fa1..48939c413c9b3 100644 --- a/.rat-excludes +++ b/.rat-excludes @@ -160,10 +160,22 @@ PKG-INFO # Openapi files .openapi-generator-ignore version.txt -v1*.yaml +v2*.yaml _private_ui*.yaml # Front end generated files api-generated.ts openapi-gen pnpm-lock.yaml + +# python generated file +generated.py +auth_generated.py + +# hash files +www-hash.txt + +# go setup files +go.mod +go.sum +mocks/* diff --git a/.readthedocs.yml b/.readthedocs.yml index c276d282294ca..ddc2ffd3681fe 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ formats: [] sphinx: configuration: devel-common/src/docs/rtd-deprecation/conf.py python: - version: "3.9" + version: "3.10" install: - method: pip path: . diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000000000..724896d268710 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,47 @@ + + +# AGENTS instructions + +The main developer documentation lives in the `contributing-docs` directory. The following points summarise +how to set up the environment, run checks, build docs and follow the PR workflow. + +## Local virtualenv and Breeze + +- [`07_local_virtualenv.rst`](contributing-docs/07_local_virtualenv.rst) explains how to prepare a local Python environment using `uv`. The tool creates and syncs a `.venv` and installs dependencies with commands such as `uv venv` and `uv sync`. +- [`06_development_environments.rst`](contributing-docs/06_development_environments.rst) compares the local virtualenv with the Docker based Breeze environment. Breeze replicates CI and includes services like databases for integration tests. + +## Pre-commit hooks + +- Installation and usage of `pre-commit` are described in [`03_contributors_quick_start.rst`](contributing-docs/03_contributors_quick_start.rst). Install with `uv tool install pre-commit --with pre-commit-uv` and run checks via `pre-commit run --all-files`. +- [`08_static_code_checks.rst`](contributing-docs/08_static_code_checks.rst) provides more details on the available hooks and prerequisites. Enable the hooks with `pre-commit install` so they run automatically on each commit. + +## Running tests + +- [`03_contributors_quick_start.rst`](contributing-docs/03_contributors_quick_start.rst) shows running tests inside Breeze. Use `pytest` inside the container for individual files or invoke `breeze testing` commands to run full suites, e.g. `breeze --backend postgres --python 3.10 testing tests --test-type All`. + +## Building documentation + +- Documentation can be built locally using `uv run --group docs build-docs` as described in [`11_documentation_building.rst`](contributing-docs/11_documentation_building.rst). Within Breeze the equivalent command is `breeze build-docs`. + +## Pull request guidelines + +- Follow the PR guidance in [`05_pull_requests.rst`](contributing-docs/05_pull_requests.rst). Always add tests, keep your branch rebased instead of merged, and adhere to the commit message recommendations from [cbea.ms/git-commit](https://cbea.ms/git-commit/). 
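+
+## Example: a typical local session
+
+A minimal sketch that chains the commands referenced above (it assumes `uv` and Breeze are already installed and that you start from the repository root):
+
+```bash
+# Prepare and sync the local virtualenv (07_local_virtualenv.rst)
+uv venv
+uv sync
+
+# Install pre-commit with the uv backend and run all static checks once (03_contributors_quick_start.rst)
+uv tool install pre-commit --with pre-commit-uv
+pre-commit install
+pre-commit run --all-files
+
+# Run the full test suite inside Breeze against Postgres (03_contributors_quick_start.rst)
+breeze --backend postgres --python 3.10 testing tests --test-type All
+
+# Build the documentation locally (11_documentation_building.rst)
+uv run --group docs build-docs
+```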
+ +For advanced topics such as packaging providers and API versioning see [`12_provider_distributions.rst`](contributing-docs/12_provider_distributions.rst) and [`19_execution_api_versioning.rst`](contributing-docs/19_execution_api_versioning.rst). diff --git a/Dockerfile b/Dockerfile index 37ac245da5b20..de7624482e862 100644 --- a/Dockerfile +++ b/Dockerfile @@ -46,18 +46,18 @@ ARG AIRFLOW_UID="50000" ARG AIRFLOW_USER_HOME_DIR=/home/airflow # latest released version here -ARG AIRFLOW_VERSION="2.10.5" +ARG AIRFLOW_VERSION="3.0.4" -ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" +ARG PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" # You can swap comments between those two args to test pip from the main version # When you attempt to test if the version of `pip` from specified branch works for our builds # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` -ARG AIRFLOW_PIP_VERSION=25.0.1 +ARG AIRFLOW_PIP_VERSION=25.2 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_SETUPTOOLS_VERSION=78.1.0 -ARG AIRFLOW_UV_VERSION=0.6.13 +ARG AIRFLOW_SETUPTOOLS_VERSION=80.9.0 +ARG AIRFLOW_UV_VERSION=0.8.9 ARG AIRFLOW_USE_UV="false" ARG UV_HTTP_TIMEOUT="300" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" @@ -137,7 +137,7 @@ function get_runtime_apt_deps() { echo if [[ "${RUNTIME_APT_DEPS=}" == "" ]]; then RUNTIME_APT_DEPS="apt-transport-https apt-utils ca-certificates \ -curl dumb-init freetds-bin krb5-user libev4 libgeos-dev \ +curl dumb-init freetds-bin git krb5-user libev4 libgeos-dev \ ldap-utils libsasl2-2 libsasl2-modules libxmlsec1 locales ${debian_version_apt_deps} \ lsb-release openssh-client python3-selinux rsync sasl2-bin sqlite3 sudo unixodbc" export RUNTIME_APT_DEPS @@ -232,6 +232,24 @@ readonly MARIADB_LTS_VERSION="10.11" : "${INSTALL_MYSQL_CLIENT:?Should be true or false}" : "${INSTALL_MYSQL_CLIENT_TYPE:-mariadb}" +retry() { + local retries=3 + local count=0 + # adding delay of 10 seconds + local delay=10 + until "$@"; do + exit_code=$? + count=$((count + 1)) + if [[ $count -lt $retries ]]; then + echo "Command failed. Attempt $count/$retries. Retrying in ${delay}s..." + sleep $delay + else + echo "Command failed after $retries attempts." 
+ return $exit_code + fi + done +} + install_mysql_client() { if [[ "${1}" == "dev" ]]; then packages=("libmysqlclient-dev" "mysql-client") @@ -257,8 +275,8 @@ install_mysql_client() { echo "deb http://repo.mysql.com/apt/debian/ $(lsb_release -cs) mysql-${MYSQL_LTS_VERSION}" > \ /etc/apt/sources.list.d/mysql.list - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* @@ -302,8 +320,8 @@ install_mariadb_client() { /etc/apt/sources.list.d/mariadb.list # Make sure that dependencies from MariaDB repo are preferred over Debian dependencies printf "Package: *\nPin: release o=MariaDB\nPin-Priority: 999\n" > /etc/apt/preferences.d/mariadb - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* } @@ -455,14 +473,22 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" - export EXTRA_INSTALL_FLAGS="--group=dev" + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." && -f "./pyproject.toml" ]]; then + # for uv, only install the dev group when we install from sources + export EXTRA_INSTALL_FLAGS="--group=dev --no-binary lxml --no-binary xmlsec" + else + export EXTRA_INSTALL_FLAGS="--no-binary lxml --no-binary xmlsec" + fi export EXTRA_UNINSTALL_FLAGS="" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --resolution highest" export UPGRADE_IF_NEEDED="--upgrade" UV_CONCURRENT_DOWNLOADS=$(nproc --all) export UV_CONCURRENT_DOWNLOADS if [[ ${INCLUDE_PRE_RELEASE=} == "true" ]]; then - EXTRA_INSTALL_FLAGS="${EXTRA_INSTALL_FLAGS} --prerelease allow" + EXTRA_INSTALL_FLAGS="${EXTRA_INSTALL_FLAGS} --prerelease if-necessary" fi else echo @@ -470,7 +496,10 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="pip" export PACKAGING_TOOL_CMD="pip" - export EXTRA_INSTALL_FLAGS="--root-user-action ignore" + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + export EXTRA_INSTALL_FLAGS="--root-user-action ignore --no-binary lxml,xmlsec" export EXTRA_UNINSTALL_FLAGS="--yes" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --upgrade-strategy eager" export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed" @@ -491,7 +520,7 @@ function common::get_airflow_version_specification() { function common::get_constraints_location() { # auto-detect Airflow-constraint reference and location if [[ -z "${AIRFLOW_CONSTRAINTS_REFERENCE=}" ]]; then - if [[ ${AIRFLOW_VERSION} =~ v?2.* && ! ${AIRFLOW_VERSION} =~ .*dev.* ]]; then + if [[ ${AIRFLOW_VERSION} =~ v?2.* || ${AIRFLOW_VERSION} =~ v?3.* ]]; then AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION} else AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH} @@ -650,7 +679,7 @@ if [[ $(id -u) == "0" ]]; then echo echo "${COLOR_RED}You are running pip as root.
Please use 'airflow' user to run pip!${COLOR_RESET}" echo - echo "${COLOR_YELLOW}See: https://airflow.apache.org/docs/docker-stack/build.html#adding-a-new-pypi-package${COLOR_RESET}" + echo "${COLOR_YELLOW}See: https://airflow.apache.org/docs/docker-stack/build.html#adding-new-pypi-packages-individually${COLOR_RESET}" echo exit 1 fi @@ -852,8 +881,12 @@ function install_from_sources() { echo echo "${COLOR_BLUE}Attempting to upgrade all packages to highest versions.${COLOR_RESET}" echo + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 set -x - uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags} + uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen \ + --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec else # We only use uv here, but installing using constraints is not supported with `uv sync`, so we # do not use ``uv sync`` because we are not committing and using uv.lock yet. @@ -871,6 +904,7 @@ function install_from_sources() { installation_command_flags=" --editable .[${AIRFLOW_EXTRAS}] \ --editable ./airflow-core --editable ./task-sdk --editable ./airflow-ctl \ --editable ./kubernetes-tests --editable ./docker-tests --editable ./helm-tests \ + --editable ./task-sdk-tests \ --editable ./devel-common[all] --editable ./dev \ --group dev --group docs --group docs-gen --group leveldb" local -a projects_with_devel_dependencies @@ -910,8 +944,12 @@ function install_from_sources() { echo echo "${COLOR_BLUE}Falling back to no-constraints installation.${COLOR_RESET}" echo + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 set -x - uv sync --all-packages --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags} + uv sync --all-packages --group dev --group docs --group docs-gen \ + --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec set +x fi fi @@ -1290,7 +1328,7 @@ function check_uid_gid() { >&2 echo " This is to make sure you can run the image with an arbitrary UID in the future." >&2 echo >&2 echo " See more about it in the Airflow's docker image documentation" - >&2 echo " http://airflow.apache.org/docs/docker-stack/entrypoint" + >&2 echo " https://airflow.apache.org/docs/docker-stack/entrypoint.html" >&2 echo # We still allow the image to run with `airflow` user. return @@ -1304,7 +1342,7 @@ function check_uid_gid() { >&2 echo " This is to make sure you can run the image with an arbitrary UID."
>&2 echo >&2 echo " See more about it in the Airflow's docker image documentation" - >&2 echo " http://airflow.apache.org/docs/docker-stack/entrypoint" + >&2 echo " https://airflow.apache.org/docs/docker-stack/entrypoint.html" # This will not work so we fail hard exit 1 fi @@ -1599,12 +1637,12 @@ COPY --chown=airflow:0 ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO} ARG ADDITIONAL_PYTHON_DEPS="" -ARG VERSION_SUFFIX_FOR_PYPI="" +ARG VERSION_SUFFIX="" ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS} \ INSTALL_DISTRIBUTIONS_FROM_CONTEXT=${INSTALL_DISTRIBUTIONS_FROM_CONTEXT} \ USE_CONSTRAINTS_FOR_CONTEXT_DISTRIBUTIONS=${USE_CONSTRAINTS_FOR_CONTEXT_DISTRIBUTIONS} \ - VERSION_SUFFIX_FOR_PYPI=${VERSION_SUFFIX_FOR_PYPI} + VERSION_SUFFIX=${VERSION_SUFFIX} WORKDIR ${AIRFLOW_HOME} diff --git a/Dockerfile.ci b/Dockerfile.ci index b722ff0132935..967e2ebfebc96 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -16,13 +16,13 @@ # # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. # -ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" +ARG BASE_IMAGE="debian:bookworm-slim" ############################################################################################## # This is the script image where we keep all inlined bash scripts needed in other segments -# We use PYTHON_BASE_IMAGE to make sure that the scripts are different for different platforms. +# We use BASE_IMAGE to make sure that the scripts are different for different platforms. ############################################################################################## -FROM ${PYTHON_BASE_IMAGE} as scripts +FROM ${BASE_IMAGE} as scripts ############################################################################################## # Please DO NOT modify the inlined scripts manually. The content of those files will be @@ -31,22 +31,27 @@ FROM ${PYTHON_BASE_IMAGE} as scripts # make the PROD Dockerfile standalone ############################################################################################## -# The content below is automatically copied from scripts/docker/install_os_dependencies.sh +# The content below is automatically copied from scripts/docker/install_os_dependencies_ci.sh COPY <<"EOF" /install_os_dependencies_ci.sh #!/usr/bin/env bash set -euo pipefail if [[ "$#" != 1 ]]; then - echo "ERROR! There should be 'runtime' or 'dev' parameter passed as argument.". + echo "ERROR! There should be 'runtime', 'ci' or 'dev' parameter passed as argument." exit 1 fi +AIRFLOW_PYTHON_VERSION=${AIRFLOW_PYTHON_VERSION:-v3.10.10} +GOLANG_MAJOR_MINOR_VERSION=${GOLANG_MAJOR_MINOR_VERSION:-1.24.4} + if [[ "${1}" == "runtime" ]]; then INSTALLATION_TYPE="RUNTIME" elif [[ "${1}" == "dev" ]]; then - INSTALLATION_TYPE="dev" + INSTALLATION_TYPE="DEV" +elif [[ "${1}" == "ci" ]]; then + INSTALLATION_TYPE="CI" else - echo "ERROR! Wrong argument. Passed ${1} and it should be one of 'runtime' or 'dev'.". + echo "ERROR! Wrong argument. Passed ${1} and it should be one of 'runtime', 'ci' or 'dev'."
exit 1 fi @@ -56,7 +61,10 @@ function get_dev_apt_deps() { freetds-bin freetds-dev git graphviz graphviz-dev krb5-user ldap-utils libev4 libev-dev libffi-dev libgeos-dev \ libkrb5-dev libldap2-dev libleveldb1d libleveldb-dev libsasl2-2 libsasl2-dev libsasl2-modules \ libssl-dev libxmlsec1 libxmlsec1-dev locales lsb-release openssh-client pkgconf sasl2-bin \ -software-properties-common sqlite3 sudo unixodbc unixodbc-dev zlib1g-dev" +software-properties-common sqlite3 sudo unixodbc unixodbc-dev zlib1g-dev \ +gdb lcov pkg-config libbz2-dev libgdbm-dev libgdbm-compat-dev liblzma-dev \ +libncurses5-dev libreadline6-dev libsqlite3-dev lzma lzma-dev tk-dev uuid-dev \ +libzstd-dev" export DEV_APT_DEPS fi } @@ -76,7 +84,7 @@ function get_runtime_apt_deps() { echo if [[ "${RUNTIME_APT_DEPS=}" == "" ]]; then RUNTIME_APT_DEPS="apt-transport-https apt-utils ca-certificates \ -curl dumb-init freetds-bin krb5-user libev4 libgeos-dev \ +curl dumb-init freetds-bin git krb5-user libev4 libgeos-dev \ ldap-utils libsasl2-2 libsasl2-modules libxmlsec1 locales ${debian_version_apt_deps} \ lsb-release openssh-client python3-selinux rsync sasl2-bin sqlite3 sudo unixodbc" export RUNTIME_APT_DEPS @@ -143,14 +151,36 @@ function install_debian_runtime_dependencies() { rm -rf /var/lib/apt/lists/* /var/log/* } +function install_python() { + git clone --branch "${AIRFLOW_PYTHON_VERSION}" --depth 1 https://github.com/python/cpython.git + cd cpython + ./configure --enable-optimizations + make -s -j "$(nproc)" all + make -s -j "$(nproc)" install + ln -s /usr/local/bin/python3 /usr/local/bin/python + ln -s /usr/local/bin/pip3 /usr/local/bin/pip + cd .. + rm -rf cpython +} + +function install_golang() { + curl "https://dl.google.com/go/go${GOLANG_MAJOR_MINOR_VERSION}.linux-$(dpkg --print-architecture).tar.gz" -o "go${GOLANG_MAJOR_MINOR_VERSION}.linux.tar.gz" + rm -rf /usr/local/go && tar -C /usr/local -xzf go"${GOLANG_MAJOR_MINOR_VERSION}".linux.tar.gz +} + if [[ "${INSTALLATION_TYPE}" == "RUNTIME" ]]; then get_runtime_apt_deps install_debian_runtime_dependencies install_docker_cli else + get_dev_apt_deps install_debian_dev_dependencies + install_python + if [[ "${INSTALLATION_TYPE}" == "CI" ]]; then + install_golang + fi install_docker_cli fi EOF @@ -171,6 +201,24 @@ readonly MARIADB_LTS_VERSION="10.11" : "${INSTALL_MYSQL_CLIENT:?Should be true or false}" : "${INSTALL_MYSQL_CLIENT_TYPE:-mariadb}" +retry() { + local retries=3 + local count=0 + # adding delay of 10 seconds + local delay=10 + until "$@"; do + exit_code=$? + count=$((count + 1)) + if [[ $count -lt $retries ]]; then + echo "Command failed. Attempt $count/$retries. Retrying in ${delay}s..." + sleep $delay + else + echo "Command failed after $retries attempts." 
+ return $exit_code + fi + done +} + install_mysql_client() { if [[ "${1}" == "dev" ]]; then packages=("libmysqlclient-dev" "mysql-client") @@ -196,8 +244,8 @@ install_mysql_client() { echo "deb http://repo.mysql.com/apt/debian/ $(lsb_release -cs) mysql-${MYSQL_LTS_VERSION}" > \ /etc/apt/sources.list.d/mysql.list - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* @@ -241,8 +289,8 @@ install_mariadb_client() { /etc/apt/sources.list.d/mariadb.list # Make sure that dependencies from MariaDB repo are preferred over Debian dependencies printf "Package: *\nPin: release o=MariaDB\nPin-Priority: 999\n" > /etc/apt/preferences.d/mariadb - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* } @@ -394,14 +442,22 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" - export EXTRA_INSTALL_FLAGS="--group=dev" + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." && -f "./pyproject.toml" ]]; then + # for uv, only install the dev group when we install from sources + export EXTRA_INSTALL_FLAGS="--group=dev --no-binary lxml --no-binary xmlsec" + else + export EXTRA_INSTALL_FLAGS="--no-binary lxml --no-binary xmlsec" + fi export EXTRA_UNINSTALL_FLAGS="" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --resolution highest" export UPGRADE_IF_NEEDED="--upgrade" UV_CONCURRENT_DOWNLOADS=$(nproc --all) export UV_CONCURRENT_DOWNLOADS if [[ ${INCLUDE_PRE_RELEASE=} == "true" ]]; then - EXTRA_INSTALL_FLAGS="${EXTRA_INSTALL_FLAGS} --prerelease allow" + EXTRA_INSTALL_FLAGS="${EXTRA_INSTALL_FLAGS} --prerelease if-necessary" fi else echo @@ -409,7 +465,10 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="pip" export PACKAGING_TOOL_CMD="pip" - export EXTRA_INSTALL_FLAGS="--root-user-action ignore" + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + export EXTRA_INSTALL_FLAGS="--root-user-action ignore --no-binary lxml,xmlsec" export EXTRA_UNINSTALL_FLAGS="--yes" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --upgrade-strategy eager" export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed" @@ -430,7 +489,7 @@ function common::get_airflow_version_specification() { function common::get_constraints_location() { # auto-detect Airflow-constraint reference and location if [[ -z "${AIRFLOW_CONSTRAINTS_REFERENCE=}" ]]; then - if [[ ${AIRFLOW_VERSION} =~ v?2.* && !
${AIRFLOW_VERSION} =~ .*dev.* ]]; then + if [[ ${AIRFLOW_VERSION} =~ v?2.* || ${AIRFLOW_VERSION} =~ v?3.* ]]; then AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION} else AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH} @@ -605,8 +664,12 @@ function install_from_sources() { echo echo "${COLOR_BLUE}Attempting to upgrade all packages to highest versions.${COLOR_RESET}" echo + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 set -x - uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags} + uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen \ + --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec else # We only use uv here, but installing using constraints is not supported with `uv sync`, so we # do not use ``uv sync`` because we are not committing and using uv.lock yet. @@ -624,6 +687,7 @@ function install_from_sources() { installation_command_flags=" --editable .[${AIRFLOW_EXTRAS}] \ --editable ./airflow-core --editable ./task-sdk --editable ./airflow-ctl \ --editable ./kubernetes-tests --editable ./docker-tests --editable ./helm-tests \ + --editable ./task-sdk-tests \ --editable ./devel-common[all] --editable ./dev \ --group dev --group docs --group docs-gen --group leveldb" local -a projects_with_devel_dependencies @@ -663,8 +727,12 @@ function install_from_sources() { echo echo "${COLOR_BLUE}Falling back to no-constraints installation.${COLOR_RESET}" echo + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 set -x - uv sync --all-packages --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags} + uv sync --all-packages --group dev --group docs --group docs-gen \ + --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec set +x fi fi @@ -804,11 +872,15 @@ EOF # The content below is automatically copied from scripts/docker/entrypoint_ci.sh COPY <<"EOF" /entrypoint_ci.sh #!/usr/bin/env bash -if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then - set -x -fi - +function set_verbose() { + if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then + set -x + else + set +x + fi +} +set_verbose . "${AIRFLOW_SOURCES:-/opt/airflow}"/scripts/in_container/_in_container_script_init.sh LD_PRELOAD="/usr/lib/$(uname -m)-linux-gnu/libstdc++.so.6" @@ -818,7 +890,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.10} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} @@ -826,6 +898,10 @@ mkdir "${AIRFLOW_HOME}/sqlite" -p || true ASSET_COMPILATION_WAIT_MULTIPLIER=${ASSET_COMPILATION_WAIT_MULTIPLIER:=1} +if [[ "${CI=}" == "true" ]]; then + export COLUMNS="202" +fi + .
"${IN_CONTAINER_DIR}/check_connectivity.sh" function wait_for_asset_compilation() { @@ -899,7 +975,7 @@ function environment_initialization() { CI=${CI:="false"} # Added to have run-tests on path - export PATH=${PATH}:${AIRFLOW_SOURCES} + export PATH=${PATH}:${AIRFLOW_SOURCES}:/usr/local/go/bin/ mkdir -pv "${AIRFLOW_HOME}/logs/" @@ -908,6 +984,11 @@ function environment_initialization() { set +e + # shellcheck source=scripts/in_container/configure_environment.sh + . "${IN_CONTAINER_DIR}/configure_environment.sh" + # shellcheck source=scripts/in_container/run_init_script.sh + . "${IN_CONTAINER_DIR}/run_init_script.sh" + "${IN_CONTAINER_DIR}/check_environment.sh" ENVIRONMENT_EXIT_CODE=$? set -e @@ -917,6 +998,7 @@ function environment_initialization() { echo exit ${ENVIRONMENT_EXIT_CODE} fi + mkdir -p /usr/lib/google-cloud-sdk/bin touch /usr/lib/google-cloud-sdk/bin/gcloud ln -s -f /usr/bin/gcloud /usr/lib/google-cloud-sdk/bin/gcloud @@ -942,14 +1024,14 @@ function environment_initialization() { ssh-keyscan -H localhost >> ~/.ssh/known_hosts 2>/dev/null fi - # shellcheck source=scripts/in_container/configure_environment.sh - . "${IN_CONTAINER_DIR}/configure_environment.sh" - - # shellcheck source=scripts/in_container/run_init_script.sh - . "${IN_CONTAINER_DIR}/run_init_script.sh" - cd "${AIRFLOW_SOURCES}" + # Temporarily add /opt/airflow/providers/standard/tests to PYTHONPATH in order to see example dags + # in the UI when testing in Breeze. This might be solved differently in the future + if [[ -d /opt/airflow/providers/standard/tests ]]; then + export PYTHONPATH=${PYTHONPATH=}:/opt/airflow/providers/standard/tests + fi + if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES} wait_for_asset_compilation @@ -963,13 +1045,13 @@ function handle_mount_sources() { echo echo "${COLOR_BLUE}Mounted sources are removed, cleaning up mounted dist-info files${COLOR_RESET}" echo - rm -rf /usr/local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/apache_airflow*.dist-info/ + rm -rf /usr/local/lib/python"${PYTHON_MAJOR_MINOR_VERSION}"/site-packages/apache_airflow*.dist-info/ fi } function determine_airflow_to_use() { USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" - if [[ ${USE_AIRFLOW_VERSION} == "" && ${USE_DISTRIBUTIONS_FROM_DIST=} != "true" ]]; then + if [[ "${USE_AIRFLOW_VERSION}" == "" && "${USE_DISTRIBUTIONS_FROM_DIST}" != "true" ]]; then export PYTHONPATH=${AIRFLOW_SOURCES} echo echo "${COLOR_BLUE}Using airflow version from current sources${COLOR_RESET}" @@ -985,7 +1067,7 @@ function determine_airflow_to_use() { echo "${COLOR_BLUE}Uninstalling all packages first${COLOR_RESET}" echo # shellcheck disable=SC2086 - ${PACKAGING_TOOL_CMD} freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | \ + ${PACKAGING_TOOL_CMD} freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | grep -v "@" | \ xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} # Now install rich and click first to use the installation script # shellcheck disable=SC2086 @@ -997,8 +1079,10 @@ function determine_airflow_to_use() { echo # Use uv run to install necessary dependencies automatically # in the future we will be able to use uv sync when `uv.lock` is supported - uv run /opt/airflow/scripts/in_container/install_development_dependencies.py \ - --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt + # for use in parallel runs in docker containers, --no-cache is
needed - otherwise there is a + # possibility of overriding temporary environments by multiple parallel processes + uv run --no-cache /opt/airflow/scripts/in_container/install_development_dependencies.py \ + --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-"${PYTHON_MAJOR_MINOR_VERSION}".txt # Some packages might leave legacy typing module which causes test issues # shellcheck disable=SC2086 ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} typing || true @@ -1025,12 +1109,22 @@ function check_boto_upgrade() { echo echo "${COLOR_BLUE}Upgrading boto3, botocore to latest version to run Amazon tests with them${COLOR_RESET}" echo - set -x # shellcheck disable=SC2086 ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} aiobotocore s3fs || true # shellcheck disable=SC2086 - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade boto3 botocore - set +x + ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "boto3<1.38.3" "botocore<1.38.3" +} + +function check_upgrade_sqlalchemy() { + # The Python version constraint is a TEMPORARY WORKAROUND to exclude all FAB tests. It should be removed once we + # upgrade FAB to v5 (PR #50960). + if [[ "${UPGRADE_SQLALCHEMY}" != "true" || ${PYTHON_MAJOR_MINOR_VERSION} != "3.13" ]]; then + return + fi + echo + echo "${COLOR_BLUE}Upgrading sqlalchemy to the latest version to run tests with it${COLOR_RESET}" + echo + uv sync --all-packages --no-install-package apache-airflow-providers-fab --resolution highest } function check_downgrade_sqlalchemy() { @@ -1038,12 +1132,12 @@ function check_downgrade_sqlalchemy() { return fi local min_sqlalchemy_version - min_sqlalchemy_version=$(grep "sqlalchemy>=" airflow-core/pyproject.toml | sed "s/.*>=\([0-9\.]*\).*/\1/" | xargs) + min_sqlalchemy_version=$(grep "sqlalchemy\[asyncio\]>=" airflow-core/pyproject.toml | sed "s/.*>=\([0-9\.]*\).*/\1/" | xargs) echo echo "${COLOR_BLUE}Downgrading sqlalchemy to minimum supported version: ${min_sqlalchemy_version}${COLOR_RESET}" echo # shellcheck disable=SC2086 - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "sqlalchemy==${min_sqlalchemy_version}" + ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "sqlalchemy[asyncio]==${min_sqlalchemy_version}" pip check } @@ -1066,13 +1160,6 @@ function check_run_tests() { return fi - if [[ ${REMOVE_ARM_PACKAGES:="false"} == "true" ]]; then - # Test what happens if we do not have ARM packages installed. - # This is useful to see if pytest collection works without ARM packages which is important - # for the MacOS M1 users running tests in their ARM machines with `breeze testing *-tests` command - python "${IN_CONTAINER_DIR}/remove_arm_packages.py" - fi - if [[ ${TEST_GROUP:=""} == "system" ]]; then exec "${IN_CONTAINER_DIR}/run_system_tests.sh" "${@}" else @@ -1098,13 +1185,19 @@ function check_force_lowest_dependencies() { exit 0 fi cd "${AIRFLOW_SOURCES}/providers/${provider_id/.//}" || exit 1 - uv sync --resolution lowest-direct + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one).
+ # See https://bugs.launchpad.net/lxml/+bug/2110068 + uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec --all-extras else echo echo "${COLOR_BLUE}Forcing dependencies to lowest versions for Airflow.${COLOR_RESET}" echo cd "${AIRFLOW_SOURCES}/airflow-core" - uv sync --resolution lowest-direct + # --no-binary is needed in order to avoid libxml and xmlsec using different versions of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses the system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec --all-extras fi } @@ -1115,23 +1208,32 @@ function check_airflow_python_client_installation() { python "${IN_CONTAINER_DIR}/install_airflow_python_client.py" } +function initialize_db() { + # If we are going to start the api server OR we are a system test (which may or may not start the api server, + # depending on the Airflow version being used to run the tests), then migrate the DB. + if [[ ${START_API_SERVER_WITH_EXAMPLES=} == "true" || ${TEST_GROUP:=""} == "system" ]]; then + echo + echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" + echo + airflow db migrate + echo + echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" + fi +} + function start_api_server_with_examples(){ - # check if we should not start the api server with examples by checking if both - # START_API_SERVER_WITH_EXAMPLES is false AND the TEST_GROUP env var is not equal to "system" + USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" + # Do not start the api server unless START_API_SERVER_WITH_EXAMPLES is true or the TEST_GROUP env var is + # equal to "system". if [[ ${START_API_SERVER_WITH_EXAMPLES=} != "true" && ${TEST_GROUP:=""} != "system" ]]; then return fi + # If the Airflow version to use is set and it is below 3.0.0 (which does not have the API server anyway), also return + if [[ ${USE_AIRFLOW_VERSION} != "" && ${USE_AIRFLOW_VERSION} < "3.0.0" ]]; then + return + fi export AIRFLOW__CORE__LOAD_EXAMPLES=True - export AIRFLOW__WEBSERVER__EXPOSE_CONFIG=True - echo - echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" - echo - airflow db migrate - echo - echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" - echo - echo "${COLOR_BLUE}Parsing example dags${COLOR_RESET}" - echo + export AIRFLOW__API__EXPOSE_CONFIG=True airflow dags reserialize echo "Example dags parsing finished" if airflow config get-value core auth_manager | grep -q "FabAuthManager"; then @@ -1164,10 +1266,12 @@ handle_mount_sources determine_airflow_to_use environment_initialization check_boto_upgrade +check_upgrade_sqlalchemy check_downgrade_sqlalchemy check_downgrade_pendulum check_force_lowest_dependencies check_airflow_python_client_installation +initialize_db start_api_server_with_examples check_run_tests "${@}" @@ -1186,23 +1290,23 @@ COPY <<"EOF" /entrypoint_exec.sh exec /bin/bash "${@}" EOF -FROM ${PYTHON_BASE_IMAGE} as main +FROM ${BASE_IMAGE} as main # Nolog bash flag is currently ignored - but you can replace it with other flags (for example # xtrace - to show commands executed) SHELL ["/bin/bash", "-o", "pipefail", "-o", "errexit", "-o", "nounset", "-o", "nolog", "-c"] -ARG PYTHON_BASE_IMAGE +ARG BASE_IMAGE ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" # By increasing this number we can force a rebuild of all dependencies. # NOTE!
When you want to make sure dependencies are installed from scratch in your PR after removing # some dependencies, you also need to set "disable image cache" in your PR to make sure the image is # not built using the "main" version of those dependencies. -ARG DEPENDENCIES_EPOCH_NUMBER="14" +ARG DEPENDENCIES_EPOCH_NUMBER="15" # Make sure noninteractive debian install is used and language variables set -ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ +ENV BASE_IMAGE=${BASE_IMAGE} \ DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 \ DEPENDENCIES_EPOCH_NUMBER=${DEPENDENCIES_EPOCH_NUMBER} \ @@ -1213,7 +1317,7 @@ ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ UV_CACHE_DIR=/root/.cache/uv -RUN echo "Base image version: ${PYTHON_BASE_IMAGE}" +RUN echo "Base image version: ${BASE_IMAGE}" ARG DEV_APT_COMMAND="" ARG ADDITIONAL_DEV_APT_COMMAND="" @@ -1228,8 +1332,13 @@ ENV DEV_APT_COMMAND=${DEV_APT_COMMAND} \ ADDITIONAL_DEV_APT_DEPS=${ADDITIONAL_DEV_APT_DEPS} \ ADDITIONAL_DEV_APT_COMMAND=${ADDITIONAL_DEV_APT_COMMAND} -COPY --from=scripts install_os_dependencies.sh /scripts/docker/ -RUN bash /scripts/docker/install_os_dependencies.sh dev +ARG AIRFLOW_PYTHON_VERSION=v3.10.18 +ENV AIRFLOW_PYTHON_VERSION=$AIRFLOW_PYTHON_VERSION +ENV GOLANG_MAJOR_MINOR_VERSION=1.24.6 + +COPY --from=scripts install_os_dependencies_ci.sh /scripts/docker/ + +RUN bash /scripts/docker/install_os_dependencies_ci.sh ci COPY --from=scripts common.sh /scripts/docker/ @@ -1353,12 +1462,12 @@ COPY --from=scripts common.sh install_packaging_tools.sh install_additional_depe # You can swap comments between those two args to test pip from the main version # When you attempt to test if the version of `pip` from specified branch works for our builds # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` -ARG AIRFLOW_PIP_VERSION=25.0.1 +ARG AIRFLOW_PIP_VERSION=25.2 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_SETUPTOOLS_VERSION=78.1.0 -ARG AIRFLOW_UV_VERSION=0.6.13 +ARG AIRFLOW_SETUPTOOLS_VERSION=80.9.0 +ARG AIRFLOW_UV_VERSION=0.8.9 # TODO(potiuk): automate with upgrade check (possibly) -ARG AIRFLOW_PRE_COMMIT_VERSION="4.2.0" +ARG AIRFLOW_PRE_COMMIT_VERSION="4.3.0" ARG AIRFLOW_PRE_COMMIT_UV_VERSION="4.1.4" ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ @@ -1368,8 +1477,8 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ UV_LINK_MODE=copy \ AIRFLOW_PRE_COMMIT_VERSION=${AIRFLOW_PRE_COMMIT_VERSION} -# The PATH is needed for PIPX to find the tools installed -ENV PATH="/root/.local/bin:${PATH}" +# The PATH is needed for PIPX to find the tools installed and cargo to build the wheels +ENV PATH="/root/.local/bin:/root/.cargo/bin:${PATH}" # Useful for creating a cache id based on the underlying architecture, preventing the use of cached python packages from # an incorrect architecture. @@ -1391,10 +1500,10 @@ COPY --from=scripts install_airflow_when_building_images.sh /scripts/docker/ COPY . 
${AIRFLOW_SOURCES}/ ARG UPGRADE_RANDOM_INDICATOR_STRING="" -ARG VERSION_SUFFIX_FOR_PYPI="" +ARG VERSION_SUFFIX="" ENV UPGRADE_RANDOM_INDICATOR_STRING=${UPGRADE_RANDOM_INDICATOR_STRING} \ - VERSION_SUFFIX_FOR_PYPI=${VERSION_SUFFIX_FOR_PYPI} + VERSION_SUFFIX=${VERSION_SUFFIX} # The goal of this line is to install the dependencies from the most current pyproject.toml from sources # This will usually be an incremental, small set of packages in the CI-optimized build, so it will be very fast diff --git a/INSTALL b/INSTALL index b7a6a51602623..59e596ca79350 100644 --- a/INSTALL +++ b/INSTALL @@ -229,15 +229,15 @@ to avoid "works-for-me" syndrome, where you use different versions of dependenci that are used in main CI tests and by other contributors. There are different constraint files for different Python versions. For example, this command will install -all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.9: +all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.10: uv pip install -e ".[devel,google]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.10.txt" Using the 'constraints-no-providers' constraint files, you can upgrade Airflow without paying attention to the provider's dependencies. This allows you to keep installed provider dependencies and install the latest supported ones using pure Airflow core. uv pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.10.txt" Note that you can also use `pip install` if you do not use `uv`. diff --git a/INSTALLING.md b/INSTALLING.md new file mode 100644 index 0000000000000..e62b66a7a2a0e --- /dev/null +++ b/INSTALLING.md @@ -0,0 +1,103 @@ + + +## Local Development Setup + +This section outlines a recommended approach for setting up a local development environment for Apache Airflow on macOS and Linux, primarily using pyenv for Python version management. + +> ⚠️ Avoid using either system-installed Python or Python from Homebrew, as these versions are often marked `externally-managed`, which restricts dependency installation. + +There are other ways to install Python and Airflow. However, if you want to set up an environment for Airflow development, `uv` is the only supported local development environment setup, because we are using `uv workspace` extensively. See [local virtualenv setup in contributing docs](https://github.com/apache/airflow/blob/main/contributing-docs/07_local_virtualenv.rst) for details. + +If you are just installing Airflow to run it locally, you can use other ways to set up your Python and virtualenv: `uv` is one of the options (refer to the `uv` documentation), but you can also use more traditional tools - for example `pyenv`. Note that it is recommended to install Airflow with constraints - at least initially - because this way you can install Airflow reproducibly. See [Installation from PyPI](https://airflow.apache.org/docs/apache-airflow/stable/installation/installing-from-pypi.html) for more details. + +### ✅ Setup using pyenv: + +1.
**Install pyenv (macOS and Linux)**: + +```bash +brew install pyenv +``` + +(Note: Homebrew is the recommended method on macOS. For Linux, you can typically install pyenv using the `pyenv-installer` script as detailed in the official documentation: [https://github.com/pyenv/pyenv#installation](https://github.com/pyenv/pyenv#installation).) + +2. **Install Python**: + +```bash +pyenv install 3.11.9 +pyenv global 3.11.9 +``` + +3. **Check Python version**: + +```bash +python --version +``` + +4. **Create and Activate a Virtual Environment**: Since Apache Airflow requires multiple dependencies, it's a good practice to isolate these dependencies in a virtual environment. + +- Create a virtual environment: + +```bash +python -m venv airflow_venv +``` + +- Activate the virtual environment: + +```bash +source airflow_venv/bin/activate +``` + +5. **Install Apache Airflow**: Apache Airflow is available on PyPI. To install it, you can use the following command in your terminal: + +```bash +pip install apache-airflow==3.0.0 --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.0/constraints-3.11.txt" +``` + +Note that installing with constraints - at least initially - is recommended for reproducible installation. It might sometimes happen that 3rd-party distributions are released and their latest versions break Airflow. Using constraints makes the installation reproducible with versions of dependencies that were "frozen" at the time of releasing Airflow. Note that you have to specify both the Airflow version and the Python version you are using. + +You can also specify additional extras when you want to install Airflow with additional providers: + +```bash +pip install apache-airflow[amazon,google]==3.0.0 --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.0/constraints-3.11.txt" +``` + +6. **Set the AIRFLOW_HOME Environment Variable**: Apache Airflow requires a directory to store configuration files, logs, and other data. Set the AIRFLOW_HOME variable to specify this directory. + +- Set the Airflow home directory: + +```bash +export AIRFLOW_HOME=~/airflow +``` + +7. **Run Airflow in standalone mode**: Apache Airflow runs several components, like the scheduler, web server, and API server, to manage workflows and show the UI. + +- To run Airflow in standalone mode (which will automatically start the required components): + +```bash +airflow standalone +``` + +8. **Access the Airflow Web UI**: Once the components are up and running, you can access the Airflow UI through your browser: + +- Open your browser and go to: + +```text +http://localhost:8080 +``` diff --git a/INTHEWILD.md b/INTHEWILD.md index 1fcd785c612b7..964dfdcc3cb87 100644 --- a/INTHEWILD.md +++ b/INTHEWILD.md @@ -31,6 +31,7 @@ Currently, **officially** using Airflow: 1. [90 Seconds](https://90seconds.tv/) [[@aaronmak](https://github.com/aaronmak)] 1. [99](https://99taxis.com) [[@fbenevides](https://github.com/fbenevides), [@gustavoamigo](https://github.com/gustavoamigo) & [@mmmaia](https://github.com/mmmaia)] 1. [Accenture](https://www.accenture.com/au-en) [[@nijanthanvijayakumar](https://github.com/nijanthanvijayakumar)] +1. [Acciona Energia France](https://solutions.acciona-energia.fr/) [[@MohamedEqinov](https://github.com/MohamedEqinov)] 1. [AdBOOST](https://www.adboost.sk) [[AdBOOST](https://github.com/AdBOOST)] 1. [Adobe](https://www.adobe.com/) [[@mishikaSingh](https://github.com/mishikaSingh), [@ramandumcs](https://github.com/ramandumcs), [@vardancse](https://github.com/vardancse)] 1.
[Adyen](https://www.adyen.com/) [[@jorricks](https://github.com/jorricks), [@MaicoTimmerman](https://github.com/MaicoTimmerman)] @@ -62,7 +63,7 @@ Currently, **officially** using Airflow: 1. [Asana](https://asana.com/) [[@chang](https://github.com/chang), [@dima-asana](https://github.com/dima-asana), [@jdavidheiser](https://github.com/jdavidheiser), [@ricardoandresrojas](https://github.com/ricardoandresrojas)] 1. [Astronomer](https://www.astronomer.io) [[@schnie](https://github.com/schnie), [@ashb](https://github.com/ashb), [@kaxil](https://github.com/kaxil), [@dimberman](https://github.com/dimberman), [@andriisoldatenko](https://github.com/andriisoldatenko), [@ryw](https://github.com/ryw), [@ryanahamilton](https://github.com/ryanahamilton), [@jhtimmins](https://github.com/jhtimmins), [@vikramkoka](https://github.com/vikramkoka), [@jedcunningham](https://github.com/jedcunningham), [@BasPH](https://github.com/basph), [@ephraimbuddy](https://github.com/ephraimbuddy), [@feluelle](https://github.com/feluelle)] 1. [Audiomack](https://audiomack.com) [[@billcrook](https://github.com/billcrook)] -1. [Auth0](https://auth0.com) [[@scottypate](https://github.com/scottypate)], [[@dm03514](https://github.com/dm03514)], [[@karangale](https://github.com/karangale)] +1. [Auth0](https://auth0.com) [[@scottypate](https://github.com/scottypate), [@dm03514](https://github.com/dm03514), [@karangale](https://github.com/karangale)] 1. [Autodesk](https://autodesk.com) 1. [Automattic](https://automattic.com/) [[@anandnalya](https://github.com/anandnalya), [@bperson](https://github.com/bperson), [@khrol](https://github.com/Khrol), [@xyu](https://github.com/xyu)] 1. [Avesta Technologies](https://avestatechnologies.com) [[@TheRum](https://github.com/TheRum)] @@ -90,10 +91,10 @@ Currently, **officially** using Airflow: 1. [BlaBlaCar](https://www.blablacar.com) [[@puckel](https://github.com/puckel) & [@wmorin](https://github.com/wmorin)] 1. [Blacklane](https://www.blacklane.com) [[@serkef](https://github.com/serkef)] 1. [Bloc](https://www.bloc.io) [[@dpaola2](https://github.com/dpaola2)] -1. [Bloomberg](https://www.techatbloomberg.com) [[@skandala23] (https://github.com/skandala23) & [@vfeldsher](https://https://github.com/vfeldsher)] +1. [Bloomberg](https://www.techatbloomberg.com) [[@skandala23](https://github.com/skandala23) & [@vfeldsher](https://github.com/vfeldsher)] 1. [Bloomreach](https://www.bloomreach.com/) [[@neelborooah](https://github.com/neelborooah) & [@debodirno](https://github.com/debodirno) & [@ayushmnnit](https://github.com/ayushmnnit)] 1. [Blue Yonder](http://www.blue-yonder.com) [[@blue-yonder](https://github.com/blue-yonder)] -1. [Blue3 Investimentos](https://blue3investimentos.com.br) [[@ericcoleta] (https://github.com/ericcoleta) & [@plutaniano](https://github.com/plutaniano)] +1. [Blue3 Investimentos](https://blue3investimentos.com.br) [[@ericcoleta](https://github.com/ericcoleta) & [@plutaniano](https://github.com/plutaniano)] 1. [BlueApron](https://www.blueapron.com) [[@jasonjho](https://github.com/jasonjho) & [@matthewdavidhauser](https://github.com/matthewdavidhauser)] 1. [Bluecore](https://www.bluecore.com) [[@JLDLaughlin](https://github.com/JLDLaughlin)] 1. [Bluekiri](https://bluekiri.com) [[@Bluekiri](https://github.com/bluekiri)] @@ -118,7 +119,7 @@ Currently, **officially** using Airflow: 1. [Capital One](https://www.capitalone.com) [[@anoopengineer](https://github.com/anoopengineer)] 1. [Carbonite](https://www.carbonite.com) [[@ajbosco](https://github.com/ajbosco)] 1.
[CarLabs](https://www.carlabs.ai/) [[@sganz](https://github.com/sganz) & [@odannyc](https://github.com/odannyc)] -1. [Carpe Data](https://www.carpe.io/) [[@manugarri](https://github.com/manugarri)]] +1. [Carpe Data](https://www.carpe.io/) [[@manugarri](https://github.com/manugarri)] 1. [CAVA](https://www.cava.com) [[@minh5](https://github.com/minh5) & [@patchus](https://github.com/patchus)] 1. [Celect](http://www.celect.com) [[@superdosh](https://github.com/superdosh) & [@chadcelect](https://github.com/chadcelect)] 1. [Censys](https://censys.io) [[@zakird](https://github.com/zakird), [@dadrian](https://github.com/dadrian), & [@andrewsardone](https://github.com/andrewsardone)] @@ -127,6 +128,8 @@ Currently, **officially** using Airflow: 1. [Checkr](https://checkr.com) [[@tongboh](https://github.com/tongboh)] 1. [Children's Hospital of Philadelphia Division of Genomic Diagnostics](http://www.chop.edu/centers-programs/division-genomic-diagnostics) [[@genomics-geek](https://github.com/genomics-geek/)] 1. [Cinimex DataLab](http://cinimex.ru) [[@kdubovikov](https://github.com/kdubovikov)] +1. [City of Ann Arbor](https://www.a2gov.org) [[@a2gov](https://github.com/a2gov), [@sfirke](https://github.com/sfirke)] +1. [City of Detroit](https://detroitmi.gov) [[@CityOfDetroit](https://github.com/CityOfDetroit), [@jmcbroom](https://github.com/jmcbroom)] 1. [City of San Diego](http://sandiego.gov) [[@MrMaksimize](https://github.com/mrmaksimize), [@andrell81](https://github.com/andrell81) & [@arnaudvedy](https://github.com/arnaudvedy)] 1. [City of Toronto](https://www.toronto.ca/) [[@CityofToronto](https://github.com/CityofToronto), [@radumas](https://github.com/radumas)] 1. [ciValue](https://civalue.com/) [[@chencivalue](https://github.com/chencivalue), [@YoavGaudin](https://github.com/YoavGaudin), [@saleem-boshnak](https://github.com/saleem-boshnak)] @@ -137,12 +140,13 @@ Currently, **officially** using Airflow: 1. [Classmethod, Inc.](https://classmethod.jp/) [[@shoito](https://github.com/shoito)] 1. [Cleartax](https://cleartax.in/) [[@anks](https://github.com/anks) & [@codebuff](https://github.com/codebuff)] 1. [Clicksign](https://clicksign.com/) [[@mbbernstein](https://github.com/mbbernstein) & [@jorgeac12](https://github.com/jorgeac12) & [@franklin390](https://github.com/franklin390)] -1. [Cloudera](https://www.cloudera.com/) [[@phraniiac](https://github.com/phraniiac) & [@VivekPemawat](https://github.com/VivekPemawat) & [@amoghrajesh](https://github.com/amoghrajesh) & [@vedantlodha](https://github.com/vedantlodha) & [@shubhamraj-git](https://github.com/shubhamraj-git) & [@Samit-Maharjan](https://github.com/Samit-Maharjan)] & [@anukrati1507](https://github.com/anukrati1507) +1. [Cloudera](https://www.cloudera.com/) [[@phraniiac](https://github.com/phraniiac) & [@VivekPemawat](https://github.com/VivekPemawat) & [@amoghrajesh](https://github.com/amoghrajesh) & [@vedantlodha](https://github.com/vedantlodha) & [@shubhamraj-git](https://github.com/shubhamraj-git) & [@Samit-Maharjan](https://github.com/Samit-Maharjan) & [@anukrati1507](https://github.com/anukrati1507)] 1. [Clover Health](https://www.cloverhealth.com) [[@ryansiu1995](https://github.com/ryansiu1995)] 1. [Coinbase](https://www.coinbase.com) [[@mingshi-wang](https://github.com/mingshi-wang)] 1. [Coinone](https://www.coinonecorp.com) [[@jx2lee](https://github.com/jx2lee)] 1. [Colgate-Palmolive](https://www.colgatepalmolive.com/) [[@fhoda](https://github.com/fhoda)] 1. 
[Collectivehealth Inc.](https://www.collectivehealth.com) [[@retornam](https://github.com/retornam)] +1. [Comcast](https://corporate.comcast.com/) [[@lucid-x](https://github.com/lucid-x)] 1. [Compass](https://www.compass.com) [[@wdhorton](https://github.com/wdhorton)] 1. [ConnectWise](https://www.connectwise.com/) [[@jacobeturpin](https://github.com/jacobeturpin)] 1. [ContaAzul](https://www.contaazul.com) [[@bern4rdelli](https://github.com/bern4rdelli), [@renanleme](https://github.com/renanleme) & [@sabino](https://github.com/sabino)] @@ -173,26 +177,27 @@ Currently, **officially** using Airflow: 1. [dataroots](https://dataroots.io/) [[@datarootsio]](https://github.com/datarootsio) 1. [DataSprints](https://datasprints.com/) [[@lopesdiego12](https://github.com/lopesdiego12) & [@rafaelsantanaep](https://github.com/rafaelsantanaep)] 1. [Datatonic](https://datatonic.com/) [[@teamdatatonic](https://github.com/teamdatatonic)] -1. [Datavant](https://datavant.com)/) [@althati(https://github.com/althati)] +1. [Datavant](https://datavant.com) [[@althati](https://github.com/althati)] 1. [Datumo](https://datumo.io) [[@michalmisiewicz](https://github.com/michalmisiewicz)] 1. [Dcard](https://www.dcard.tw/) [[@damon09273](https://github.com/damon09273) & [@bruce3557](https://github.com/bruce3557) & [@kevin1kevin1k](http://github.com/kevin1kevin1k)] 1. [Delft University of Technology](https://www.tudelft.nl/en/) [[@saveriogzz](https://github.com/saveriogzz)] 1. [Dentsu Inc.](http://www.dentsu.com/) [[@bryan831](https://github.com/bryan831) & [@loozhengyuan](https://github.com/loozhengyuan)] -1. [Deseret Digital Media](http://deseretdigital.com/) [[@formigone](https://github.com/formigone) +1. [Deseret Digital Media](http://deseretdigital.com/) [[@formigone](https://github.com/formigone)] 1. [DevITJobs.com](https://devitjobs.com/) 1. [DFDS](https://www.dfds.com/) [[@timonviola](https://github.com/timonviola)] 1. [Digital First Media](http://www.digitalfirstmedia.com/) [[@duffn](https://github.com/duffn) & [@mschmo](https://github.com/mschmo) & [@seanmuth](https://github.com/seanmuth)] 1. [Disney](https://www.disney.com/) [[@coolbeans201](https://github.com/coolbeans201)] +1. [Docaposte](https://www.docaposte.com) [[@albundy83](https://github.com/albundy83)] 1. [Docsity](https://www.docsity.com/) 1. [Doctrine](https://www.doctrine.fr/)[[@anteverse](https://github.com/anteverse)] -1. [DoorDash](https://www.doordash.com/) +1. [DoorDash](https://www.doordash.com/) [[@chiragtodarka](https://github.com/chiragtodarka)] 1. [Dotmodus](http://dotmodus.com) [[@dannylee12](https://github.com/dannylee12)] 1. [Drivy](https://www.drivy.com) [[@AntoineAugusti](https://github.com/AntoineAugusti)] 1. [Dropbox](https://www.dropbox.com) [[@AlexeySanko](https://github.com/AlexeySanko)] 1. [Dunnhumby](https://www.dunnhumby.com) 1. [Dunzo](https://www.dunzo.com)[[@masterlittle](https://github.com/masterlittle)] 1. [Dynata](https://www.dynata.com) [[@neil3handari](https://github.com/neil3handari)] -1. [e-MPS](https://e-mps.co.uk/)[[@IanDanielM](https://github.com/IanDanielM) +1. [e-MPS](https://e-mps.co.uk/)[[@IanDanielM](https://github.com/IanDanielM)] 1. [Easy Taxi](http://www.easytaxi.com/) [[@caique-lima](https://github.com/caique-lima) & [@diraol](https://github.com/diraol)] 1. 
[EBANX](https://www.ebanx.com/) [[@diogodilcl](https://github.com/diogodilcl) & [@estevammr](https://github.com/estevammr) & [@filipe-banzoli](https://github.com/filipe-banzoli) & [@lara-clink](https://github.com/lara-clink) & [@Lucasdsvenancio](https://github.com/Lucasdsvenancio) & [@mariotaddeucci](https://github.com/mariotaddeucci) & [@nadiapetramont](https://github.com/nadiapetramont) & [@nathangngencissk](https://github.com/nathangngencissk) & [@patrickjuan](https://github.com/patrickjuan) & [@raafaadg](https://github.com/raafaadg) & [@samebanx](https://github.com/samebanx) & [@thiagoschonrock](https://github.com/thiagoschonrock) & [@whrocha](https://github.com/whrocha)] 1. [Elai Data](https://www.elaidata.com/) [[@lgov](https://github.com/lgov)] @@ -208,7 +213,8 @@ Currently, **officially** using Airflow: 1. [Estrategia Educacional](https://github.com/estrategiahq) [[@jonasrla](https://github.com/jonasrla)] 1. [Etsy](https://www.etsy.com) [[@mchalek](https://github.com/mchalek)] 1. [EUIGS - Admiral Group](https://www.linkedin.com/company/euiitglobalservices) [[@emilioego](https://github.com/emilioego)] -1. [Europcar](https://www.europcar.com/en-us) [[@Conformist101](https://github.com/Conformist101) & [@davidpr91](https://github.com/davidpr91) & [@jcarbonell](https://github.com/jcarbonell)& [@marc-rf](https://github.com/marc-rf)& [@VictorGeaGarcia](https://github.com/VictorGeaGarcia)] +1. [Europace](https://www.europace.de/) +1. [Europcar](https://www.europcar.com/en-us) [[@Conformist101](https://github.com/Conformist101) & [@davidpr91](https://github.com/davidpr91) & [@jcarbonell](https://github.com/jcarbonell) & [@marc-rf](https://github.com/marc-rf) & [@VictorGeaGarcia](https://github.com/VictorGeaGarcia)] 1. [Everis](https://www.everis.com) [[@diegobenedicto](https://github.com/diegobenedicto)] 1. [Everlane](https://everlane.com) [[@NickBenthem](https://github.com/NickBenthem)] 1. [evo.company](https://evo.company/) [[@orhideous](https://github.com/orhideous)] @@ -289,7 +295,7 @@ Currently, **officially** using Airflow: 1. [Inoopa](https://www.inoopa.com/) [[@GraphtyLove](https://github.com/GraphtyLove)] 1. [Instacart 🥕](http://www.instacart.com/) [[@arp1t](https://github.com/arp1t) & [@code-sauce](https://github.com/code-sauce) & [@jasonlew](https://github.com/jasonlew) & [@j4p3](https://github.com/j4p3) & [@lubert](https://github.com/lubert) & [@mmontagna](https://github.com/mmontagna) & [@RyanAD](https://github.com/RyanAD) &[@zzadeh](https://github.com/zzadeh)] 1. [Intellischool 🎓](https://intellischool.co/) [[@intelliscl](https://github.com/intelliscl) & [@dave-philp](https://github.com/dave-philp)] -1. [Inter Platform Inc.](https://www.bancointer.com.br/) [[@wolvery](https://github.com/wolvery) +1. [Inter Platform Inc.](https://www.bancointer.com.br/) [[@wolvery](https://github.com/wolvery)] 1. [Intercom](http://www.intercom.com/) [[@fox](https://github.com/fox) & [@paulvic](https://github.com/paulvic)] 1. [Interia](http://www.interia.pl) 1. [Investorise](https://investorise.com/) [[@svenvarkel](https://github.com/svenvarkel)] @@ -302,6 +308,7 @@ Currently, **officially** using Airflow: 1. [JobTeaser](https://www.jobteaser.com) [[@stefani75](https://github.com/stefani75) & [@knil-sama](https://github.com/knil-sama)] 1. [JULO](https://www.julo.co.id/) [[@sepam](https://github.com/sepam) & [@tenapril](https://github.com/tenapril) & [@verzqy](https://github.com/verzqy)] 1. [Kalibrr](https://www.kalibrr.com/) [[@charlesverdad](https://github.com/charlesverdad)] +1. 
[Karana Dynamics](https://www.karanadyn.com) [[@aarongaut](https://github.com/aarongaut), [@leakec](https://github.com/leakec) & [@kajain2](https://github.com/kajain2)] 1. [Kargo](https://kargo.com) [[@chaithra-yenikapati](https://github.com/chaithra-yenikapati), [@akarsh3007](https://github.com/akarsh3007) & [@dineshanchan](https://github.com/dineshanchan)] 1. [Karmic](https://karmiclabs.com) [[@hyw](https://github.com/hyw)] 1. [Kayzen](https://kayzen.io) [[@arvindeybram](https://github.com/arvindeybram)] @@ -338,6 +345,7 @@ Currently, **officially** using Airflow: 1. [Menhir Financial](https://www.menhir.ai/) [[@pablo-menhir](https://github.com/pablo-menhir), [@dionisio-menhir](https://github.com/dionisio-menhir) & [@luisjvca-menhir](https://github.com/luisjvca-menhir)] 1. [Mercadoni](https://www.mercadoni.com.co) [[@demorenoc](https://github.com/demorenoc)] 1. [Mercari](http://www.mercari.com/) [[@yu-iskw](https://github.com/yu-iskw)] +1. [Met Office](https://www.metoffice.gov.uk/) [[@MetOffice](https://github.com/MetOffice)] 1. [MeuVendoo](https://www.meuvendoo.com.br) [[@CarlosDutra](https://github.com/CarlosDutra)] 1. [MFG Labs](https://github.com/MfgLabs) 1. [Ministry of Economy of Brazil](https://www.gov.br/economia/) [[@nitaibezerra](https://github.com/nitaibezerra), [@vitorbellini](https://github.com/vitorbellini)] @@ -354,6 +362,7 @@ Currently, **officially** using Airflow: 1. [NASA Jet Propulsion Laboratory](https://www.jpl.nasa.gov) [[@lewismc](https://github.com/lewismc)] 1. [National Bank of Canada](https://nbc.ca) [[@brilhana](https://github.com/brilhana)] 1. [Nav, Inc.](https://nav.com/) [[@tigerjz32](https://github.com/tigerjz32)] +1. [Naver](https://naver.com/) 1. [Neoway](https://www.neoway.com.br/) [[@neowaylabs](https://github.com/orgs/NeowayLabs/people)] 1. [Nerdwallet](https://www.nerdwallet.com) 1. [New Relic](https://www.newrelic.com) [[@marcweil](https://github.com/marcweil)] @@ -388,6 +397,7 @@ Currently, **officially** using Airflow: 1. [Paradigma Digital](https://www.paradigmadigital.com/) [[@paradigmadigital](https://github.com/paradigmadigital)] 1. [Paraná Banco](https://paranabanco.com.br/) [[@lopesdiego12](https://github.com/lopesdiego12/)] 1. [Pathstream](https://pathstream.com) [[@pJackDanger](https://github.com/JackDanger)] +1. [Pattern](https://pattern.com) [[@patterninc](https://github.com/patterninc)] 1. [Paxful](https://paxful.com) [[@ne1r0n](https://github.com/ne1r0n)] 1. [PayFit](https://payfit.com) [[@pcorbel](https://github.com/pcorbel)] 1. [PAYMILL](https://www.paymill.com/) [[@paymill](https://github.com/paymill) & [@matthiashuschle](https://github.com/matthiashuschle)] @@ -460,6 +470,7 @@ Currently, **officially** using Airflow: 1. [SnapTravel](https://www.snaptravel.com/) 1. [SocialCops](https://www.socialcops.com/) [[@vinayak-mehta](https://github.com/vinayak-mehta) & [@sharky93](https://github.com/sharky93)] 1. [Société générale](https://www.societegenerale.fr/) [[@medmrgh](https://github.com/medmrgh) & [@s83](https://github.com/s83)] +1. [Softwrd](https://softwrd.ai/) [[@softwrdai](https://github.com/softwrdai) & [@Mrrobi](https://github.com/Mrrobi)] 1. [Spotahome](https://www.spotahome.com/) [[@spotahome](https://github.com/spotahome)] 1. [SpotHero](https://github.com/spothero) [[@benjigoldberg](https://github.com/benjigoldberg)] 1. [Spotify](https://github.com/spotify) [[@znichols](https://github.com/znichols)] @@ -478,6 +489,7 @@ Currently, **officially** using Airflow: 1. 
[T2 Systems](http://t2systems.com) [[@unclaimedpants](https://github.com/unclaimedpants)] 1. [Tails.com](https://tails.com/) [[@alanmcruickshank](https://github.com/alanmcruickshank)] 1. [Talkdesk](https://www.talkdesk.com) +1. [Tapestry](https://www.tapestry.com) [[@faheem-khau9](https://github.com/faheem-khau9)] 1. [Tapsi](https://tapsi.ir/) 1. [TEK](https://www.tek.fi/en) [[@telac](https://github.com/telac)] 1. [Tekmetric](https://www.tekmetric.com/) @@ -507,7 +519,7 @@ Currently, **officially** using Airflow: 1. [Topgolf](https://topgolf.com/)[[@BhaveshSK](https://github.com/BhaveshSK)] 1. [Toplyne](https://toplyne.io)[[@Toplyne](https://github.com/Toplyne/)] 1. [Trade Republic](https://traderepublic.com/) -1. [Trakken](https://www.trkkn.com/) [[@itroulli](https://github.com/itroulli), [@gthar](https://github.com/gthar), [@qulo](https://github.com/qulo), [@Oscar-Rod](https://github.com/Oscar-Rod), [@kondla](https://github.com/kondla), [@semuar](https://github.com/semuar), [@ManuelFreytag](https://github.com/ManuelFreytag) +1. [Trakken](https://www.trkkn.com/) [[@itroulli](https://github.com/itroulli), [@gthar](https://github.com/gthar), [@qulo](https://github.com/qulo), [@Oscar-Rod](https://github.com/Oscar-Rod), [@kondla](https://github.com/kondla), [@semuar](https://github.com/semuar), [@ManuelFreytag](https://github.com/ManuelFreytag)] 1. [Travix](https://www.travix.com/) 1. [Trocafone](https://www.trocafone.com/) [[@idontdomath](https://github.com/idontdomath) & [@gseva](https://github.com/gseva) & [@ordonezf](https://github.com/ordonezf) & [@PalmaLeandro](https://github.com/PalmaLeandro)] 1. [TruFactor](https://trufactor.io/) [[@gholmes](https://github.com/gholmes) & [@angadsingh](https://github.com/angadsingh/)] diff --git a/ISSUE_TRIAGE_PROCESS.rst b/ISSUE_TRIAGE_PROCESS.rst index 42f3f9779677c..676a52f41e505 100644 --- a/ISSUE_TRIAGE_PROCESS.rst +++ b/ISSUE_TRIAGE_PROCESS.rst @@ -196,7 +196,7 @@ associated with them such as ``provider:amazon-aws``, ``provider:microsoft-azure These make it easier for developers working on a single provider to track issues for that provider. -Note: each provider has it's own unique label. It is possible for issue to be tagged with more than 1 provider label. +Note: each provider has its own unique label. It is possible for an issue to be tagged with more than one provider label. Most issues need a combination of "kind" and "area" labels to be actionable. For example: diff --git a/PROVIDERS.rst b/PROVIDERS.rst index 1d1386cceb454..ef3239ae6ab35 100644 --- a/PROVIDERS.rst +++ b/PROVIDERS.rst @@ -57,7 +57,7 @@ releasing new versions of the providers. This means that the code changes in the reviewed by Airflow committers and merged when they are accepted by them. Also we must have sufficient test coverage and documentation that allow us to maintain the providers, and our users to use them. -The providers - their latest version in "main" branch of airflow repository - are installed and tested together +The providers - their latest version in "main" branch of Airflow repository - are installed and tested together with other community providers and one of the key properties of the community providers is that the latest version of providers contribute their dependencies to constraints of Airflow, published when Airflow Core is released.
This means that when users are using constraints published by Airflow, they can install all @@ -92,7 +92,7 @@ Accepting new community providers --------------------------------- Accepting new community providers should be a deliberate process that requires ``[DISCUSSION]`` -followed by ``[VOTE]`` thread at the airflow `devlist `_. +followed by ``[VOTE]`` thread at the Airflow `devlist `_. In case the provider is an integration with open-source software rather than a service, we can relax the vote procedure a bit. Particularly if the open-source software is an Apache Software Foundation, @@ -145,6 +145,10 @@ classification, whether there are breaking changes, new features or just bugs co Upgrading Minimum supported version of Airflow ---------------------------------------------- +.. note:: + + The following policy applies to Airflow 2. It has not yet been finalized for Airflow 3 and is subject to change. + One of the important limitations of the Providers released by the community is that we introduce the limit of a minimum supported version of Airflow. The minimum version of Airflow is the ``MINOR`` version (2.4, 2.5 etc.) indicating that the providers might use features that appeared in this release. The default support timespan @@ -153,8 +157,8 @@ Airflow version to the next MINOR release, when 12 months passed since the first MINOR version of Airflow. For example, this means that by default we upgrade the minimum version of Airflow supported by providers -to 2.9.0 in the first Provider's release after 8th of April 2025. 8th of April 2024 is the date when the -first ``PATCHLEVEL`` of 2.9 (2.9.0) has been released. +to 3.0.0 in the first Provider's release after 22nd of April 2026. 22nd of April 2025 is the date when the +first ``PATCHLEVEL`` version of 3.0 (3.0.0) was released. When we increase the minimum Airflow version, this is not a reason to bump ``MAJOR`` version of the providers (unless there are other breaking changes in the provider).
The reason for that is that people who use diff --git a/README.md b/README.md index 8c93ff8f9260b..eb911e71ecf05 100644 --- a/README.md +++ b/README.md @@ -20,20 +20,22 @@ # Apache Airflow -[![PyPI version](https://badge.fury.io/py/apache-airflow.svg)](https://badge.fury.io/py/apache-airflow) -[![GitHub Build](https://github.com/apache/airflow/actions/workflows/ci.yml/badge.svg)](https://github.com/apache/airflow/actions) -[![Coverage Status](https://codecov.io/gh/apache/airflow/graph/badge.svg?token=WdLKlKHOAU)](https://codecov.io/gh/apache/airflow) -[![License](https://img.shields.io/:license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt) -[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/apache-airflow.svg)](https://pypi.org/project/apache-airflow/) -[![Docker Pulls](https://img.shields.io/docker/pulls/apache/airflow.svg)](https://hub.docker.com/r/apache/airflow) -[![Docker Stars](https://img.shields.io/docker/stars/apache/airflow.svg)](https://hub.docker.com/r/apache/airflow) -[![PyPI - Downloads](https://img.shields.io/pypi/dm/apache-airflow)](https://pypi.org/project/apache-airflow/) -[![Artifact HUB](https://img.shields.io/endpoint?url=https://artifacthub.io/badge/repository/apache-airflow)](https://artifacthub.io/packages/search?repo=apache-airflow) -[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) -[![Slack Status](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://s.apache.org/airflow-slack) -[![Contributors](https://img.shields.io/github/contributors/apache/airflow)](https://github.com/apache/airflow/graphs/contributors) -![Commit Activity](https://img.shields.io/github/commit-activity/m/apache/airflow) -[![OSSRank](https://shields.io/endpoint?url=https://ossrank.com/shield/6)](https://ossrank.com/p/6) +| Badges | | +|------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| License | [![License](https://img.shields.io/:license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt) | +| PyPI | [![PyPI version](https://badge.fury.io/py/apache-airflow.svg)](https://badge.fury.io/py/apache-airflow) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/apache-airflow.svg)](https://pypi.org/project/apache-airflow/) [![PyPI - Downloads](https://img.shields.io/pypi/dm/apache-airflow)](https://pypi.org/project/apache-airflow/) | +| Containers | [![Docker Pulls](https://img.shields.io/docker/pulls/apache/airflow.svg)](https://hub.docker.com/r/apache/airflow) [![Docker Stars](https://img.shields.io/docker/stars/apache/airflow.svg)](https://hub.docker.com/r/apache/airflow) [![Artifact HUB](https://img.shields.io/endpoint?url=https://artifacthub.io/badge/repository/apache-airflow)](https://artifacthub.io/packages/search?repo=apache-airflow) | +| Community | [![Contributors](https://img.shields.io/github/contributors/apache/airflow)](https://github.com/apache/airflow/graphs/contributors) [![Slack Status](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://s.apache.org/airflow-slack) 
![Commit Activity](https://img.shields.io/github/commit-activity/m/apache/airflow) [![OSSRank](https://shields.io/endpoint?url=https://ossrank.com/shield/6)](https://ossrank.com/p/6) | + + + +| Version | Build Status | +|---------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Main | [![GitHub Build main](https://github.com/apache/airflow/actions/workflows/ci-amd.yml/badge.svg)](https://github.com/apache/airflow/actions) [![GitHub Build main](https://github.com/apache/airflow/actions/workflows/ci-arm.yml/badge.svg)](https://github.com/apache/airflow/actions) | +| 3.x | [![GitHub Build 3.0](https://github.com/apache/airflow/actions/workflows/ci-amd.yml/badge.svg?branch=v3-0-test)](https://github.com/apache/airflow/actions) [![GitHub Build 3.0](https://github.com/apache/airflow/actions/workflows/ci-arm.yml/badge.svg?branch=v3-0-test)](https://github.com/apache/airflow/actions) | +| 2.x | [![GitHub Build 2.11](https://github.com/apache/airflow/actions/workflows/ci.yml/badge.svg?branch=v2-11-test)](https://github.com/apache/airflow/actions) | + + @@ -58,6 +60,7 @@ Use Airflow to author workflows as directed acyclic graphs (DAGs) of tasks. The - [Requirements](#requirements) - [Getting started](#getting-started) - [Installing from PyPI](#installing-from-pypi) +- [Installation](#installation) - [Official source code](#official-source-code) - [Convenience packages](#convenience-packages) - [User Interface](#user-interface) @@ -96,14 +99,14 @@ Airflow is not a streaming solution, but it is often used to process real-time d Apache Airflow is tested with: -| | Main version (dev) | Stable version (2.10.5) | -|------------|------------------------|----------------------------| -| Python | 3.9, 3.10, 3.11, 3.12 | 3.8, 3.9, 3.10, 3.11, 3.12 | -| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | -| Kubernetes | 1.29, 1.30, 1.31, 1.32 | 1.27, 1.28, 1.29, 1.30 | -| PostgreSQL | 13, 14, 15, 16, 17 | 12, 13, 14, 15, 16 | -| MySQL | 8.0, 8.4, Innovation | 8.0, 8.4, Innovation | -| SQLite | 3.15.0+ | 3.15.0+ | +| | Main version (dev) | Stable version (3.0.4) | +|------------|------------------------|------------------------| +| Python | 3.10, 3.11, 3.12, 3.13 | 3.9, 3.10, 3.11, 3.12 | +| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | +| Kubernetes | 1.30, 1.31, 1.32, 1.33 | 1.30, 1.31, 1.32, 1.33 | +| PostgreSQL | 13, 14, 15, 16, 17 | 13, 14, 15, 16, 17 | +| MySQL | 8.0, 8.4, Innovation | 8.0, 8.4, Innovation | +| SQLite | 3.15.0+ | 3.15.0+ | \* Experimental @@ -139,6 +142,7 @@ Documentation for dependent projects like provider distributions, Docker image, + ## Installing from PyPI We publish Apache Airflow as `apache-airflow` package in PyPI. Installing it however might be sometimes tricky @@ -154,7 +158,6 @@ constraints files separately per major/minor Python version. You can use them as constraint files when installing Airflow from PyPI. Note that you have to specify correct Airflow tag/version/branch and Python versions in the URL. - 1. Installing just Airflow: > Note: Only `pip` installation is currently officially supported. @@ -174,21 +177,26 @@ them to the appropriate format and workflow that your tool requires. 
```bash -pip install 'apache-airflow==2.10.5' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.5/constraints-3.9.txt" +pip install 'apache-airflow==3.0.4' \ + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.4/constraints-3.10.txt" ``` 2. Installing with extras (i.e., postgres, google) ```bash -pip install 'apache-airflow[postgres,google]==2.10.5' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.5/constraints-3.9.txt" +pip install 'apache-airflow[postgres,google]==3.0.4' \ + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.4/constraints-3.10.txt" ``` For information on installing provider distributions, check [providers](http://airflow.apache.org/docs/apache-airflow-providers/index.html). + +## Installation + +For comprehensive instructions on setting up your local development environment and installing Apache Airflow, please refer to the [INSTALLING.md](INSTALLING.md) file. + ## Official source code @@ -230,27 +238,31 @@ following the ASF Policy. - **DAGs**: Overview of all DAGs in your environment. - ![DAGs](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/dags.png) + ![DAGs](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/ui-dark/dags.png) + +- **Assets**: Overview of Assets with dependencies. + + ![Asset Dependencies](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/ui-dark/assets_graph.png) - **Grid**: Grid representation of a DAG that spans across time. - ![Grid](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/grid.png) + ![Grid](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/ui-dark/grid.png) - **Graph**: Visualization of a DAG's dependencies and their current status for a specific run. - ![Graph](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/graph.png) + ![Graph](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/ui-dark/graph.png) - **Home**: Summary statistics of your Airflow environment. - ![Home](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/home.png) + ![Home](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/ui-dark/home.png) - **Backfill**: Backfilling a DAG for a specific date range. - ![Backfill](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/backfill.png) + ![Backfill](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/ui-dark/backfill.png) - **Code**: Quick way to view source code of a DAG. - ![Code](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/code.png) + ![Code](https://raw.githubusercontent.com/apache/airflow/main/airflow-core/docs/img/ui-dark/code.png) ## Semantic versioning @@ -263,7 +275,7 @@ packages: Changing limits for versions of Airflow dependencies is not a breaking change on its own. * **Airflow Providers**: SemVer rules apply to changes in the particular provider's code only. SemVer MAJOR and MINOR versions for the packages are independent of the Airflow version. - For example, `google 4.1.0` and `amazon 3.0.3` providers can happily be installed + For example, `google 4.1.0` and `amazon 3.0.4` providers can happily be installed with `Airflow 2.1.2`. If there are limits of cross-dependencies between providers and Airflow packages, they are present in providers as `install_requires` limitations. 
We aim to keep backwards compatibility of providers with all previously released Airflow 2 versions but @@ -285,13 +297,14 @@ Apache Airflow version life cycle: -| Version | Current Patch/Minor | State | First Release | Limited Support | EOL/Terminated | -|-----------|-----------------------|-----------|-----------------|-------------------|------------------| -| 2 | 2.10.5 | Supported | Dec 17, 2020 | TBD | TBD | -| 1.10 | 1.10.15 | EOL | Aug 27, 2018 | Dec 17, 2020 | June 17, 2021 | -| 1.9 | 1.9.0 | EOL | Jan 03, 2018 | Aug 27, 2018 | Aug 27, 2018 | -| 1.8 | 1.8.2 | EOL | Mar 19, 2017 | Jan 03, 2018 | Jan 03, 2018 | -| 1.7 | 1.7.1.2 | EOL | Mar 28, 2016 | Mar 19, 2017 | Mar 19, 2017 | +| Version | Current Patch/Minor | State | First Release | Limited Maintenance | EOL/Terminated | +|-----------|-----------------------|-----------|-----------------|-----------------------|------------------| +| 3 | 3.0.4 | Supported | Apr 22, 2025 | TBD | TBD | +| 2 | 2.11.0 | Supported | Dec 17, 2020 | Oct 22, 2025 | Apr 22, 2026 | +| 1.10 | 1.10.15 | EOL | Aug 27, 2018 | Dec 17, 2020 | June 17, 2021 | +| 1.9 | 1.9.0 | EOL | Jan 03, 2018 | Aug 27, 2018 | Aug 27, 2018 | +| 1.8 | 1.8.2 | EOL | Mar 19, 2017 | Jan 03, 2018 | Jan 03, 2018 | +| 1.7 | 1.7.1.2 | EOL | Mar 28, 2016 | Mar 19, 2017 | Mar 19, 2017 | @@ -310,7 +323,7 @@ They are based on the official release schedule of Python and Kubernetes, nicely 1. We drop support for Python and Kubernetes versions when they reach EOL. Except for Kubernetes, a version stays supported by Airflow if two major cloud providers still provide support for it. We drop support for those EOL versions in main right after EOL date, and it is effectively removed when we release - the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.9 it + the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.10 it means that we will drop support in main right after 27.06.2023, and the first MAJOR or MINOR version of Airflow released after will not have it. @@ -421,7 +434,7 @@ Want to help build Apache Airflow? Check out our [contributors' guide](https://g If you can't wait to contribute, and want to get started asap, check out the [contribution quickstart](https://github.com/apache/airflow/blob/main/contributing-docs/03_contributors_quick_start.rst) here! -Official Docker (container) images for Apache Airflow are described in [images](dev/breeze/doc/ci/02_images.md). +Official Docker (container) images for Apache Airflow are described in [images](https://github.com/apache/airflow/blob/main/dev/breeze/doc/ci/02_images.md). diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 7fe4191a52f2f..c418a65ebc6c0 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -19,206 +19,75 @@ :local: :depth: 1 +.. note:: + Release notes for older versions can be found in the versioned documentation. + .. towncrier release notes start -Airflow 3.0.0 (2025-04-22) +Airflow 3.0.4 (2025-08-08) -------------------------- -We are proud to announce the General Availability of **Apache Airflow® 3.0**, the most significant release in the -project's history. Airflow 3.0 builds on the foundation of Airflow 2 and introduces a new service-oriented architecture, -a modern React-based UI, enhanced security, and a host of long-requested features such as DAG versioning, improved -backfills, event-driven scheduling, and support for remote execution. 
- -Highlights -^^^^^^^^^^ - -Major Architectural Advancements -"""""""""""""""""""""""""""""""" - -- **Task Execution API & Task SDK (AIP-72)**: Airflow 3.0 introduces a service-oriented architecture and a new API Server, enabling tasks to run anywhere, in any language, with improved isolation and security. -- **Edge Executor (AIP-69)**: A new experimental executor that enables edge compute patterns and event-driven orchestration scenarios. -- **Split CLI (AIP-81)**: The core CLI is now divided between local and remote functionality, with a new provider package (``airflowctl``) for API-based remote interactions. - -UI Overhaul -""""""""""" - -- **Modern React UI (AIP-38, AIP-84)**: Airflow's UI has been completely rewritten using React and FastAPI. This new UI supports a better UX across Grid, Graph, and Asset views. -- **DAG Versioning (AIP-65, AIP-66)**: DAG structure changes are now tracked natively. Users can inspect DAG history directly from the UI. - -Expanded Scheduling and Execution -""""""""""""""""""""""""""""""""" - -- **Data Assets & Asset-Driven DAGs (AIP-74, AIP-75)**: Data-aware scheduling has evolved into first-class Asset support, including a new ``@asset`` decorator syntax and asset-based execution. -- **External Event Scheduling (AIP-82)**: DAGs can now be triggered from external events via a pluggable message bus interface. Initial support for AWS SQS is included. -- **Scheduler-Managed Backfills (AIP-78)**: Backfills are now managed by the scheduler, with UI support and enhanced diagnostics. - -ML & AI Use Cases -""""""""""""""""" - -- **Support for Non-Data-Interval DAGs (AIP-83)**: Enables inference DAGs and hyperparameter tuning runs by removing the uniqueness constraint on execution dates. - -Breaking Changes -^^^^^^^^^^^^^^^^ - -See the :doc:`Upgrade Guide ` for a full list of changes and migration recommendations. Major breaking changes include: - -Metadata Database Access -"""""""""""""""""""""""" - -- Direct access to the metadata DB from task code is no longer supported. Use the :doc:`REST API ` instead. - -Scheduling Changes -"""""""""""""""""" - -- New default: ``schedule=None``, ``catchup=False`` -- ``schedule_interval`` and ``timetable`` are removed; use ``schedule`` exclusively. -- Raw cron strings now use ``CronTriggerTimetable`` instead of ``CronDataIntervalTimetable``. 
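To make the ``schedule`` consolidation concrete, here is a minimal sketch of the Airflow 3 style (illustrative only: the DAG id and cron expression are invented, and ``DAG`` is imported from the new ``airflow.sdk`` namespace introduced in 3.0):

.. code-block:: python

    from airflow.sdk import DAG

    # Airflow 2 accepted DAG(..., schedule_interval="0 6 * * *") or timetable=...;
    # in Airflow 3 everything goes through ``schedule``. A raw cron string is
    # interpreted via CronTriggerTimetable, i.e. the 06:00 run fires at 06:00
    # rather than after a data interval ends.
    with DAG(
        dag_id="daily_report",
        schedule="0 6 * * *",
        catchup=False,  # also the new default in Airflow 3
    ):
        ...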
- -Context and Parameters -"""""""""""""""""""""" - -- Several context variables removed (``conf``, ``execution_date``, ``dag_run.external_trigger``, etc) -- ``fail_stop`` renamed to ``fail_fast`` -- ``.airflowignore`` now uses glob syntax by default - -Deprecated and Removed Features -""""""""""""""""""""""""""""""" - -+-----------------------------+----------------------------------------------------------+ -| **Feature** | **Replacement** | -+=============================+==========================================================+ -| SubDAGs | Task Groups | -+-----------------------------+----------------------------------------------------------+ -| SLAs | Deadline Alerts (planned post-3.0) | -+-----------------------------+----------------------------------------------------------+ -| ``EmailOperator`` (core) | SMTP provider's ``EmailOperator`` | -+-----------------------------+----------------------------------------------------------+ -| ``dummy`` trigger rule | ``always`` | -+-----------------------------+----------------------------------------------------------+ -| ``none_failed_or_skipped`` | ``none_failed_min_one_success`` | -+-----------------------------+----------------------------------------------------------+ -| XCom pickling | Use a custom XCom backend | -+-----------------------------+----------------------------------------------------------+ - -Upgrade Process -^^^^^^^^^^^^^^^ - -Airflow 3 was designed with migration in mind. Many Airflow 2 DAGs will work without changes. Use these tools: - -- ``ruff check --preview --select AIR30 --fix``: Flag and auto-fix DAG-level changes -- ``airflow config lint``: Identify outdated or removed config options - -**Minimum version required to upgrade**: Airflow 2.7 - -We recommend upgrading to the latest Airflow 2.10.x release before migrating to Airflow 3.0 to benefit from deprecation warnings. Check :doc:`Upgrade Guide ` for more details. - -Resources -^^^^^^^^^ - -- :doc:`Upgrade Guide ` -- `Airflow AIPs `_ - -Airflow 2.10.5 (2025-02-10) ---------------------------- Significant Changes ^^^^^^^^^^^^^^^^^^^ -Ensure teardown tasks are executed when DAG run is set to failed (#45530) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Previously when a DAG run was manually set to "failed" or to "success" state the terminal state was set to all tasks. -But this was a gap for cases when setup- and teardown tasks were defined: If teardown was used to clean-up infrastructure -or other resources, they were also skipped and thus resources could stay allocated. - -As of now when setup tasks had been executed before and the DAG is manually set to "failed" or "success" then teardown -tasks are executed. Teardown tasks are skipped if the setup was also skipped. - -As a side effect this means if the DAG contains teardown tasks, then the manual marking of DAG as "failed" or "success" -will need to keep the DAG in running state to ensure that teardown tasks will be scheduled. They would not be scheduled -if the DAG is directly set to "failed" or "success". - +No significant changes. Bug Fixes """"""""" -- Prevent using ``trigger_rule=TriggerRule.ALWAYS`` in a task-generated mapping within bare tasks (#44751) -- Fix ShortCircuitOperator mapped tasks (#44912) -- Fix premature evaluation of tasks with certain trigger rules (e.g. 
``ONE_DONE``) in a mapped task group (#44937) -- Fix task_id validation in BaseOperator (#44938) (#44938) -- Allow fetching XCom with forward slash from the API and escape it in the UI (#45134) -- Fix ``FileTaskHandler`` only read from default executor (#46000) -- Fix empty task instance for log (#45702) (#45703) -- Remove ``skip_if`` and ``run_if`` decorators before TaskFlow virtualenv tasks are run (#41832) (#45680) -- Fix request body for json requests in event log (#45546) (#45560) -- Ensure teardown tasks are executed when DAG run is set to failed (#45530) (#45581) -- Do not update DR on TI update after task execution (#45348) -- Fix object and array DAG params that have a None default (#45313) (#45315) -- Fix endless sensor rescheduling (#45224) (#45250) -- Evaluate None in SQLAlchemy's extended JSON type decorator (#45119) (#45120) -- Allow dynamic tasks to be filtered by ``rendered_map_index`` (#45109) (#45122) -- Handle relative paths when sanitizing URLs (#41995) (#45080) -- Set Autocomplete Off on Login Form (#44929) (#44940) -- Add Webserver parameters ``max_form_parts``, ``max_form_memory_size`` (#46243) (#45749) -- Fixed accessing thread local variable in BaseOperators ``execute`` safeguard mechanism (#44646) (#46280) -- Add map_index parameter to extra links API (#46337) - +- Fix scheduler heartbeat timeout failures with intermittent ``DetachedInstanceError`` crashes (#53838) (#53858) +- Fix connection editing where sensitive fields like passwords and extras were lost when updating connections (#53943) (#53973) +- Fix BaseOperator ``on_kill`` functionality not working when tasks are killed externally in TaskSDK (#53718) (#53832) +- Fix TaskInstance notes not refreshing automatically without manual page refresh (#53307) (#54025) +- Fix invalid execution API URLs causing failures in task supervisor (#53082) (#53518) +- Fix task failure callbacks not running on DAG Processor when tasks are externally killed (#53058) (#53143) +- Fix ``task_success_overtime`` configuration option not being configurable (#53342) (#53351) +- Fix CSS warning for nth-child selector (#53982) (#54000) +- Fix DAG filtering where "all" option did not show all DAGs as expected (#53656) (#53672) +- Fix accordion child contents not being visible when content overflows (#53595) (#53602) +- Fix navbar positioning for anchor calculations (#52016) (#53581) +- Fix DagBag safe mode configuration resolution in DAG processor (#52694) (#53507) +- Fix large log reading causing out-of-memory issues in API server (#49470) (#53167) +- Fix connection exceptions consistency between Airflow 2.x and 3.x (#52968) (#53093) +- Remove unnecessary ``group_by`` clause in event logs query for performance (#53733) (#53807) +- Allow remote logging providers to load connections from API Server (#53719) (#53761) +- Add certificate support for API server client communication with self-signed certificates (#53574) (#53793) +- Respect ``apps`` flags for API server command configuration (#52929) (#53775) +- Skip empty DAG run configuration rows and set statement timeout (#50788) (#53619) +- Remove incorrect warning for ``BaseOperator.executor`` attribute (#53496) (#53519) +- Add back DAG parsing pre-import optimization for improved performance (#50371) (#52698) +- Flexible form use ReactMarkdown instead of default Markdown component (#54032) (#54040) +- Unconditionally disable ``start_from_trigger`` functionality (#53744) (#53750) +- Serialize NaN and infinity values to string (#53835) (#53844) +- Make log redaction safer in edge case 
when redaction has an error (#54046) (#54048) +- Fix inconsistent casing in UI of decorated tasks (#54056) (#54092) Miscellaneous """"""""""""" -- Add traceback log output when SIGTERMs was sent (#44880) (#45077) -- Removed the ability for Operators to specify their own "scheduling deps" (#45713) (#45742) -- Deprecate ``conf`` from Task Context (#44993) - - -Airflow 2.10.4 (2024-12-16) ---------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -TaskInstance ``priority_weight`` is capped in 32-bit signed integer ranges (#43611) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Some database engines are limited to 32-bit integer values. As some users reported errors in -weight rolled-over to negative values, we decided to cap the value to the 32-bit integer. Even -if internally in python smaller or larger values to 64 bit are supported, ``priority_weight`` is -capped and only storing values from -2147483648 to 2147483647. - -Bug Fixes -^^^^^^^^^ - -- Fix stats of dynamic mapped tasks after automatic retries of failed tasks (#44300) -- Fix wrong display of multi-line messages in the log after filtering (#44457) -- Allow "/" in metrics validator (#42934) (#44515) -- Fix gantt flickering (#44488) (#44517) -- Fix problem with inability to remove fields from Connection form (#40421) (#44442) -- Check pool_slots on partial task import instead of execution (#39724) (#42693) -- Avoid grouping task instance stats by try_number for dynamic mapped tasks (#44300) (#44319) -- Re-queue task when they are stuck in queued (#43520) (#44158) -- Suppress the warnings where we check for sensitive values (#44148) (#44167) -- Fix get_task_instance_try_details to return appropriate schema (#43830) (#44133) -- Log message source details are grouped (#43681) (#44070) -- Fix duplication of Task tries in the UI (#43891) (#43950) -- Add correct mime-type in OpenAPI spec (#43879) (#43901) -- Disable extra links button if link is null or empty (#43844) (#43851) -- Disable XCom list ordering by execution_date (#43680) (#43696) -- Fix venv numpy example which needs to be 1.26 at least to be working in Python 3.12 (#43659) -- Fix Try Selector in Mapped Tasks also on Index 0 (#43590) (#43591) -- Prevent using ``trigger_rule="always"`` in a dynamic mapped task (#43810) -- Prevent using ``trigger_rule=TriggerRule.ALWAYS`` in a task-generated mapping within bare tasks (#44751) +- Fix AIRFLOW_API_APPS constant in API server command (#54007) (#54012) +- Add deprecation notice for using Connection from models in favor of SDK approach (#53594) (#53621) +- Remove remnants of ``~=`` used in requires-python configuration (#52985) (#52987) +- Remove upper-binding for "python-requires" specification (#52980) (#52984) +- Update GitPython from 3.1.44 to 3.1.45 (#53725) (#53731) (#53724) (#53732) Doc Only Changes """""""""""""""" -- Update XCom docs around containers/helm (#44570) (#44573) - -Miscellaneous -""""""""""""" -- Raise deprecation warning when accessing inlet or outlet events through str (#43922) - -Airflow 2.10.3 (2024-11-05) ---------------------------- +- Update DAG author documentation to use "DAG author" terminology (#53857) (#53950) +- Update architecture diagrams labels from "Webserver(s)" to "API Server(s)" (#53917) (#54020) +- Remove bold formatting for Public Interface documentation in Airflow 3.0+ (#53955) (#53964) +- Add user-facing documentation for running separate Task 
Execution API server (#53789) (#53794) +- Add documentation for self-signed certificate configuration (#53788) (#53792) +- Update systemd unit files and documentation for Airflow 3.0 compatibility (#52294) (#53609) +- Update public interface documentation to reflect airflow.sdk and AIP-72 changes (#52197) (#53117) +- Update BaseOperator documentation string for clarity (#53403) (#53404) +- Remove extra slash from endpoint URL formatting (#53755) (#53764) +- Clarify our security model for sensitive connection information (#54088) (#54100) + +Airflow 3.0.3 (2025-07-14) +-------------------------- Significant Changes ^^^^^^^^^^^^^^^^^^^ @@ -227,61 +96,82 @@ No significant changes. Bug Fixes """"""""" -- Improves the handling of value masking when setting Airflow variables for enhanced security. (#43123) (#43278) -- Adds support for task_instance_mutation_hook to handle mapped operators with index 0. (#42661) (#43089) -- Fixes executor cleanup to properly handle zombie tasks when task instances are terminated. (#43065) -- Adds retry logic for HTTP 502 and 504 errors in internal API calls to handle webserver startup issues. (#42994) (#43044) -- Restores the use of separate sessions for writing and deleting RTIF data to prevent StaleDataError. (#42928) (#43012) -- Fixes PythonOperator error by replacing hyphens with underscores in DAG names. (#42993) -- Improving validation of task retries to handle None values (#42532) (#42915) -- Fixes error handling in dataset managers when resolving dataset aliases into new datasets (#42733) -- Enables clicking on task names in the DAG Graph View to correctly select the corresponding task. (#38782) (#42697) -- Prevent redirect loop on /home with tags/last run filters (#42607) (#42609) (#42628) -- Support of host.name in OTEL metrics and usage of OTEL_RESOURCE_ATTRIBUTES in metrics (#42428) (#42604) -- Reduce eyestrain in dark mode with reduced contrast and saturation (#42567) (#42583) -- Handle ENTER key correctly in trigger form and allow manual JSON (#42525) (#42535) -- Ensure DAG trigger form submits with updated parameters upon keyboard submit (#42487) (#42499) -- Do not attempt to provide not ``stringified`` objects to UI via xcom if pickling is active (#42388) (#42486) -- Fix the span link of task instance to point to the correct span in the scheduler_job_loop (#42430) (#42480) -- Bugfix task execution from runner in Windows (#42426) (#42478) -- Allows overriding the hardcoded OTEL_SERVICE_NAME with an environment variable (#42242) (#42441) -- Improves trigger performance by using ``selectinload`` instead of ``joinedload`` (#40487) (#42351) -- Suppress warnings when masking sensitive configs (#43335) (#43337) -- Masking configuration values irrelevant to DAG author (#43040) (#43336) -- Execute templated bash script as file in BashOperator (#43191) -- Fixes schedule_downstream_tasks to include upstream tasks for one_success trigger rule (#42582) (#43299) -- Add retry logic in the scheduler for updating trigger timeouts in case of deadlocks. 
(#41429) (#42651) -- Mark all tasks as skipped when failing a dag_run manually (#43572) -- Fix ``TrySelector`` for Mapped Tasks in Logs and Details Grid Panel (#43566) -- Conditionally add OTEL events when processing executor events (#43558) (#43567) -- Fix broken stat ``scheduler_loop_duration`` (#42886) (#43544) -- Ensure total_entries in /api/v1/dags (#43377) (#43429) -- Include limit and offset in request body schema for List task instances (batch) endpoint (#43479) -- Don't raise a warning in ExecutorSafeguard when execute is called from an extended operator (#42849) (#43577) + +- Fix task execution failures with large data by improving internal communication protocol (#51924, #53194) +- Fix reschedule sensors failing after multiple re-queue attempts over long periods (#52706) +- Improve ``xcom_pull`` to cover different scenarios for mapped tasks (#51568) +- Fix connection retrieval failures in triggerer when schema field is used (#52691) +- Add back user impersonation (``run_as_user``) support for task execution (#51780) +- Fix DAG version not updating when bundle name changes without DAG structure changes (#51939) +- Add back ``exception`` to context for task callbacks (#52066) +- Fix task log retrieval for retry attempts showing incorrect logs (#51592) +- Fix data interval handling for DAGs created before AIP-39 during serialization (#51913) +- Fix lingering task supervisors when ``EOF`` is missed (#51180) (#51970) +- Persist ``EventsTimetable``'s description during serialization (#51926) +- Delete import error when a dag bundle becomes inactive (#51921) +- Cleanup import errors during DB migration (#51919) +- Fix ``EOF`` detection of subprocesses in Dag Processor (#51895) +- Stop streaming task logs if end of log mark is missing (#51482) +- Allow more empty loops before stopping log streaming (#52624) +- Fix Jinja2 Template deep copy error with ``dag.test`` (#51673) +- Explicitly close log file descriptor in the supervise function (#51654) +- Improve structured logging format and layout (#51567) (#51626) +- Use Connection Hook Names for Dropdown instead of connection IDs (#51613) +- Add back config setting to control exposing stacktrace (#51617) +- Fix task level alias resolution in structure endpoint (#51579) +- Fix backfill creation to include DAG run configuration from form (#51584) +- Fix structure edges in API responses (#51489) +- Make ``dag.test`` consistent with ``airflow dags test`` CLI command (#51476) +- Fix downstream asset attachment at task level in structure endpoint (#51425) +- Fix Task Instance ``No Status`` Filter (#52154) +- UI: Fix backfill creation to respect run backwards setting from form (#52168) +- UI: Set downstream option to default on task instance clear (#52246) +- UI: Enable iframe script execution (#52568) +- UI: Fix DAG tags filter not showing all tags in UI when tags are greater than 50 (#52714) +- UI: Add real-time clock updates to timezone selector (#52414) +- Improve Grid view performance and responsiveness with optimized data loading (#52718,#52822,#52919) +- Fix editing connection with sensitive extra field (#52445) +- Fix archival for cascading deletes by archiving dependent tables first (#51952) +- Fix whitespace handling in DAG owners parsing for multiple owners (#52221) +- Fix SQLite migration from 2.7.0 to 3.0.0 (#51431) +- Fix http exception when ti not found for extra links API (#51465) +- Fix Starting from Trigger when using ``MappedOperator`` (#52681) +- Add ti information to re-queue logs (#49995) +- Task SDK: Fix 
``AssetEventOperations.get`` to use ``alias_name`` when specified (#52324) +- Ensure trigger kwargs are properly deserialized during trigger execution (#52721) +- Fixing bad cadwyn migration for upstream map indexes (#52797) +- Run trigger expansion logic only when ``start_from_trigger`` is True (#52873) +- Fix example dag ``example_external_task_parent_deferrable.py`` imports (#52957) +- Fixes pagination in DAG run lists (#52989) +- Fix db downgrade check condition (#53005) +- Fix log viewing for skipped task (#53028,#53101) +- Fixes Grid view refresh after user actions (#53086) +- Fix ``no_status`` and ``duration`` for grid summaries (#53092) +- Fix ``ti.log_url`` not in Task Context (#50376) +- Fix XCom data deserialization when using ``XCom.get_all()`` method (#53102) Miscellaneous """"""""""""" -- Deprecate session auth backend (#42911) -- Removed unicodecsv dependency for providers with Airflow version 2.8.0 and above (#42765) (#42970) -- Remove the referrer from Webserver to Scarf (#42901) (#42942) -- Bump ``dompurify`` from 2.2.9 to 2.5.6 in /airflow/www (#42263) (#42270) -- Correct docstring format in _get_template_context (#42244) (#42272) -- Backport: Bump Flask-AppBuilder to ``4.5.2`` (#43309) (#43318) -- Check python version that was used to install pre-commit venvs (#43282) (#43310) -- Resolve warning in Dataset Alias migration (#43425) + +- Update ``connections_test`` CLI to use Connection instead of BaseHook (#51834) (#51917) +- Fix table pagination when DAG filtering changes (#51795) +- UI: Move asset events to its own tab (#51655) +- Exclude ``libcst`` 1.8.1 for Python 3.9 (#51609) +- UI: Implement navigation on bar click (#50416) +- Reduce unnecessary logging when retrieving connections and variables (#51826) Doc Only Changes """""""""""""""" -- Clarifying PLUGINS_FOLDER permissions by DAG authors (#43022) (#43029) -- Add templating info to TaskFlow tutorial (#42992) -- Airflow local settings no longer importable from dags folder (#42231) (#42603) -- Fix documentation for cpu and memory usage (#42147) (#42256) -- Fix instruction for docker compose (#43119) (#43321) -- Updates documentation to reflect that dag_warnings is returned instead of import_errors. (#42858) (#42888) +- Add note about payload size considerations in API docs (#51768) +- Enhance ENV vars and conns visibility docs (#52026) +- Add http-only warning when running behind proxy in documentation (#52699) +- Publish separate docs for Task SDK (#52682) +- Streamline Taskflow examples and link to core tutorial (#52709) +- Refresh Public Interface & align how-to guides for Airflow 3.0+ (#53011) -Airflow 2.10.2 (2024-09-18) ---------------------------- +Airflow 3.0.2 (2025-06-10) +-------------------------- Significant Changes ^^^^^^^^^^^^^^^^^^^ @@ -290,35 +180,112 @@ No significant changes. 
Bug Fixes """"""""" -- Revert "Fix: DAGs are not marked as stale if the dags folder change" (#42220, #42217) -- Add missing open telemetry span and correct scheduled slots documentation (#41985) -- Fix require_confirmation_dag_change (#42063) (#42211) -- Only treat null/undefined as falsy when rendering XComEntry (#42199) (#42213) -- Add extra and ``renderedTemplates`` as keys to skip ``camelCasing`` (#42206) (#42208) -- Do not ``camelcase`` xcom entries (#42182) (#42187) -- Fix task_instance and dag_run links from list views (#42138) (#42143) -- Support multi-line input for Params of type string in trigger UI form (#40414) (#42139) -- Fix details tab log url detection (#42104) (#42114) -- Add new type of exception to catch timeout (#42064) (#42078) -- Rewrite how DAG to dataset / dataset alias are stored (#41987) (#42055) -- Allow dataset alias to add more than one dataset events (#42189) (#42247) + +- Fix memory leak in dag-processor (#50558) +- Add back invalid inlet and outlet check before running tasks (#50773) +- Implement slice on LazyXComSequence to allow filtering items from a mapped task(#50117) +- Fix execution API server URL handling for relative paths in KE (#51183) +- Add log lookup exception for Empty operator subtypes (#50325) +- Increase the max zoom on the graph view to make it easier to see small dags on big monitor screens (#50772) +- Fix timezone selection and dashboard layout (#50463) +- Creating backfill for a dag is affecting other dags (#50577) +- Fix next asset schedule and dag card UX (#50271) +- Add bundle path to ``sys.path`` in task runner (#51318) +- Add bundle path to ``sys.path`` in dag processor (#50385) +- Prevent CPU spike in task supervisor when heartbeat timeout exceeded (#51023) +- Fix Airflow Connection Form widget error (#51168) +- Add backwards compatibility shim and deprecation warning for EmailOperator (#51004) +- Handle ``SIGSEGV`` signals during DAG file imports (#51171) +- Fix deferred task resumption in ``dag.test()`` (#51182) +- Fix get dags query to not have join explosion (#50984) +- Ensure Logical date is populated correctly in Context vars (#50898) +- Mask variable values in task logs only if the variable key is sensitive (#50775) +- Mask secrets when retrieving variables from secrets backend (#50895) +- Deserialize should work while retrieving variables with secrets backend (#50889) +- Fix XCom deserialization for mapped tasks with custom backend (#50687) +- Support macros defined via plugins in Airflow 3 (#50642) +- Fix Pydantic ``ForwardRef`` error by reordering discriminated union definitions (#50688) +- Adding backwards compatibility shim for ``BaseNotifier`` (#50340) +- Use latest bundle version when clearing / re-running dag (#50040) +- Handle ``upstream_mapped_index`` when xcom access is needed (#50641) +- Remove unnecessary breaking flag in config command (#50781) +- Do not flood worker logs with secrets backend loading logs (#50581) +- Persist table sorting preferences across sessions using local storage (#50720) +- Fixed patch_task_instance API endpoint to support task instance summaries and task groups (#50550) +- Fixed bulk API schemas to improve OpenAPI compatibility and client generation (#50852) +- Fixed variable API endpoints to support keys containing slashes (#50841) +- Restored backward compatibility for the ``/run`` API endpoint for older Task SDK clients +- Fixed dropdown overflow and error text styling in ``FlexibleForm`` component (#50845) +- Corrected DAG tag rendering to display ``+1 more`` when tags exceed the 
display limit by one (#50669) +- Fix permission check on the ui config endpoint (#50564) +- Fix ``default_args`` handling in operator ``.partial()`` to prevent ``TypeError`` when unused keys are present (#50525) +- DAG Processor: Fix index to sort by last parsing duration (#50388) +- UI: Fix border overlap issue in the Events page (#50453) +- Fix ``airflow tasks clear`` command (#49631) +- Restored support for ``--local`` flag in ``dag list`` and ``dag list-import-errors`` CLI commands (#49380) +- CLI: Exclude example dags when a bundle is passed (#50401) +- Fix CLI export to handle stdout without file descriptors (#50328) +- Fix ``DagProcessor`` stats log to show the correct parse duration (#50316) +- Fix OpenAPI schema for ``get_log`` API (#50547) +- Remove ``logical_date`` check when validating inlets and outlets (#51464) +- Guard ``ti`` update state and set task to fail if exception encountered (#51295) Miscellaneous """"""""""""" -- Limit universal-pathlib below ``0.2.4`` as it breaks our integration (#42101) -- Auto-fix default deferrable with ``LibCST`` (#42089) -- Deprecate ``--tree`` flag for ``tasks list`` cli command (#41965) + +- UI: Implement navigation on bar click (#50416) +- UI: Always Show Trends count in Dag Overview (#50183) +- UI: Add basic json check to variable value +- Remove filtering by last dag run state in patch dags endpoint (#51347) +- Ensure that both public and ui dags endpoints map to DagService (#51226) +- Refresh Dag details page on new run (#51173) +- Log fallback to None when no XCom value is found (#51285) +- Move ``example_dags`` in standard provider to ``example_dags`` in sources (#51275) +- Bring back "standard" example dags to the ``airflow-core`` package (#51192) +- Faster note on grid endpoint (#51247) +- Port ``task.test`` to Task SDK (#50827) +- Port ``dag.test`` to Task SDK (#50300,#50419) +- Port ``ti.run`` to Task SDK execution path (#50141) +- Support running ``airflow dags test`` from local files (#50420) +- Move macros to task SDK ``execution_time`` module (#50940) +- Add a link to the Airflow logo in Nav (#50304) +- UI: Bump minor and patch package json dependencies (#50298) +- Added a direct link to the latest DAG run in the DAG header (#51119,#51148) +- Fetch only the most recent ``dagrun`` value for list display (#50834) +- Move ``secret_key`` config to ``api`` section (#50839) +- Move various ``webserver`` configs to ``fab`` provider (#50774,#50269,#50208,#50896) +- Make ``dag_run`` nullable in Details page (#50719) +- Rename Operation IDs for task instance endpoints to include map indexes (#49608) +- Update default sort for connections and dags (#50600) +- Raise exception if downgrade can't proceed due to no ``ab_user`` table (#50343) +- Enable JSON serialization for variables created via the bulk API (#51057) +- Always display the backfill option in the UI; enable it only for DAGs with a defined schedule (#50969) +- Optimized DAG header to fetch only the most recent DAG run for improved performance (#50767) +- Add ``owner_links`` field to ``DAGDetailsResponse`` for enhanced owner metadata in the API (#50557) +- UI: Move map index column to be in line with other columns when viewing a summary mapped tasks (#50302) +- Separate configurations for colorized and json logs in Task SDK / Celery Executor (#51082) +- Enhanced task log viewer with virtualized rendering for improved performance on large logs (#50746) Doc Only Changes """""""""""""""" -- Update ``security_model.rst`` to clear unauthenticated endpoints exceptions (#42085) -- Add 
note about dataclasses and attrs to XComs page (#42056) -- Improve docs on markdown docs in DAGs (#42013) -- Add warning that listeners can be dangerous (#41968) - -Airflow 2.10.1 (2024-09-05) ---------------------------- +- Add dates for Limited Maintenance & EOL for Airflow 2.x (#50794) +- Add Apache Airflow setup instructions for Apple Silicon (#50179) +- Update recommendation for upgrade path to airflow 3 (#50318) +- Add "disappearing DAGs" section on FAQ doc (#49987) +- Update Airflow 3 migration guide with step about custom operators (#50871) (#50948) +- Use ``AssetAlias`` for alias in Asset ``Metadata`` example (#50768) +- Do not use outdated ``schedule_interval`` in tutorial dags (#50947) +- Add Airflow Version in Page Title (#50358) +- Fix callbacks docs (#50377) +- Updating operator extra links doc (#50197) +- Prune old Airflow versions from release notes (#50860) +- Fix types in config templates reference (#50792) +- Fix wrong import for ``PythonOperator`` in tutorial dag (#50962) +- Better structure of extras documentation (#50495) + +Airflow 3.0.1 (2025-05-12) +-------------------------- Significant Changes ^^^^^^^^^^^^^^^^^^^ @@ -327,8324 +294,4253 @@ No significant changes. Bug Fixes """"""""" -- Handle Example dags case when checking for missing files (#41874) -- Fix logout link in "no roles" error page (#41845) -- Set end_date and duration for triggers completed with end_from_trigger as True. (#41834) -- DAGs are not marked as stale if the dags folder change (#41829) -- Fix compatibility with FAB provider versions <1.3.0 (#41809) -- Don't Fail LocalTaskJob on heartbeat (#41810) -- Remove deprecation warning for cgitb in Plugins Manager (#41793) -- Fix log for notifier(instance) without ``__name__`` (#41699) -- Splitting syspath preparation into stages (#41694) -- Adding url sanitization for extra links (#41680) -- Fix InletEventsAccessors type stub (#41607) -- Fix UI rendering when XCom is INT, FLOAT, BOOL or NULL (#41605) -- Fix try selector refresh (#41503) -- Incorrect try number subtraction producing invalid span id for OTEL airflow (#41535) -- Add WebEncoder for trigger page rendering to avoid render failure (#41485) -- Adding ``tojson`` filter to example_inlet_event_extra example dag (#41890) -- Add backward compatibility check for executors that don't inherit BaseExecutor (#41927) + +- Improves the handling of value masking when setting Airflow variables for enhanced security (#43123) +- Make entire task box clickable to select the task (#49299) +- Vertically align task log header components in full screen mode (#49569) +- Remove ``dag_code`` records with no serialized dag (#49478) +- Clear out the ``dag_code`` and ``serialized_dag`` tables on 3.0 upgrade (#49563) +- Remove extra slash so that the runs tab is selected (#49600) +- Null out the ``scheduler_interval`` field on downgrade (#49583) +- Logout functionality should respect ``base_url`` in api server (#49545) +- Fix bug with showing invalid credentials on Login UI (#49556) +- Fix Dag Code text selection when dark mode is enabled (#49649) +- Bugfix: ``max_active_tis_per_dag`` is not respected by dynamically mapped tasks (#49708) +- Fix infinite redirect caused by mistakenly setting token cookie as secure (#49721) +- Better handle safe url redirects in login form for ``SimpleAuthManager`` (#49697)(#49866) +- API: Add missing ``bundle_version`` to DagRun response (#49726) +- Display bundle version in Dag details tab (#49787) +- Fix gcp remote log module import in airflow local settings (#49788) +- Bugfix: Grid 
view stops loading when there is a pending task to be expanded (#49772) +- Treat single ``task_ids`` in ``xcom_pull`` the same as multiple when provided as part of a list (#49692) +- UI: Auto refresh Home page stats (#49830) +- UI: Error alert overflows out of the alert box (#49880) +- Show backfill banner after creating a new backfill (#49666) +- Mark ``DAGModel`` stale and associate bundle on import errors to aid migration from 2.10.5 (#49769) +- Improve detection and handling of timed out DAG processor processes (#49868) +- Fix editing port for connections (#50002) +- Improve & Fix grid endpoint response time (#49969) +- Update time duration format (#49914) +- Fix Dashboard overflow and handle no status tasks (#49964) +- Fix timezone setting for logical date input on Trigger Run form (#49662) +- Help ``pip`` with avoiding resolution too deep issues in Python 3.12 (#49853) +- Bugfix: backfill dry run does not use same timezone as create backfill (#49911) +- Fix Edit Connection when connection is imported (#49989) +- Bugfix: Filtering items from a mapped task is broken (#50011) +- Fix Dashboard for queued DagRuns (#49961) +- Fix backwards-compat import path for ``BashSensor`` (#49935) +- Apply task group sorting based on webserver config in grid structure response (#49418) +- Render custom ``map_index_template`` on task completion (#49809) +- Fix ``ContinuousTimetable`` false triggering when last run ends in future (#45175) +- Make Trigger Dag form warning more obvious (#49981) +- Restore task hover and selection indicators in the Grid view (#50050) +- Fix datetime validation for backfills (#50116) +- Fix duration charts (#50094) +- Fix DAG node selections (#50095) +- UI: Fix date range field alignment (#50086) +- Add auto-refresh for ``Stats`` (#50088) +- UI: Fixes validation error and adds error indicator for Params form (#50127) +- fix: wrap overflowing texts of asset events (#50173) +- Add audit log extra to table and improve UX (#50100) +- Handle map indexes for Mapped ``TaskGroup`` (#49996) +- Do not use introspection in migration to fix offline SQL generation (#49873) +- Fix operator extra links for mapped tasks (#50238) +- Fix backfill form (#50249)(#50243) +- UI: Fix operator overflow in graph (#50252) +- UI: Pass ``mapIndex`` to clear the relevant task instances. (#50256) +- Fix markdown rendering on dag docs (#50142) Miscellaneous """"""""""""" -- Bump webpack from 5.76.0 to 5.94.0 in /airflow/www (#41879) -- Adding rel property to hyperlinks in logs (#41783) -- Field Deletion Warning when editing Connections (#41504) -- Make Scarf usage reporting in major+minor versions and counters in buckets (#41900) -- Lower down universal-pathlib minimum to 0.2.2 (#41943) -- Protect against None components of universal pathlib xcom backend (#41938) + +- Add ``STRAIGHT_JOIN`` prefix for MySQL query optimization in ``get_sorted_triggers`` (#46303) +- Ensure ``sqlalchemy[asyncio]`` extra is in core deps (#49452) +- Remove unused constant ``HANDLER_SUPPORTS_TRIGGERER`` (#49370) +- Remove sort indicators on XCom table to avoid confusion (#49547) +- Remove ``gitpython`` as a core dependency (#49537) +- Bump ``@babel/runtime`` from ``7.26.0`` to ``7.27.0`` (#49479) +- Add backwards compatibility shim for ``get_current_context`` (#49630) +- AIP-38: enhance layout for ``RunBackfillForm`` (#49609) +- AIP-38: merge Backfill and Trigger Dag Run (#49490) +- Add count to Stats Cards in Dashboard (#49519) +- Add auto-refresh to health section for live updates. 
(#49645) +- Tweak Execution API OpenAPI spec to improve code Generation (#49700) +- Stricter validation for ``backfill_id`` (#49691)(#49716) +- Add ``SimpleAllAdminMiddleware`` to allow api usage without auth header in request (#49599) +- Bump ``react-router`` and ``react-router-dom`` from 7.4.0 to 7.5.2 (#49742) +- Remove reference to ``root_dag_id`` in dagbag and restore logic (#49668) +- Fix a few SqlAlchemy deprecation warnings (#49477) +- Make default execution server URL be relative to API Base URL (#49747)(#49782) +- Common ``airflow.cfg`` files across all containers in default ``docker-compose.yaml`` (#49681) +- Add redirects for old operators location to standard provider (#49776) +- Bump packaging from 24.2 to 25.0 in ``/airflow-core`` (#49512) +- Move some non-core dependencies to the ``apache-airflow`` meta package (#49846) +- Add more lower-bind limits to address resolution too deep (#49860) +- UI: Add counts to pool bar (#49894) +- Add type hints for ``@task.kuberenetes_cmd`` (#46913) +- Bump ``vite`` from ``5.4.17`` to ``5.4.19`` for Airflow UI (#49162)(#50074) +- Add ``map_index`` filter option to ``GetTICount`` and ``GetTaskStates`` (#49818) +- Add ``stats`` ui endpoint (#49985) +- Add link to tag to filter dags associated with the tag (#49680) +- Add keyboard shortcut for full screen and wrap in logs. (#50008) +- Update graph node styling to decrease border width on tasks in UI (#50047) (#50073) +- Allow non-string valid JSON values in Variable import. (#49844) +- Bump min versions of crucial providers (#50076) +- Add ``state`` attribute to ``RuntimeTaskInstance`` for easier ``ti.state`` access in Task Context (#50031) +- Move SQS message queue to Amazon provider (#50057) +- Execution API: Improve task instance logging with structlog context (#50120) +- Add ``dag_run_conf`` to ``RunBackfillForm`` (#49763) +- Refactor Dashboard to enhance layout (#50026) +- Add the download button on the assets page (#50045) +- Add ``dateInterval`` validation and error handling (#50072) +- Add ``Task Instances [{map_index}]`` tab to mapped task details (#50085) +- Add focus view on grid and graph on second click (#50125) +- Add formatted extra to asset events (#50124) +- Move webserver expose config to api section (#50209) Doc Only Changes """""""""""""""" -- Remove Debian bullseye support (#41569) -- Add an example for auth with ``keycloak`` (#41791) +- Remove flask application configuration from docs for AF3 (#49393) +- Docker compose: airflow-cli to depend on airflow common services (#49318) +- Better upgrade docs about flask/fab plugins in Airflow 3 (#49632)(#49614)(#49628) +- Various Airflow 3.0 Release notes & Updating guide docs updates (#49623)(#49401)(#49654)(#49663)(#49988)(#49954)(#49840)(#50195)(#50264) +- Add using the rest api by referring to ``security/api.rst`` (#49675) +- Add correct redirects for rest api and upgrade docs (#49764) +- Update ``max_consecutive_failed_dag_runs`` default value to zero in TaskSDK dag (#49795) (#49803) +- Fix spacing issues in params example dag (``example_params_ui_tutorial``) (#49905) +- Doc: Fix Kubernetes duplicated version in maintenance policy (#50030) +- Fix links to source examples in Airflow docs (#50082) +- Update ruff instructions for migration checks (#50232) +- Fix example of backfill command (#50222) +- Update docs for running behind proxy for Content-Security-Policy (#50236) -Airflow 2.10.0 (2024-08-15) ---------------------------- +Airflow 3.0.0 (2025-04-22) +-------------------------- +We are proud to announce the General 
Availability of Apache Airflow 3.0 — the most significant release in the project's +history. This version introduces a service-oriented architecture, a stable DAG authoring interface, expanded support for +event-driven and ML workflows, and a fully modernized UI built on React. Airflow 3.0 reflects years of community +investment and lays the foundation for the next era of scalable, modular orchestration. -Significant Changes -^^^^^^^^^^^^^^^^^^^ +Highlights +^^^^^^^^^^ -Scarf based telemetry: Airflow now collects telemetry data (#39510) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Airflow integrates Scarf to collect basic usage data during operation. Deployments can opt out of data collection by -setting the ``[usage_data_collection]enabled`` option to ``False``, or the ``SCARF_ANALYTICS=false`` environment variable. +- **Service-Oriented Architecture**: A new Task Execution API and ``airflow api-server`` enable task execution in remote environments with improved isolation and flexibility (AIP-72). -Datasets no longer trigger inactive DAGs (#38891) -""""""""""""""""""""""""""""""""""""""""""""""""" +- **Edge Executor**: A new executor that supports distributed, event-driven, and edge-compute workflows (AIP-69), now generally available. -Previously, when a DAG was paused or removed, incoming dataset events would still -trigger it, and the DAG would run when it was unpaused or added back in a DAG -file. This has been changed; a DAG's dataset schedule can now only be satisfied -by events that occur while the DAG is active. While this is a breaking change, -the previous behavior is considered a bug. +- **Stable Authoring Interface**: DAG authors should now use the new ``airflow.sdk`` namespace to import core DAG constructs like ``@dag``, ``@task``, and ``DAG``. -The behavior of time-based scheduling is unchanged, including the timetable part -of ``DatasetOrTimeSchedule``. +- **Scheduler-Managed Backfills**: Backfills are now scheduled and tracked like regular DAG runs, with native UI and API support (AIP-78). -``try_number`` is no longer incremented during task execution (#39336) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +- **DAG Versioning**: Airflow now tracks structural changes to DAGs over time, enabling inspection of historical DAG definitions via the UI and API (AIP-66). -Previously, the try number (``try_number``) was incremented at the beginning of task execution on the worker. This was problematic for many reasons. -For one, it meant that the try number was incremented when it was not supposed to be, namely when resuming from reschedule or deferral. It also resulted in -the try number being "wrong" when the task had not yet started. The workarounds for these two issues caused a lot of confusion. - -Now, instead, the try number for a task run is determined at the time the task is scheduled, does not change in flight, and is never decremented. -So after the task runs, the observed try number remains the same as it was when the task was running; only when there is a "new try" will the try number be incremented again. - -One consequence of this change is that, if users were "manually" running tasks (e.g. by calling ``ti.run()`` directly, or via the command line ``airflow tasks run``), -the try number will no longer be incremented. Airflow assumes that tasks are always run after being scheduled by the scheduler, so we do not regard this as a breaking change.
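To illustrate the stabilized behavior described above, here is a minimal sketch (the DAG, task, and printed message are illustrative and not part of the release notes):

.. code-block:: python

    # Illustrative sketch: the try number observed inside a running task is now
    # fixed when the task is scheduled and no longer shifts mid-flight
    # (e.g. when resuming from reschedule or deferral).
    import pendulum
    from airflow.decorators import dag, task


    @dag(schedule=None, start_date=pendulum.datetime(2024, 1, 1), catchup=False)
    def try_number_demo():
        @task(retries=2)
        def log_attempt(ti=None):
            # ti.try_number stays constant for the duration of this attempt;
            # it only increases when the scheduler creates a new try.
            print(f"running attempt {ti.try_number}")

        log_attempt()


    try_number_demo()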
- -``/logout`` endpoint in FAB Auth Manager is now CSRF protected (#40145) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The ``/logout`` endpoint's method in FAB Auth Manager has been changed from ``GET`` to ``POST`` in all existing -AuthViews (``AuthDBView``, ``AuthLDAPView``, ``AuthOAuthView``, ``AuthOIDView``, ``AuthRemoteUserView``), and -now includes CSRF protection to enhance security and prevent unauthorized logouts. - -OpenTelemetry Traces for Apache Airflow (#37948). -""""""""""""""""""""""""""""""""""""""""""""""""" -This new feature adds the capability for Apache Airflow to emit 1) system traces of the scheduler, -triggerer, executor, and processor, and 2) DAG run traces for deployed DAG runs, in OpenTelemetry format. Previously, only metrics were emitted in OpenTelemetry format. -This feature adds richer data for users who use the OpenTelemetry standard to emit and send their trace data to OTLP-compatible endpoints. - -Decorator for Task Flow ``(@skip_if, @run_if)`` to make it simple to decide whether or not to skip a Task. (#41116) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -This feature adds a decorator to make it simple to skip a Task. - -Using Multiple Executors Concurrently (#40701) -"""""""""""""""""""""""""""""""""""""""""""""" -Previously known as hybrid executors, this new feature allows Airflow to use multiple executors concurrently. DAGs, or even individual tasks, can be configured -to use a specific executor that suits their needs best. A single DAG can contain tasks all using different executors. Please see the Airflow documentation for -more details. Note: This feature is still experimental. See `documentation on Executor `_ for a more detailed description. - -New Features -"""""""""""" -- AIP-61 Hybrid Execution (`AIP-61 `_) -- AIP-62 Getting Lineage from Hook Instrumentation (`AIP-62 `_) -- AIP-64 TaskInstance Try History (`AIP-64 `_) -- AIP-44 Internal API (`AIP-44 `_) -- Enable ending the task directly from the triggerer without going into the worker. (#40084) -- Extend dataset dependencies (#40868) -- Feature/add token authentication to internal api (#40899) -- Add DatasetAlias to support dynamic Dataset Event Emission and Dataset Creation (#40478) -- Add example DAGs for inlet_events (#39893) -- Implement ``accessors`` to read dataset events defined as inlet (#39367) -- Decorator for Task Flow, to make it simple to decide whether or not to skip a Task.
(#41116) -- Add start execution from triggerer support to dynamic task mapping (#39912) -- Add try_number to log table (#40739) -- Added ds_format_locale method in macros which allows localizing datetime formatting using Babel (#40746) -- Add DatasetAlias to support dynamic Dataset Event Emission and Dataset Creation (#40478, #40723, #40809, #41264, #40830, #40693, #41302) -- Use sentinel to mark dag as removed on re-serialization (#39825) -- Add parameter for the last number of queries to the DB in DAG file processing stats (#40323) -- Add prototype version dark mode for Airflow UI (#39355) -- Add ability to mark some tasks as successful in ``dag test`` (#40010) -- Allow use of callable for template_fields (#37028) -- Filter running/failed and active/paused dags on the home page (#39701) -- Add metrics about task CPU and memory usage (#39650) -- UI changes for DAG Re-parsing feature (#39636) -- Add Scarf based telemetry (#39510, #41318) -- Add dag re-parsing request endpoint (#39138) -- Redirect to new DAGRun after trigger from Grid view (#39569) -- Display ``endDate`` in task instance tooltip. (#39547) -- Implement ``accessors`` to read dataset events defined as inlet (#39367, #39893) -- Add color to log lines in UI for errors and warnings based on keywords (#39006) -- Add Rendered k8s pod spec tab to ti details view (#39141) -- Make audit log before/after filterable (#39120) -- Consolidate grid collapse actions to a single full screen toggle (#39070) -- Implement Metadata to emit runtime extra (#38650) -- Add executor field to the DB and parameter to the operators (#38474) -- Implement context accessor for DatasetEvent extra (#38481) -- Add dataset event info to dag graph (#41012) -- Add button to toggle datasets on/off in dag graph (#41200) -- Add ``run_if`` & ``skip_if`` decorators (#41116) -- Add dag_stats rest api endpoint (#41017) -- Add listeners for Dag import errors (#39739) -- Allowing DateTimeSensorAsync, FileSensor and TimeSensorAsync to start execution from trigger during dynamic task mapping (#41182) - - -Improvements -"""""""""""" -- Allow setting Dag Run resource into Dag Level permission: extends Dag's access_control feature to allow Dag Run resource permissions.
(#40703) -- Improve security and error handling for the internal API (#40999) -- Datasets UI Improvements (#40871) -- Change DAG Audit log tab to Event Log (#40967) -- Make standalone dag file processor work in DB isolation mode (#40916) -- Show only the source on the consumer DAG page and only triggered DAG run in the producer DAG page (#41300) -- Update metrics names to allow multiple executors to report metrics (#40778) -- Format DAG run count (#39684) -- Update styles for ``renderedjson`` component (#40964) -- Improve ATTRIBUTE_REMOVED sentinel to use class and more context (#40920) -- Make XCom display as react json (#40640) -- Replace usages of task context logger with the log table (#40867) -- Rollback for all retry exceptions (#40882) (#40883) -- Support rendering ObjectStoragePath value (#40638) -- Add try_number and map_index as params for log event endpoint (#40845) -- Rotate fernet key in batches to limit memory usage (#40786) -- Add gauge metric for 'last_num_of_db_queries' parameter (#40833) -- Set parallelism log messages to warning level for better visibility (#39298) -- Add error handling for encoding the dag runs (#40222) -- Use params instead of dag_run.conf in example DAG (#40759) -- Load Example Plugins with Example DAGs (#39999) -- Stop deferring TimeDeltaSensorAsync task when the target_dttm is in the past (#40719) -- Send important executor logs to task logs (#40468) -- Open external links in new tabs (#40635) -- Attempt to add ReactJSON view to rendered templates (#40639) -- Speeding up regex match time for custom warnings (#40513) -- Refactor DAG.dataset_triggers into the timetable class (#39321) -- add next_kwargs to StartTriggerArgs (#40376) -- Improve UI error handling (#40350) -- Remove double warning in CLI when config value is deprecated (#40319) -- Implement XComArg concat() (#40172) -- Added ``get_extra_dejson`` method with a nested parameter which allows you to specify whether you also want the nested JSON string to be deserialized (#39811) -- Add executor field to the task instance API (#40034) -- Support checking for db path absoluteness on Windows (#40069) -- Introduce StartTriggerArgs and prevent start trigger initialization in scheduler (#39585) -- Add task documentation to details tab in grid view (#39899) -- Allow executors to be specified with only the class name of the Executor (#40131) -- Remove obsolete conditional logic related to try_number (#40104) -- Allow Task Group Ids to be passed as branches in BranchMixIn (#38883) -- Javascript connection form will apply CodeMirror to all textareas dynamically (#39812) -- Determine needs_expansion at time of serialization (#39604) -- Add indexes on dag_id column in referencing tables to speed up deletion of dag records (#39638) -- Add task failed dependencies to details page (#38449) -- Remove webserver try_number adjustment (#39623) -- Implement slicing in lazy sequence (#39483) -- Unify lazy db sequence implementations (#39426) -- Add ``__getattr__`` to task decorator stub (#39425) -- Allow passing labels to FAB Views registered via Plugins (#39444) -- Simpler error message when trying to offline migrate with sqlite (#39441) -- Add soft_fail to TriggerDagRunOperator (#39173) -- Rename "dataset event" in context to use "outlet" (#39397) -- Resolve ``RemovedIn20Warning`` in ``airflow task`` command (#39244) -- Determine fail_stop on client side when db isolated (#39258) -- Refactor cloudpickle support in Python operators/decorators (#39270) -- Update trigger kwargs migration to specify existing_nullable
(#39361) -- Allowing tasks to start execution directly from triggerer without going to worker (#38674) -- Better ``db migrate`` error messages (#39268) -- Add stacklevel into the ``suppress_and_warn`` warning (#39263) -- Support searching by dag_display_name (#39008) -- Allow sort by on all fields in MappedInstances.tsx (#38090) -- Expose count of scheduled tasks in metrics (#38899) -- Use ``declarative_base`` from ``sqlalchemy.orm`` instead of ``sqlalchemy.ext.declarative`` (#39134) -- Add example DAG to demonstrate emitting approaches (#38821) -- Give ``on_task_instance_failed`` access to the error that caused the failure (#38155) -- Simplify dataset serialization (#38694) -- Add heartbeat recovery message to jobs (#34457) -- Remove select_column option in TaskInstance.get_task_instance (#38571) -- Don't create session in get_dag if not reading dags from database (#38553) -- Add a migration script for encrypted trigger kwargs (#38358) -- Implement render_templates on TaskInstancePydantic (#38559) -- Handle optional session in _refresh_from_db (#38572) -- Make type annotation less confusing in task_command.py (#38561) -- Use fetch_dagrun directly to avoid session creation (#38557) -- Added ``output_processor`` parameter to ``BashProcessor`` (#40843) -- Improve serialization for Database Isolation Mode (#41239) -- Only orphan non-orphaned Datasets (#40806) -- Adjust gantt width based on task history dates (#41192) -- Enable scrolling on legend with high number of elements. (#41187) - -Bug Fixes -""""""""" -- Bugfix for get_parsing_context() when run with LocalExecutor (#40738) -- Validating provider documentation urls before displaying in views (#40933) -- Move import to make PythonOperator work on Windows (#40424) -- Fix dataset_with_extra_from_classic_operator example DAG (#40747) -- Call listener on_task_instance_failed() after ti state is changed (#41053) -- Add ``never_fail`` in BaseSensor (#40915) -- Fix tasks API endpoint when DAG doesn't have ``start_date`` (#40878) -- Fix and adjust URL generation for UI grid and older runs (#40764) -- Rotate fernet key optimization (#40758) -- Fix class instance vs. class type in validate_database_executor_compatibility() call (#40626) -- Clean up dark mode (#40466) -- Validate expected types for args for DAG, BaseOperator and TaskGroup (#40269) -- Exponential Backoff Not Functioning in BaseSensorOperator Reschedule Mode (#39823) -- local task job: add timeout, to not kill on_task_instance_success listener prematurely (#39890) -- Move Post Execution Log Grouping behind Exception Print (#40146) -- Fix triggerer race condition in HA setting (#38666) -- Pass triggered or existing DAG Run logical date to DagStateTrigger (#39960) -- Passing ``external_task_group_id`` to ``WorkflowTrigger`` (#39617) -- ECS Executor: Set tasks to RUNNING state once active (#39212) -- Only heartbeat if necessary in backfill loop (#39399) -- Fix trigger kwarg encryption migration (#39246) -- Fix decryption of trigger kwargs when downgrading.
(#38743) -- Fix wrong link in TriggeredDagRuns (#41166) -- Pass MapIndex to LogLink component for external log systems (#41125) -- Add NonCachingRotatingFileHandler for worker task (#41064) -- Add argument include_xcom in method resolve an optional value (#41062) -- Sanitizing file names in example_bash_decorator DAG (#40949) -- Show dataset aliases in dependency graphs (#41128) -- Render Dataset Conditions in DAG Graph view (#41137) -- Add task duration plot across dagruns (#40755) -- Add start execution from trigger support for existing core sensors (#41021) -- add example dag for dataset_alias (#41037) -- Add dataset alias unique constraint and remove wrong dataset alias removing logic (#41097) -- Set "has_outlet_datasets" to true if "dataset alias" exists (#41091) -- Make HookLineageCollector group datasets by (#41034) -- Enhance start_trigger_args serialization (#40993) -- Refactor ``BaseSensorOperator`` to introduce ``skip_policy`` parameter (#40924) -- Fix viewing logs from triggerer when task is deferred (#41272) -- Refactor how triggered dag run url is replaced (#41259) -- Added support for additional sql alchemy session args (#41048) -- Allow empty list in TriggerDagRun failed_state (#41249) -- Clean up the exception handler when run_as_user is the airflow user (#41241) -- Collapse docs when clicked and folded (#41214) -- Update updated_at when saving to db as session.merge does not trigger on-update (#40782) -- Fix query count statistics when parsing DAG file (#41149) -- Method Resolution Order in operators without ``__init__`` (#41086) -- Ensure try_number incremented for empty operator (#40426) - -Miscellaneous -""""""""""""" -- Remove the Experimental flag from ``OTel`` Traces (#40874) -- Bump packaging version to 23.0 in order to fix issue with older otel (#40865) -- Simplify _auth_manager_is_authorized_map function (#40803) -- Use correct unknown executor exception in scheduler job (#40700) -- Add D1 ``pydocstyle`` rules to pyproject.toml (#40569) -- Enable enforcing ``pydocstyle`` rule D213 in ruff.
(#40448, #40464) -- Update ``Dag.test()`` to run with an executor if desired (#40205) -- Update jest and babel minor versions (#40203) -- Refactor BashOperator and Bash decorator for consistency and simplicity (#39871) -- Add ``AirflowInternalRuntimeError`` for raising ``non catchable`` errors (#38778) -- ruff version bump 0.4.5 (#39849) -- Bump ``pytest`` to 8.0+ (#39450) -- Remove stale comment about TI index (#39470) -- Configure ``back_populates`` between ``DagScheduleDatasetReference.dag`` and ``DagModel.schedule_dataset_references`` (#39392) -- Remove deprecation warnings in endpoints.py (#39389) -- Fix SQLA deprecations in Airflow core (#39211) -- Use class-bound attribute directly in SA (#39198, #39195) -- Fix stacklevel for TaskContextLogger (#39142) -- Capture warnings during collect DAGs (#39109) -- Resolve ``B028`` (no-explicit-stacklevel) in core (#39123) -- Rename model ``ImportError`` to ``ParseImportError`` to avoid shadowing the builtin exception (#39116) -- Add option to support cloudpickle in PythonVenv/External Operator (#38531) -- Suppress ``SubDagOperator`` examples warnings (#39057) -- Add log for running callback (#38892) -- Use ``model_dump`` instead of ``dict`` to serialize Pydantic V2 model (#38933) -- Widen cheat sheet column to avoid wrapping commands (#38888) -- Update ``hatchling`` to latest version (1.22.5) (#38780) -- bump uv to 0.1.29 (#38758) -- Add missing serializations found during provider tests fixing (#41252) -- Bump ``ws`` from 7.5.5 to 7.5.10 in /airflow/www (#40288) -- Improve typing for allowed/failed_states in TriggerDagRunOperator (#39855) - -Doc Only Changes -"""""""""""""""" -- Add ``filesystems`` and ``dataset-uris`` to "how to create your own provider" page (#40801) -- Fix (TM) to (R) in Airflow repository (#40783) -- Set ``otel_on`` to True in example airflow.cfg (#40712) -- Add warning for _AIRFLOW_PATCH_GEVENT (#40677) -- Update multi-team diagram proposal after Airflow 3 discussions (#40671) -- Add stronger warning that MSSQL is not supported and no longer functional (#40565) -- Fix misleading mac menu structure in howto (#40440) -- Update k8s supported version in docs (#39878) -- Add compatibility note for Listeners (#39544) -- Update edge label image in documentation example with the new graph view (#38802) -- Update UI doc screenshots (#38680) -- Add section "Manipulating queued dataset events through REST API" (#41022) -- Add information about lack of security guarantees for docker compose (#41072) -- Add links to example dags in use params section (#41031) -- Change ``task_id`` from ``send_email`` to ``send_email_notification`` in ``taskflow.rst`` (#41060) -- Remove unnecessary nginx redirect rule from reverse proxy documentation (#38953) - - - -Airflow 2.9.3 (2024-07-15) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Time unit for ``scheduled_duration`` and ``queued_duration`` changed (#37936) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -``scheduled_duration`` and ``queued_duration`` metrics are now emitted in milliseconds instead of seconds. - -By convention, all statsd metrics should be emitted in milliseconds; this is expected downstream, e.g. by the ``prometheus`` statsd-exporter.
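For context, a minimal sketch of the statsd timing convention referenced above, using the standard Python ``statsd`` client (the metric name, host, and value are made up for illustration):

.. code-block:: python

    # Illustrative only: statsd timers are conventionally reported in
    # milliseconds, which is what these Airflow metrics now emit.
    import statsd

    client = statsd.StatsClient("localhost", 8125, prefix="airflow")
    # A run that sat queued for 2.5 seconds is reported as 2500 ms.
    client.timing("dagrun.queued_duration.example_dag", 2500)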
- - -Support for OpenTelemetry Metrics is no longer "Experimental" (#40286) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Experimental support for OpenTelemetry was added in 2.7.0. Since then, fixes and improvements have been added, and we now announce the feature as stable. - - - -Bug Fixes -""""""""" -- Fix calendar view scroll (#40458) -- Validating provider description for urls in provider list view (#40475) -- Fix compatibility with old MySQL 8.0 (#40314) -- Fix dag (un)pausing won't work on environments where dag files are missing (#40345) -- Extra being passed to SQLAlchemy (#40391) -- Handle unsupported operand int + str when value of tag is int (job_id) (#40407) -- Fix TriggeredDagRunOperator triggered link (#40336) -- Add ``[webserver]update_fab_perms`` to deprecated configs (#40317) -- Swap dag run link from legacy graph to grid with graph tab (#40241) -- Change ``httpx`` to ``requests`` in ``file_task_handler`` (#39799) -- Fix import future annotations in venv jinja template (#40208) -- Ensures DAG params order regardless of backend (#40156) -- Use a join for TI notes in TI batch API endpoint (#40028) -- Improve trigger UI for string array format validation (#39993) -- Disable jinja2 rendering for doc_md (#40522) -- Skip checking sub dags list if taskinstance state is skipped (#40578) -- Recognize quotes when parsing urls in logs (#40508) - -Doc Only Changes -"""""""""""""""" -- Add notes about passing secrets via environment variables (#40519) -- Revamp some confusing log messages (#40334) -- Add more precise description of masking sensitive field names (#40512) -- Add slightly more detailed guidance about upgrading to the docs (#40227) -- Metrics allow_list complete example (#40120) -- Add warning to deprecated api docs that access control isn't applied (#40129) -- Simpler command to check local scheduler is alive (#40074) -- Add a note and an example clarifying the usage of DAG-level params (#40541) -- Fix highlight of example code in dags.rst (#40114) -- Add warning about the PostgresOperator being deprecated (#40662) -- Updating airflow download links to CDN based links (#40618) -- Fix import statement for DatasetOrTimetable example (#40601) -- Further clarify triage process (#40536) -- Fix param order in PythonOperator docstring (#40122) -- Update serializers.rst to mention that bytes are not supported (#40597) - -Miscellaneous -""""""""""""" -- Upgrade build installers and dependencies (#40177) -- Bump braces from 3.0.2 to 3.0.3 in /airflow/www (#40180) -- Upgrade to another version of trove-classifier (new CUDA classifiers) (#40564) -- Rename "try_number" increments that are unrelated to the airflow concept (#39317) -- Update trove classifiers to the latest version as build dependency (#40542) -- Upgrade to latest version of ``hatchling`` as build dependency (#40387) -- Fix bug in ``SchedulerJobRunner._process_executor_events`` (#40563) -- Remove logging for "blocked" events (#40446) - - - -Airflow 2.9.2 (2024-06-10) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -No significant changes. - -Bug Fixes -""""""""" -- Fix bug that makes ``AirflowSecurityManagerV2`` leave transactions in the ``idle in transaction`` state (#39935) -- Fix alembic auto-generation and rename mismatching constraints (#39032) -- Add the existing_nullable to the downgrade side of the migration (#39374) -- Fix Mark Instance state buttons stay disabled if user lacks permission (#37451).
(#38732) -- Use SKIP LOCKED instead of NOWAIT in mini scheduler (#39745) -- Remove DAG Run Add option from FAB view (#39881) -- Add max_consecutive_failed_dag_runs in API spec (#39830) -- Fix example_branch_operator failing in python 3.12 (#39783) -- Fetch served logs also when task attempt is up for retry and no remote logs available (#39496) -- Change dataset URI validation to raise warning instead of error in Airflow 2.9 (#39670) -- Visible DAG RUN doesn't point to the same dag run id (#38365) -- Refactor ``SafeDogStatsdLogger`` to use ``get_validator`` to enable pattern matching (#39370) -- Fix custom actions in security manager ``has_access`` (#39421) -- Fix HTTP 500 Internal Server Error if DAG is triggered with bad params (#39409) -- Fix static file caching is disabled in Airflow Webserver. (#39345) -- Fix TaskHandlerWithCustomFormatter now adds prefix only once (#38502) -- Do not provide deprecated ``execution_date`` in ``@apply_lineage`` (#39327) -- Add missing conn_id to string representation of ObjectStoragePath (#39313) -- Fix ``sql_alchemy_engine_args`` config example (#38971) -- Add Cache-Control "no-store" to all dynamically generated content (#39550) - -Miscellaneous -""""""""""""" -- Limit ``yandex`` provider to avoid ``mypy`` errors (#39990) -- Warn on mini scheduler failures instead of debug (#39760) -- Change type definition for ``provider_info_cache`` decorator (#39750) -- Better typing for BaseOperator ``defer`` (#39742) -- More typing in TimeSensor and TimeSensorAsync (#39696) -- Re-raise exception from strict dataset URI checks (#39719) -- Fix stacklevel for _log_state helper (#39596) -- Resolve SA warnings in migrations scripts (#39418) -- Remove unused index ``idx_last_scheduling_decision`` on ``dag_run`` table (#39275) - -Doc Only Changes -"""""""""""""""" -- Provide extra tip on labeling DynamicTaskMapping (#39977) -- Improve visibility of links / variables / other configs in Configuration Reference (#39916) -- Remove 'legacy' definition for ``CronDataIntervalTimetable`` (#39780) -- Update plugins.rst examples to use pyproject.toml over setup.py (#39665) -- Fix nit in pg set-up doc (#39628) -- Add Matomo to Tracking User Activity docs (#39611) -- Fix Connection.get -> Connection.get_connection_from_secrets (#39560) -- Adding note for provider dependencies (#39512) -- Update docker-compose command (#39504) -- Update note about restarting triggerer process (#39436) -- Updating S3LogLink with an invalid bucket link (#39424) -- Update testing_packages.rst (#38996) -- Add multi-team diagrams (#38861) - - - -Airflow 2.9.1 (2024-05-03) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Stackdriver logging bugfix requires Google provider ``10.17.0`` or later (#38071) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -If you use Stackdriver logging, you must use Google provider version ``10.17.0`` or later. Airflow ``2.9.1`` now passes ``gcp_log_name`` to the ``StackdriverTaskHandler`` instead of ``name``, and this will fail on earlier provider versions. - -This fixes a bug where the log name configured in ``[logging] remote_base_log_folder`` was overridden when Airflow configured logging, resulting in task logs going to the wrong destination.
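A hedged way to check the installed provider before upgrading (the distribution name is the real one; the version check itself is just a sketch):

.. code-block:: python

    # Sketch: confirm the Google provider meets the minimum required by this
    # fix (>= 10.17.0) before upgrading to Airflow 2.9.1.
    from importlib.metadata import version

    major, minor, *_ = (int(p) for p in version("apache-airflow-providers-google").split("."))
    assert (major, minor) >= (10, 17), "upgrade apache-airflow-providers-google to >= 10.17.0"

Note that version strings with non-numeric parts (e.g. release candidates) would need a real version parser such as ``packaging.version``.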
- - - -Bug Fixes -""""""""" -- Make task log messages include run_id (#39280) -- Copy menu_item ``href`` for nav bar (#39282) -- Fix trigger kwarg encryption migration (#39246, #39361, #39374) -- Add workaround for datetime-local input in ``firefox`` (#39261) -- Add Grid button to Task Instance view (#39223) -- Get served logs when remote or executor logs not available for non-running task try (#39177) -- Fixed side effect of menu filtering causing disappearing menus (#39229) -- Use grid view for Task Instance's ``log_url`` (#39183) -- Improve task filtering ``UX`` (#39119) -- Improve rendered_template ``ux`` in react dag page (#39122) -- Graph view improvements (#38940) -- Check that the dataset<>task exists before trying to render graph (#39069) -- Hostname was "redacted", not "redact"; remove it when there is no context (#39037) -- Check whether ``AUTH_ROLE_PUBLIC`` is set in ``check_authentication`` (#39012) -- Move rendering of ``map_index_template`` so it renders for failed tasks as long as it was defined before the point of failure (#38902) -- ``Undeprecate`` ``BaseXCom.get_one`` method for now (#38991) -- Add ``inherit_cache`` attribute for ``CreateTableAs`` custom SA Clause (#38985) -- Don't wait for DagRun lock in mini scheduler (#38914) -- Fix calendar view with no DAG Run (#38964) -- Changed the background color of external task in graph (#38969) -- Fix dag run selection (#38941) -- Fix ``SAWarning`` 'Coercing Subquery object into a select() for use in IN()' (#38926) -- Fix implicit ``cartesian`` product in AirflowSecurityManagerV2 (#38913) -- Fix problem that links in legacy log view cannot be clicked (#38882) -- Fix dag run link params (#38873) -- Use async db calls in WorkflowTrigger (#38689) -- Fix audit log events filter (#38719) -- Use ``methodtools.lru_cache`` instead of ``functools.lru_cache`` in class methods (#37757) -- Raise deprecated warning in ``airflow dags backfill`` only if ``-I`` / ``--ignore-first-depends-on-past`` provided (#38676) - -Miscellaneous -""""""""""""" -- ``TriggerDagRunOperator`` deprecate ``execution_date`` in favor of ``logical_date`` (#39285) -- Force to use Airflow Deprecation warnings categories on ``@deprecated`` decorator (#39205) -- Add warning about run/import Airflow under Windows (#39196) -- Update ``is_authorized_custom_view`` from auth manager to handle custom actions (#39167) -- Add in Trove classifiers Python 3.12 support (#39004) -- Use debug level for ``minischeduler`` skip (#38976) -- Bump ``undici`` from ``5.28.3`` to ``5.28.4`` in ``/airflow/www`` (#38751) - - -Doc Only Changes -"""""""""""""""" -- Fix supported k8s version in docs (#39172) -- Dynamic task mapping ``PythonOperator`` op_kwargs (#39242) -- Add link to ``user`` and ``role`` commands (#39224) -- Add ``k8s 1.29`` to supported version in docs (#39168) -- Data aware scheduling docs edits (#38687) -- Update ``DagBag`` class docstring to include all params (#38814) -- Correcting a taskflow example (#39015) -- Remove decorator from rendering fields example (#38827) - - - -Airflow 2.9.0 (2024-04-08) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -The following Listener API methods are considered stable and can be used in production systems (they were an experimental feature in older Airflow versions) (#36376): -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Lifecycle events: - -- ``on_starting`` -- ``before_stopping`` - -DagRun State
Change Events: - -- ``on_dag_run_running`` -- ``on_dag_run_success`` -- ``on_dag_run_failed`` - -TaskInstance State Change Events: - -- ``on_task_instance_running`` -- ``on_task_instance_success`` -- ``on_task_instance_failed`` - -Support for Microsoft SQL-Server for Airflow Meta Database has been removed (#36514) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -After `discussion `__ -and a `voting process `__, -Airflow's PMC members and Committers have reached a resolution to no longer maintain MsSQL as a -supported Database Backend. - -As of Airflow 2.9.0 support for MsSQL has been removed as an Airflow Database Backend. - -A migration script which can help migrate the database *before* upgrading to Airflow 2.9.0 is available in -`airflow-mssql-migration repo on Github `_. -Note that the migration script is provided without support and warranty. - -This does not affect the existing provider packages (operators and hooks); DAGs can still access and process data from MsSQL. - -Dataset URIs are now validated on input (#37005) -"""""""""""""""""""""""""""""""""""""""""""""""" - -Datasets must use a URI that conforms to the rules laid down in AIP-60, and the value -will be automatically normalized when the DAG file is parsed. See -`documentation on Datasets `_ for -a more detailed description of the rules. - -You may need to change your Dataset identifiers if they look like a URI, but are -used in a less mainstream way, such as relying on the URI's auth section, or -have a case-sensitive protocol name. - -The method ``get_permitted_menu_items`` in ``BaseAuthManager`` has been renamed ``filter_permitted_menu_items`` (#37627) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Add REST API actions to Audit Log events (#37734) -""""""""""""""""""""""""""""""""""""""""""""""""" - -The Audit Log ``event`` name for REST API events will be prepended with ``api.`` or ``ui.``, depending on whether it came from the Airflow UI or externally. - -Official support for Python 3.12 (#38025) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -There are a few caveats though: - -* Pendulum2 does not support Python 3.12. For Python 3.12 you need to use - `Pendulum 3 `_ - -* Minimum SQLAlchemy version supported when Pandas is installed for Python 3.12 is ``1.4.36`` released in - April 2022. Airflow 2.9.0 increases the minimum supported version of SQLAlchemy to ``1.4.36`` for all - Python versions. - -Not all Providers support Python 3.12. At the initial release of Airflow 2.9.0 the following providers -are released without support for Python 3.12: - - * ``apache.beam`` - pending on `Apache Beam support for 3.12 `_ - * ``papermill`` - pending the release of a Python 3.12 compatible papermill client version - `including this merged issue `_ - -Prevent large string objects from being stored in the Rendered Template Fields (#38094) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -There's now a limit to the length of data that can be stored in the Rendered Template Fields. -The limit is set to 4096 characters. If the data exceeds this limit, it will be truncated. You can change this limit -by setting the ``[core]max_templated_field_length`` configuration option in your airflow config.
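As a sketch of raising that limit, using Airflow's standard ``AIRFLOW__<SECTION>__<KEY>`` environment-variable convention (the value shown is arbitrary):

.. code-block:: python

    # Sketch: any airflow.cfg option can also be supplied via an environment
    # variable named AIRFLOW__<SECTION>__<KEY>; it must be set in the
    # environment before the Airflow components start.
    import os

    os.environ["AIRFLOW__CORE__MAX_TEMPLATED_FIELD_LENGTH"] = "8192"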
- -Change xcom table column value type to longblob for MySQL backend (#38401) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Xcom table column ``value`` type has changed from ``blob`` to ``longblob``. This will allow you to store relatively big data in Xcom, but the process can take a significant amount of time if you have a lot of large data stored in Xcom. - -To downgrade from revision: ``b4078ac230a1``, ensure that you don't have Xcom values larger than 65,535 bytes. Otherwise, you'll need to clean those rows or run ``airflow db clean xcom`` to clean the Xcom table. - -Stronger validation for key parameter defaults in taskflow context variables (#38015) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Because the taskflow implementation in conjunction with context variable defaults could generate invalid parameter orders, it is no longer accepted (and is now validated) that taskflow functions are defined with defaults -other than ``None``. If you have done this before, you will most likely see a broken DAG and an error message like -``Error message: Context key parameter my_param can't have a default other than None``. - -New Features -"""""""""""" -- Allow users to write dag_id and task_id in their national characters, added display name for dag / task (v2) (#38446) -- Prevent large objects from being stored in the RTIF (#38094) -- Use current time to calculate duration when end date is not present. (#38375) -- Add average duration mark line in task and dagrun duration charts. (#38214, #38434) -- Add button to manually create dataset events (#38305) -- Add ``Matomo`` as an option for analytics_tool. (#38221) -- Experimental: Support custom weight_rule implementation to calculate the TI priority_weight (#38222) -- Adding ability to automatically set DAG to off after X times it failed sequentially (#36935) -- Add dataset conditions to next run datasets modal (#38123) -- Add task log grouping to UI (#38021) -- Add dataset_expression to grid dag details (#38121) -- Introduce mechanism to support multiple executor configuration (#37635) -- Add color formatting for ANSI chars in logs from task executions (#37985) -- Add the dataset_expression as part of DagModel and DAGDetailSchema (#37826) -- Allow longer rendered_map_index (#37798) -- Inherit the run_ordering from DatasetTriggeredTimetable for DatasetOrTimeSchedule (#37775) -- Implement AIP-60 Dataset URI formats (#37005) -- Introducing Logical Operators for dataset conditional logic (#37101) -- Add post endpoint for dataset events (#37570) -- Show custom instance names for a mapped task in UI (#36797) -- Add excluded/included events to get_event_logs api (#37641) -- Add datasets to dag graph (#37604) -- Show dataset events above task/run details in grid view (#37603) -- Introduce new config variable to control whether DAG processor outputs to stdout (#37439) -- Make Datasets ``hashable`` (#37465) -- Add conditional logic for dataset triggering (#37016) -- Implement task duration page in react.
(#35863) -- Add ``queuedEvent`` endpoint to get/delete DatasetDagRunQueue (#37176) -- Support multiple XCom output in the BaseOperator (#37297) -- AIP-58: Add object storage backend for xcom (#37058) -- Introduce ``DatasetOrTimeSchedule`` (#36710) -- Add ``on_skipped_callback`` to ``BaseOperator`` (#36374) -- Allow override of hovered navbar colors (#36631) -- Create new Metrics with Tagging (#36528) -- Add support for openlineage to AFS and common.io (#36410) -- Introduce ``@task.bash`` TaskFlow decorator (#30176, #37875) - -Improvements -"""""""""""" -- More human friendly "show tables" output for db cleanup (#38654) -- Improve trigger assign_unassigned by merging alive_triggerer_ids and get_sorted_triggers queries (#38664) -- Add exclude/include events filters to audit log (#38506) -- Clean up unused triggers in a single query for all dialects except MySQL (#38663) -- Update Confirmation Logic for Config Changes on Sensitive Environments Like Production (#38299) -- Improve datasets graph UX (#38476) -- Only show latest dataset event timestamp after last run (#38340) -- Add button to clear only failed tasks in a dagrun. (#38217) -- Delete all old dag pages and redirect to grid view (#37988) -- Check task attribute before use in sentry.add_tagging() (#37143) -- Mysql change xcom value col type for MySQL backend (#38401) -- ``ExternalPythonOperator`` use version from ``sys.version_info`` (#38377) -- Replace too-broad exceptions in the Core (#38344) -- Add CLI support for bulk pause and resume of DAGs (#38265) -- Implement methods on TaskInstancePydantic and DagRunPydantic (#38295, #38302, #38303, #38297) -- Make filters bar collapsible and add a full screen toggle (#38296) -- Encrypt all trigger attributes (#38233, #38358, #38743) -- Upgrade react-table package.
Use with Audit Log table (#38092) -- Show if dag page filters are active (#38080) -- Add try number to mapped instance (#38097) -- Add retries to job heartbeat (#37541) -- Add REST API events to Audit Log (#37734) -- Make current working directory as templated field in BashOperator (#37968) -- Add calendar view to react (#37909) -- Add ``run_id`` column to log table (#37731) -- Add ``tryNumber`` to grid task instance tooltip (#37911) -- Session is not used in _do_render_template_fields (#37856) -- Improve MappedOperator property types (#37870) -- Remove provide_session decorator from TaskInstancePydantic methods (#37853) -- Ensure the "airflow.task" logger used for TaskInstancePydantic and TaskInstance (#37857) -- Better error message for internal api call error (#37852) -- Increase tooltip size of dag grid view (#37782) (#37805) -- Use named loggers instead of root logger (#37801) -- Add Run Duration in React (#37735) -- Avoid non-recommended usage of logging (#37792) -- Improve DateTimeTrigger typing (#37694) -- Make sure all unique run_ids render a task duration bar (#37717) -- Add Dag Audit Log to React (#37682) -- Add log event for auto pause (#38243) -- Better message for exception for templated base operator fields (#37668) -- Clean up webserver endpoints adding to audit log (#37580) -- Filter datasets graph by dag_id (#37464) -- Use new exception type inheriting BaseException for SIGTERMs (#37613) -- Refactor dataset class inheritance (#37590) -- Simplify checks for package versions (#37585) -- Filter Datasets by associated dag_ids (GET /datasets) (#37512) -- Enable "airflow tasks test" to run deferrable operator (#37542) -- Make datasets list/graph width adjustable (#37425) -- Speedup determine installed airflow version in ``ExternalPythonOperator`` (#37409) -- Add more task details from rest api (#37394) -- Add confirmation dialog box for DAG run actions (#35393) -- Added shutdown color to the STATE_COLORS (#37295) -- Remove legacy dag details page and redirect to grid (#37232) -- Order XCom entries by map index in API (#37086) -- Add data_interval_start and data_interval_end in dagrun create API endpoint (#36630) -- Making links in task logs as hyperlinks by preventing HTML injection (#36829) -- Improve ExternalTaskSensor Async Implementation (#36916) -- Make Datasets ``Pathlike`` (#36947) -- Simplify query for orphaned tasks (#36566) -- Add deferrable param in FileSensor (#36840) -- Run Trigger Page: Configurable number of recent configs (#36878) -- Merge ``nowait`` and skip_locked into with_row_locks (#36889) -- Return the specified field when get ``dag/dagRun`` in the REST API (#36641) -- Only iterate over the items if debug is enabled for DagFileProcessorManager (#36761) -- Add a fuzzy/regex pattern-matching for metric allow and block list (#36250) -- Allow custom columns in cli dags list (#35250) -- Make it possible to change the default cron timetable (#34851) -- Some improvements to Airflow IO code (#36259) -- Improve TaskInstance typing hints (#36487) -- Remove dependency of ``Connexion`` from auth manager interface (#36209) -- Refactor ExternalDagLink to not create ad hoc TaskInstances (#36135) - -Bug Fixes -""""""""" -- Load providers configuration when gunicorn workers start (#38795) -- Fix grid header rendering (#38720) -- Add a task instance dependency for mapped dependencies (#37498) -- Improve stability of remove_task_decorator function (#38649) -- Mark more fields on API as dump-only (#38616) -- Fix ``total_entries`` count on the event logs endpoint (#38625) -- 
Add padding to bottom of log block. (#38610) -- Properly serialize nested attrs classes (#38591) -- Fixing the ``tz`` in next run ID info (#38482) -- Show abandoned tasks in Grid View (#38511) -- Apply task instance mutation hook consistently (#38440) -- Override ``chakra`` styles to keep ``dropdowns`` in filter bar (#38456) -- Store duration in seconds and scale to handle the case when a value in the series has a larger unit than the preceding durations. (#38374) -- Don't allow defaults other than None in context parameters, and improve error message (#38015) -- Make postgresql default engine args comply with SA 2.0 (#38362) -- Add return statement to yield within a while loop in triggers (#38389) -- Ensure ``__exit__`` is called in decorator context managers (#38383) -- Make the method ``BaseAuthManager.is_authorized_custom_view`` abstract (#37915) -- Add upper limit to planned calendar events calculation (#38310) -- Fix Scheduler in daemon mode doesn't create PID at the specified location (#38117) -- Properly serialize TaskInstancePydantic and DagRunPydantic (#37855) -- Fix graph task state border color (#38084) -- Add back methods removed in security manager (#37997) -- Don't log "403" from worker serve-logs as "Unknown error". (#37933) -- Fix execution data validation error in ``/get_logs_with_metadata`` endpoint (#37756) -- Fix task duration selection (#37630) -- Refrain from passing ``encoding`` to the SQL engine in SQLAlchemy v2 (#37545) -- Fix 'implicitly coercing SELECT object to scalar subquery' in latest dag run statement (#37505) -- Clean up typing with max_execution_date query builder (#36958) -- Optimize max_execution_date query in single dag case (#33242) -- Fix list dags command for get_dagmodel is None (#36739) -- Load ``consuming_dags`` attr eagerly before dataset listener (#36247) - -Miscellaneous -""""""""""""" -- Remove display of param from the UI (#38660) -- Update log level to debug from warning about scheduled_duration metric (#38180) -- Use ``importlib_metadata`` with compat to Python 3.10/3.12 ``stdlib`` (#38366) -- Refactored ``__new__`` magic method of BaseOperatorMeta to avoid badly mixing classic and decorated operators (#37937) -- Use ``sys.version_info`` to determine Python Major.Minor (#38372) -- Add missing deprecated Fab auth manager (#38376) -- Remove unused loop variable from airflow package (#38308) -- Adding max consecutive failed dag runs info in UI (#38229) -- Bump minimum version of ``blinker`` and add it where required (#38140) -- Bump follow-redirects from 1.15.4 to 1.15.6 in /airflow/www (#38156) -- Bump Cryptography to ``> 39.0.0`` (#38112) -- Add Python 3.12 support (#36755, #38025, #36595) -- Avoid use of ``assert`` outside of the tests (#37718) -- Update ObjectStoragePath for universal_pathlib>=v0.2.2 (#37930) -- Resolve G004: Logging statement uses f-string (#37873) -- Update build and install dependencies. (#37910) -- Bump sanitize-html from 2.11.0 to 2.12.1 in /airflow/www (#37833) -- Update to latest installer versions.
(#37754) -- Deprecate smtp configs in airflow settings / local_settings (#37711) -- Deprecate PY* constants into the airflow module (#37575) -- Remove usage of deprecated ``flask._request_ctx_stack`` (#37522) -- Remove redundant ``login`` attribute in ``airflow.__init__.py`` (#37565) -- Upgrade to FAB 4.3.11 (#37233) -- Remove SCHEDULED_DEPS which is no longer used anywhere since 2.0.0 (#37140) -- Replace ``datetime.datetime.utcnow`` by ``airflow.utils.timezone.utcnow`` in core (#35448) -- Bump aiohttp min version to avoid CVE-2024-23829 and CVE-2024-23334 (#37110) -- Move config related to FAB auth manager to FAB provider (#36232) -- Remove MSSQL support from Airflow core (#36514) -- Remove ``is_authorized_cluster_activity`` from auth manager (#36175) -- Create FAB provider and move FAB auth manager in it (#35926) - -Doc Only Changes -"""""""""""""""" -- Improve timetable documentation (#38505) -- Reorder OpenAPI Spec tags alphabetically (#38717) -- Update UI screenshots in the documentation (#38680, #38403, #38438, #38435) -- Remove section as it's no longer true with dataset expressions PR (#38370) -- Refactor DatasetOrTimeSchedule timetable docs (#37771) -- Migrate executor docs to respective providers (#37728) -- Add directive to render a list of URI schemes (#37700) -- Add doc page with providers deprecations (#37075) -- Add a cross reference to security policy (#37004) -- Improve AIRFLOW__WEBSERVER__BASE_URL docs (#37003) -- Update faq.rst with (hopefully) clearer description of start_date (#36846) -- Update public interface doc re operators (#36767) -- Add ``exception`` to templates ref list (#36656) -- Add auth manager interface as public interface (#36312) -- Reference fab provider documentation in Airflow documentation (#36310) -- Create auth manager documentation (#36211) -- Update permission docs (#36120) -- Docstring improvement to _covers_every_hour (#36081) -- Add note that task instance, dag and lifecycle listeners are non-experimental (#36376) - - -Airflow 2.8.4 (2024-03-25) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -No significant changes. - -Bug Fixes -""""""""" -- Fix incorrect serialization of ``FixedTimezone`` (#38139) -- Fix excessive permission changing for log task handler (#38164) -- Fix task instances list link (#38096) -- Fix a bug where scheduler heartrate parameter was not used (#37992) -- Add padding to prevent grid horizontal scroll overlapping tasks (#37942) -- Fix hash caching in ``ObjectStoragePath`` (#37769) - -Miscellaneous -""""""""""""" -- Limit ``importlib_resources`` as it breaks ``pytest_rewrites`` (#38095, #38139) -- Limit ``pandas`` to ``<2.2`` (#37748) -- Bump ``croniter`` to fix an issue with 29 Feb cron expressions (#38198) - -Doc Only Changes -"""""""""""""""" -- Tell users what to do if their scanners find issues in the image (#37652) -- Add a section about debugging in Docker Compose with PyCharm (#37940) -- Update deferrable docs to clarify kwargs when trigger resumes operator (#38122) - -Airflow 2.8.3 (2024-03-11) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -The smtp provider is now pre-installed when you install Airflow.
(#37713) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Bug Fixes -""""""""" -- Add "MENU" permission in auth manager (#37881) -- Fix external_executor_id being overwritten (#37784) -- Make more MappedOperator members modifiable (#37828) -- Set parsing context dag_id in dag test command (#37606) - -Miscellaneous -""""""""""""" -- Remove useless methods from security manager (#37889) -- Improve code coverage for TriggerRuleDep (#37680) -- The SMTP provider is now preinstalled when installing Airflow (#37713) -- Bump min versions of openapi validators (#37691) -- Properly include ``airflow_pre_installed_providers.txt`` artifact (#37679) - -Doc Only Changes -"""""""""""""""" -- Clarify lack of sync between workers and scheduler (#37913) -- Simplify some docs around airflow_local_settings (#37835) -- Add section about local settings configuration (#37829) -- Fix docs of ``BranchDayOfWeekOperator`` (#37813) -- Write to secrets store is not supported by design (#37814) -- ``ERD`` generating doc improvement (#37808) -- Update incorrect config value (#37706) -- Update security model to clarify Connection Editing user's capabilities (#37688) -- Fix ImportError on examples dags (#37571) - -Airflow 2.8.2 (2024-02-26) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -The ``allowed_deserialization_classes`` flag now follows a glob pattern (#36147). -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -For example, if one wants to add the class ``airflow.tests.custom_class`` to the -``allowed_deserialization_classes`` list, it can be done by writing the full class -name (``airflow.tests.custom_class``) or a pattern such as the ones used in glob -search (e.g., ``airflow.*``, ``airflow.tests.*``). - -If you currently use a custom regexp path, make sure to rewrite it as a glob pattern. - -Alternatively, if you still wish to match it as a regexp pattern, add it under the new -list ``allowed_deserialization_classes_regexp`` instead. - -The audit_logs permissions have been updated for heightened security (#37501). -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -This was done under the policy that we do not want users like Viewer, Ops, -and other users apart from Admin to have access to audit_logs. The intention behind -this change is to restrict users with fewer permissions from viewing user details -like First Name, Email etc. from the audit_logs when they are not permitted to. - -The impact of this change is that the existing users with non-admin rights won't be able -to view or access the audit_logs, either from the Browse tab or from the DAG run. - -``AirflowTimeoutError`` is no longer caught by default through ``Exception`` (#35653). -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -``AirflowTimeoutError`` now inherits from ``BaseException`` instead of -``AirflowException``->``Exception``. -See https://docs.python.org/3/library/exceptions.html#exception-hierarchy - -This prevents code catching ``Exception`` from accidentally -catching ``AirflowTimeoutError`` and continuing to run. -``AirflowTimeoutError`` is an explicit intent to cancel the task, and should not -be caught in attempts to handle the error and return some default value. - -Catching ``AirflowTimeoutError`` is still possible by explicitly ``except``ing -``AirflowTimeoutError`` or ``BaseException``.
-This is discouraged, as it may allow the code to continue running even after -such cancellation requests. -Code that previously depended on performing strict cleanup in every situation -after catching ``Exception`` is advised to use ``finally`` blocks or -context managers to perform only the cleanup, and then automatically -re-raise the exception. -See similar considerations about catching ``KeyboardInterrupt`` in -https://docs.python.org/3/library/exceptions.html#KeyboardInterrupt - - -Bug Fixes -""""""""" -- Sort dag processing stats by last_runtime (#37302) -- Allow pre-population of trigger form values via URL parameters (#37497) -- Base date for fetching dag grid view must include selected run_id (#34887) -- Check permissions for ImportError (#37468) -- Move ``IMPORT_ERROR`` from DAG related permissions to view related permissions (#37292) -- Change ``AirflowTaskTimeout`` to inherit ``BaseException`` (#35653) -- Revert "Fix future DagRun rarely triggered by race conditions when max_active_runs reached its upper limit. (#31414)" (#37596) -- Change margin to padding so first task can be selected (#37527) -- Fix Airflow serialization for ``namedtuple`` (#37168) -- Fix bug with clicking url-unsafe tags (#37395) -- Set deterministic and new getter for ``Treeview`` function (#37162) -- Fix permissions of parent folders for log file handler (#37310) -- Fix permission check on DAGs when ``access_entity`` is specified (#37290) -- Fix the value of ``dateTimeAttrFormat`` constant (#37285) -- Resolve handler close race condition at triggerer shutdown (#37206) -- Fixing status icon alignment for various views (#36804) -- Remove superfluous ``@Sentry.enrich_errors`` (#37002) -- Use execution_date= param as a backup to base date for grid view (#37018) -- Handle SystemExit raised in the task. (#36986) -- Revoking audit_log permission from all users except admin (#37501) -- Fix broken regex for allowed_deserialization_classes (#36147) -- Fix the bug that affected the DAG end date. (#36144) -- Adjust node width based on task name length (#37254) -- fix: PythonVirtualenvOperator crashes if any python_callable function is defined in the same source as DAG (#37165) -- Fix collapsed grid width, line up selected bar with gantt (#37205) -- Adjust graph node layout (#37207) -- Revert the sequence of initializing configuration defaults (#37155) -- Displaying "actual" try number in TaskInstance view (#34635) -- Bugfix Triggering DAG with parameters is mandatory when show_trigger_form_if_no_params is enabled (#37063) -- Secret masker ignores passwords with special chars (#36692) -- Fix DagRuns with UPSTREAM_FAILED tasks get stuck in the backfill.
(#36954) -- Disable ``dryrun`` auto-fetch (#36941) -- Fix copy button on a DAG run's config (#36855) -- Fix bug introduced by replacing spaces by + in run_id (#36877) -- Fix webserver always redirecting to home page if user was not logged in (#36833) -- REST API set description on POST to ``/variables`` endpoint (#36820) -- Sanitize the conn_id to disallow potential script execution (#32867) -- Fix task id copy button copying wrong id (#34904) -- Fix security manager inheritance in fab provider (#36538) -- Avoid ``pendulum.from_timestamp`` usage (#37160) - -Miscellaneous -""""""""""""" -- Install latest docker ``CLI`` instead of specific one (#37651) -- Bump ``undici`` from ``5.26.3`` to ``5.28.3`` in ``/airflow/www`` (#37493) -- Add Python ``3.12`` exclusions in ``providers/pyproject.toml`` (#37404) -- Remove ``markdown`` from core dependencies (#37396) -- Remove unused ``pageSize`` method. (#37319) -- Add more-itertools as dependency of common-sql (#37359) -- Replace other ``Python 3.11`` and ``3.12`` deprecations (#37478) -- Include ``airflow_pre_installed_providers.txt`` into ``sdist`` distribution (#37388) -- Turn Pydantic into an optional dependency (#37320) -- Limit ``universal-pathlib`` to ``< 0.2.0`` (#37311) -- Allow running airflow against sqlite in-memory DB for tests (#37144) -- Add description to ``queue_when`` (#36997) -- Updated ``config.yml`` for environment variable ``sql_alchemy_connect_args`` (#36526) -- Bump min version of ``Alembic`` to ``1.13.1`` (#36928) -- Limit ``flask-session`` to ``<0.6`` (#36895) - -Doc Only Changes -"""""""""""""""" -- Fix upgrade docs to reflect true ``CLI`` flags available (#37231) -- Fix a bug in fundamentals doc (#37440) -- Add redirect for deprecated page (#37384) -- Fix the ``otel`` config descriptions (#37229) -- Update ``Objectstore`` tutorial with ``prereqs`` section (#36983) -- Add more precise description on avoiding generic ``package/module`` names (#36927) -- Add airflow version substitution into Docker Compose Howto (#37177) -- Add clarification about DAG author capabilities to security model (#37141) -- Move docs for cron basics to Authoring and Scheduling section (#37049) -- Link to release notes in the upgrade docs (#36923) -- Prevent templated field logic checks in ``__init__`` of operators automatically (#33786) - - -Airflow 2.8.1 (2024-01-19) -------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Target version for core dependency ``pendulum`` package set to 3 (#36281). -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Support for pendulum 2.1.2 will be kept for a while, presumably until the next feature version of Airflow. -It is advised to upgrade user code to use pendulum 3 as soon as possible. - -Pendulum 3 introduced some subtle incompatibilities that your code might rely on - for example, the -default rendering of dates is missing the ``T`` in the rendered date representation, which is not ISO8601 -compliant. If you rely on the default rendering of dates, you might need to adjust your code to use the -``isoformat()`` method to render dates in ISO8601 format. - -Airflow packaging specification follows modern Python packaging standards (#36537). -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -We standardized Airflow dependency configuration to follow the latest developments in Python packaging by -using ``pyproject.toml``.
Airflow is now compliant with those accepted PEPs:
-
-* `PEP-440 Version Identification and Dependency Specification <https://peps.python.org/pep-0440/>`__
-* `PEP-517 A build-system independent format for source trees <https://peps.python.org/pep-0517/>`__
-* `PEP-518 Specifying Minimum Build System Requirements for Python Projects <https://peps.python.org/pep-0518/>`__
-* `PEP-561 Distributing and Packaging Type Information <https://peps.python.org/pep-0561/>`__
-* `PEP-621 Storing project metadata in pyproject.toml <https://peps.python.org/pep-0621/>`__
-* `PEP-660 Editable installs for pyproject.toml based builds (wheel based) <https://peps.python.org/pep-0660/>`__
-* `PEP-685 Comparison of extra names for optional distribution dependencies <https://peps.python.org/pep-0685/>`__
-
-We also implement support for multiple license files, coming from a draft PEP that is not yet accepted but
-is already supported by ``hatchling``:
-
-* `PEP 639 Improving License Clarity with Better Package Metadata <https://peps.python.org/pep-0639/>`__
-
-This has almost no noticeable impact on users using modern Python packaging and development tools; generally
-speaking, Airflow should behave as it did before when installing it from PyPI, and it should be much easier to install
-it for development purposes using ``pip install -e ".[devel]"``.
-
-The differences from the user side are:
-
-* Airflow extras are now normalized to ``-`` (following PEP-685) instead of ``_`` and ``.``
-  (as it was before in some extras). When you install airflow with such extras (for example ``dbt.core`` or
-  ``all_dbs``) you should use ``-`` instead of ``_`` and ``.``.
-
-In most modern tools this will work in a backwards-compatible way, but in some older versions of those tools you might
-need to replace ``_`` and ``.`` with ``-``. You can also get warnings that the extra you are installing does not exist - but
-usually this warning is harmless and the extra is installed anyway. It is, however, recommended to switch to ``-``
-in your dependency specifications for all Airflow extras.
-
-* The released airflow package does not contain the ``devel``, ``devel-*``, ``doc`` and ``docs-gen`` extras.
-  Those extras are only available when you install Airflow from sources in ``--editable`` mode. This is
-  because those extras are only used for development and documentation building purposes and are not needed
-  when you install Airflow for production use. Those dependencies had unspecified and varying behaviour for
-  released packages anyway and you were not supposed to use them in released packages.
-
-* The ``all`` and ``all-*`` extras did not always work correctly when installing Airflow using constraints,
-  because they were also considered development-only dependencies. With this change, those extras handle
-  constraints properly and install correctly with them, pulling in the right set
-  of providers and dependencies when constraints are used.
-
-Graphviz dependency is now an optional one, not a required one (#36647).
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The ``graphviz`` dependency has been problematic as an Airflow required dependency - especially for
-ARM-based installations. Graphviz packages require binary graphviz libraries - which is already a
-limitation - but they also require the graphviz Python bindings to be built and installed.
-This does not work for older Linux installations but - more importantly - when you try to install the
-graphviz libraries for Python 3.8 or 3.9 on ARM M1 MacBooks, the packages fail to install because
-Python bindings compilation for M1 can only work for Python 3.10+.
-
-Technically, this is not a breaking change - the CLI to render DAGs is still there, and if you
-already have graphviz installed, it will continue working as it did before. If you do not have
-graphviz installed, the command will raise an error informing you that it is needed.
-
-Graphviz will remain installed for most users:
-
-* the Airflow Image will still contain the graphviz library, because
-  it is added there as an extra
-* when a previous version of Airflow has already been installed, the
-  graphviz library is already present and Airflow will
-  continue working as it did
-
-The only change is a fresh installation of the new version of Airflow, where graphviz will
-need to be specified as an extra or installed separately in order to enable the DAG rendering option.
-
-Bug Fixes
-"""""""""
-- Fix airflow-scheduler exiting with code 0 on exceptions (#36800)
-- Fix Callback exception when a removed task is the last one in the ``taskinstance`` list (#36693)
-- Allow anonymous user edit/show resource when set ``AUTH_ROLE_PUBLIC=admin`` (#36750)
-- Better error message when sqlite URL uses relative path (#36774)
-- Explicit string cast required to force integer-type run_ids to be passed as strings instead of integers (#36756)
-- Add log lookup exception for empty ``op`` subtypes (#35536)
-- Remove unused index on task instance (#36737)
-- Fix check on subclass for ``typing.Union`` in ``_infer_multiple_outputs`` for Python 3.10+ (#36728)
-- Make sure ``multiple_outputs`` is inferred correctly even when using ``TypedDict`` (#36652)
-- Add back FAB constant in legacy security manager (#36719)
-- Fix AttributeError when using ``Dagrun.update_state`` (#36712)
-- Do not let ``EventsTimetable`` schedule past events if ``catchup=False`` (#36134)
-- Support encryption for triggers parameters (#36492)
-- Fix the type hint for ``tis_query`` in ``_process_executor_events`` (#36655)
-- Redirect to index when user does not have permission to access a page (#36623)
-- Avoid using dict as default value in ``call_regular_interval`` (#36608)
-- Remove option to set a task instance to running state in UI (#36518)
-- Fix details tab not showing when using dynamic task mapping (#36522)
-- Raise error when ``DagRun`` fails while running ``dag test`` (#36517)
-- Refactor ``_manage_executor_state`` by refreshing TIs in batch (#36502)
-- Add flask config: ``MAX_CONTENT_LENGTH`` (#36401)
-- Fix get_leaves calculation for teardown in nested group (#36456)
-- Stop serializing timezone-naive datetime to timezone-aware datetime with UTC tz (#36379)
-- Make ``kubernetes`` decorator type annotation consistent with operator (#36405)
-- Fix Webserver returning 500 for POST requests to ``api/dag/*/dagrun`` from anonymous user (#36275)
-- Fix the required access for get_variable endpoint (#36396)
-- Fix datetime reference in ``DAG.is_fixed_time_schedule`` (#36370)
-- Fix AirflowSkipException message raised by BashOperator (#36354)
-- Allow PythonVirtualenvOperator.skip_on_exit_code to be zero (#36361)
-- Increase width of execution_date input in trigger.html (#36278)
-- Fix logging for pausing DAG (#36182)
-- Stop deserializing pickle when enable_xcom_pickling is False (#36255)
-- Check DAG read permission before accessing DAG code (#36257)
-- Enable mark task as failed/success always (#36254)
-- Create latest log dir symlink as relative link (#36019)
-- Fix Python-based decorators templating (#36103)
-
-Miscellaneous
-"""""""""""""
-- Rename concurrency label to max active tasks (#36691)
-- Restore function scoped ``httpx`` import in file_task_handler for performance (#36753)
-- Add support of Pendulum 3 (#36281)
-- Standardize airflow build process and switch to ``hatchling`` build backend (#36537)
-- Get rid of ``pyarrow-hotfix`` for ``CVE-2023-47248`` (#36697)
-- Make ``graphviz`` dependency optional (#36647)
-- Announce MSSQL support end in Airflow 2.9.0, add migration script hints (#36509)
-- Set min ``pandas`` dependency to 1.2.5 for all providers and airflow (#36698)
-- Bump follow-redirects from 1.15.3 to 1.15.4 in ``/airflow/www`` (#36700)
-- Provide the logger_name param to base hook in order to override the logger name (#36674)
-- Fix run type icon alignment with run type text (#36616)
-- Follow BaseHook connection fields method signature in FSHook (#36444)
-- Remove redundant ``docker`` decorator type annotations (#36406)
-- Straighten typing in workday timetable (#36296)
-- Use ``batch_is_authorized_dag`` to check if user has permission to read DAGs (#36279)
-- Replace deprecated get_accessible_dag_ids and use get_readable_dags in get_dag_warnings (#36256)
-
-Doc Only Changes
-""""""""""""""""
-- Metrics tagging documentation (#36627)
-- In docs use logical_date instead of deprecated execution_date (#36654)
-- Add section about live-upgrading Airflow (#36637)
-- Replace ``numpy`` example with practical exercise demonstrating top-level code (#35097)
-- Improve and add more complete description in the architecture diagrams (#36513)
-- Improve the error message displayed when there is a webserver error (#36570)
-- Update ``dags.rst`` with information on DAG pausing (#36540)
-- Update installation prerequisites after upgrading to Debian Bookworm (#36521)
-- Add description on the ways how users should approach DB monitoring (#36483)
-- Add branching based on mapped task group example to dynamic-task-mapping.rst (#36480)
-- Add further details to replacement documentation (#36485)
-- Use cards when describing priority weighting methods (#36411)
-- Update ``metrics.rst`` for param ``dagrun.schedule_delay`` (#36404)
-- Update admonitions in Python operator doc to reflect sentiment (#36340)
-- Improve audit_logs.rst (#36213)
-- Remove Redshift mention from the list of managed Postgres backends (#36217)
-
-Airflow 2.8.0 (2023-12-18)
---------------------------
-
-Significant Changes
-^^^^^^^^^^^^^^^^^^^
-
-Raw HTML code in DAG docs and DAG params descriptions is disabled by default (#35460)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-To ensure that no malicious javascript can be injected with DAG descriptions or trigger UI forms by DAG authors,
-a new parameter ``webserver.allow_raw_html_descriptions`` was added with a default value of ``False``.
-If you trust your DAG authors' code and want to allow using raw HTML in DAG descriptions and params, you can restore the previous
-behavior by setting the configuration value to ``True``.
-
-To ensure Airflow is secure by default, the raw HTML support in the trigger UI has been superseded by markdown support via
-the ``description_md`` attribute. If you have been using ``description_html`` please migrate to ``description_md``.
-The ``custom_html_form`` is now deprecated.
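-
-For illustration, the opt-out described above as a minimal ``airflow.cfg`` sketch (only flip this if you
-fully trust all of your DAG authors):
-
-.. code-block:: ini
-
-    [webserver]
-    # Restores the pre-2.8.0 behaviour of rendering raw HTML in DAG docs and
-    # params descriptions. The secure default is False.
-    allow_raw_html_descriptions = True
-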
-
-New Features
-""""""""""""
-- AIP-58: Add Airflow ObjectStore (AFS) (`AIP-58 `_)
-- Add XCom tab to Grid (#35719)
-- Add "literal" wrapper to disable field templating (#35017)
-- Add task context logging feature to allow forwarding messages to task logs (#32646, #32693, #35857)
-- Add Listener hooks for Datasets (#34418, #36247)
-- Allow override of navbar text color (#35505)
-- Add lightweight serialization for deltalake tables (#35462)
-- Add support for serialization of iceberg tables (#35456)
-- ``prev_end_date_success`` method access (#34528)
-- Add task parameter to set custom logger name (#34964)
-- Add pyspark decorator (#35247)
-- Add trigger as a valid option for the db clean command (#34908)
-- Add decorators for external and venv python branching operators (#35043)
-- Allow PythonVenvOperator using other index url (#33017)
-- Add Python Virtualenv Operator Caching (#33355)
-- Introduce a generic export for containerized executor logging (#34903)
-- Add ability to clear downstream tis in ``List Task Instances`` view (#34529)
-- Attribute ``clear_number`` to track DAG run being cleared (#34126)
-- Add BranchPythonVirtualenvOperator (#33356)
-- Add CLI notification commands to providers (#33116)
-- Use dropdown instead of buttons when there are more than 10 retries in log tab (#36025)
-
-Improvements
-""""""""""""
-- Add ``multiselect`` to run state in grid view (#35403)
-- Fix warning message in ``Connection.get_hook`` in case of ImportError (#36005)
-- Add processor_subdir to import_error table to handle multiple dag processors (#35956)
-- Consolidate the call of change_state to fail or success in the core executors (#35901)
-- Relax mandatory requirement for start_date when schedule=None (#35356)
-- Use ExitStack to manage mutation of secrets_backend_list in dag.test (#34620)
-- improved visibility of tasks in ActionModal for ``taskinstance`` (#35810)
-- Create directories based on ``AIRFLOW_CONFIG`` path (#35818)
-- Implements ``JSON-string`` connection representation generator (#35723)
-- Move ``BaseOperatorLink`` into the separate module (#35032)
-- Set mark_end_on_close after set_context (#35761)
-- Move external logs links to top of react logs page (#35668)
-- Change terminal mode to ``cbreak`` in ``execute_interactive`` and handle ``SIGINT`` (#35602)
-- Make raw HTML descriptions configurable (#35460)
-- Allow email field to be templated (#35546)
-- Hide logical date and run id in trigger UI form (#35284)
-- Improved instructions for adding dependencies in TaskFlow (#35406)
-- Add optional exit code to list import errors (#35378)
-- Limit query result on DB rather than client in ``synchronize_log_template`` function (#35366)
-- Allow description to be passed in when using variables CLI (#34791)
-- Allow optional defaults in required fields with manual triggered dags (#31301)
-- Permitting airflow kerberos to run in different modes (#35146)
-- Refactor commands to unify daemon context handling (#34945)
-- Add extra fields to plugins endpoint (#34913)
-- Add description to pools view (#34862)
-- Move cli's Connection export and Variable export command print logic to a separate function (#34647)
-- Extract and reuse get_kerberos_principle func from get_kerberos_principle (#34936)
-- Change type annotation for ``BaseOperatorLink.operators`` (#35003)
-- Optimise and migrate to ``SA2-compatible`` syntax for TaskReschedule (#33720)
-- Consolidate the permissions name in SlaMissModelView (#34949)
-- Add debug log
saying what's being run to ``EventScheduler`` (#34808) -- Increase log reader stream loop sleep duration to 1 second (#34789) -- Resolve pydantic deprecation warnings re ``update_forward_refs`` (#34657) -- Unify mapped task group lookup logic (#34637) -- Allow filtering event logs by attributes (#34417) -- Make connection login and password TEXT (#32815) -- Ban import ``Dataset`` from ``airflow`` package in codebase (#34610) -- Use ``airflow.datasets.Dataset`` in examples and tests (#34605) -- Enhance task status visibility (#34486) -- Simplify DAG trigger UI (#34567) -- Ban import AirflowException from airflow (#34512) -- Add descriptions for airflow resource config parameters (#34438) -- Simplify trigger name expression (#34356) -- Move definition of Pod*Exceptions to pod_generator (#34346) -- Add deferred tasks to the cluster_activity view Pools Slots (#34275) -- heartbeat failure log message fix (#34160) -- Rename variables for dag runs (#34049) -- Clarify new_state in OpenAPI spec (#34056) -- Remove ``version`` top-level element from docker compose files (#33831) -- Remove generic trigger cancelled error log (#33874) -- Use ``NOT EXISTS`` subquery instead of ``tuple_not_in_condition`` (#33527) -- Allow context key args to not provide a default (#33430) -- Order triggers by - TI priority_weight when assign unassigned triggers (#32318) -- Add metric ``triggerer_heartbeat`` (#33320) -- Allow ``airflow variables export`` to print to stdout (#33279) -- Workaround failing deadlock when running backfill (#32991) -- add dag_run_ids and task_ids filter for the batch task instance API endpoint (#32705) -- Configurable health check threshold for triggerer (#33089) -- Rework provider manager to treat Airflow core hooks like other provider hooks (#33051) -- Ensure DAG-level references are filled on unmap (#33083) -- Affix webserver access_denied warning to be configurable (#33022) -- Add support for arrays of different data types in the Trigger Form UI (#32734) -- Add a mechanism to warn if executors override existing CLI commands (#33423) - -Bug Fixes -""""""""" -- Account for change in UTC offset when calculating next schedule (#35887) -- Add read access to pools for viewer role (#35352) -- Fix gantt chart queued duration when queued_dttm is greater than start_date for deferred tasks (#35984) -- Avoid crushing container when directory is not found on rm (#36050) -- Update ``reset_user_sessions`` to work from either CLI or web (#36056) -- Fix UI Grid error when DAG has been removed. 
(#36028)
-- Change Trigger UI to use HTTP POST in web ui (#36026)
-- Fix airflow db shell needing an extra key press to exit (#35982)
-- Change dag grid ``overscroll`` behaviour to auto (#35717)
-- Run triggers inline with dag test (#34642)
-- Add ``borderWidthRight`` to grid for Firefox ``scrollbar`` (#35346)
-- Fix for infinite recursion due to secrets_masker (#35048)
-- Fix write ``processor_subdir`` in serialized_dag table (#35661)
-- Reload configuration for standalone dag file processor (#35725)
-- Long custom operator name overflows in graph view (#35382)
-- Add try_number to extra links query (#35317)
-- Prevent assignment of non JSON serializable values to DagRun.conf dict (#35096)
-- Numeric values in DAG details are incorrectly rendered as timestamps (#35538)
-- Fix Scheduler and triggerer crashes in daemon mode when statsd metrics are enabled (#35181)
-- Infinite UI redirection loop after deactivating an active user (#35486)
-- Bug fix fetch_callback of Partial Subset DAG (#35256)
-- Fix DagRun data interval for DeltaDataIntervalTimetable (#35391)
-- Fix query in ``get_dag_by_pickle`` util function (#35339)
-- Fix TriggerDagRunOperator failing to trigger subsequent runs when reset_dag_run=True (#35429)
-- Fix weight_rule property type in ``mappedoperator`` (#35257)
-- Bugfix/prevent concurrency with cached venv (#35258)
-- Fix dag serialization (#34042)
-- Fix py/url-redirection by replacing request.referrer by get_redirect() (#34237)
-- Fix updating variables during variable imports (#33932)
-- Use Literal from airflow.typing_compat in Airflow core (#33821)
-- Always use ``Literal`` from ``typing_extensions`` (#33794)
-
-Miscellaneous
-"""""""""""""
-- Change default MySQL client to MariaDB (#36243)
-- Mark daskexecutor provider as removed (#35965)
-- Bump FAB to ``4.3.10`` (#35991)
-- Rename ``Connection.to_json_dict`` to ``Connection.to_dict`` (#35894)
-- Upgrade to Pydantic v2 (#35551)
-- Bump ``moto`` version to ``>= 4.2.9`` (#35687)
-- Use ``pyarrow-hotfix`` to mitigate CVE-2023-47248 (#35650)
-- Bump ``axios`` from ``0.26.0 to 1.6.0`` in ``/airflow/www/`` (#35624)
-- Make docker decorator's type annotation consistent with operator (#35568)
-- Add default to ``navbar_text_color`` and ``rm`` condition in style (#35553)
-- Avoid initiating session twice in ``dag_next_execution`` (#35539)
-- Work around typing issue in examples and providers (#35494)
-- Enable ``TCH004`` and ``TCH005`` rules (#35475)
-- Humanize log output about retrieved DAG(s) (#35338)
-- Switch from Black to Ruff formatter (#35287)
-- Upgrade to Flask Application Builder 4.3.9 (#35085)
-- D401 Support (#34932, #34933)
-- Use requires_access to check read permission on dag instead of checking it explicitly (#34940)
-- Deprecate lazy import ``AirflowException`` from airflow (#34541)
-- View util refactoring on mapped stuff use cases (#34638)
-- Bump ``postcss`` from ``8.4.25 to 8.4.31`` in ``/airflow/www`` (#34770)
-- Refactor Sqlalchemy queries to 2.0 style (#34763, #34665, #32883, #35120)
-- Change to lazy loading of io in pandas serializer (#34684)
-- Use ``airflow.models.dag.DAG`` in examples (#34617)
-- Use airflow.exceptions.AirflowException in core (#34510)
-- Check that dag_ids passed in request are consistent (#34366)
-- Refactors to make code better (#34278, #34113, #34110, #33838, #34260, #34409, #34377, #34350)
-- Suspend qubole provider (#33889)
-- Generate Python API docs for Google ADS (#33814)
-- Improve importing in modules (#33812, #33811,
#33810, #33806, #33807, #33805, #33804, #33803, - #33801, #33799, #33800, #33797, #33798, #34406, #33808) -- Upgrade Elasticsearch to 8 (#33135) - -Doc Only Changes -"""""""""""""""" -- Add support for tabs (and other UX components) to docs (#36041) -- Replace architecture diagram of Airflow with diagrams-generated one (#36035) -- Add the section describing the security model of DAG Author capabilities (#36022) -- Enhance docs for zombie tasks (#35825) -- Reflect drop/add support of DB Backends versions in documentation (#35785) -- More detail on mandatory task arguments (#35740) -- Indicate usage of the ``re2`` regex engine in the .airflowignore documentation. (#35663) -- Update ``best-practices.rst`` (#35692) -- Update ``dag-run.rst`` to mention Airflow's support for extended cron syntax through croniter (#35342) -- Update ``webserver.rst`` to include information of supported OAuth2 providers (#35237) -- Add back dag_run to docs (#35142) -- Fix ``rst`` code block format (#34708) -- Add typing to concrete taskflow examples (#33417) -- Add concrete examples for accessing context variables from TaskFlow tasks (#33296) -- Fix links in security docs (#33329) - - -Airflow 2.7.3 (2023-11-06) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -No significant changes. - -Bug Fixes -""""""""" -- Fix pre-mature evaluation of tasks in mapped task group (#34337) -- Add TriggerRule missing value in rest API (#35194) -- Fix Scheduler crash looping when dagrun creation fails (#35135) -- Fix test connection with ``codemirror`` and extra (#35122) -- Fix usage of cron-descriptor since BC in v1.3.0 (#34836) -- Fix ``get_plugin_info`` for class based listeners. (#35022) -- Some improvements/fixes for dag_run and task_instance endpoints (#34942) -- Fix the dags count filter in webserver home page (#34944) -- Return only the TIs of the readable dags when ~ is provided as a dag_id (#34939) -- Fix triggerer thread crash in daemon mode (#34931) -- Fix wrong plugin schema (#34858) -- Use DAG timezone in TimeSensorAsync (#33406) -- Mark tasks with ``all_skipped`` trigger rule as ``skipped`` if any task is in ``upstream_failed`` state (#34392) -- Add read only validation to read only fields (#33413) - -Misc/Internal -""""""""""""" -- Improve testing harness to separate DB and non-DB tests (#35160, #35333) -- Add pytest db_test markers to our tests (#35264) -- Add pip caching for faster build (#35026) -- Upper bound ``pendulum`` requirement to ``<3.0`` (#35336) -- Limit ``sentry_sdk`` to ``1.33.0`` (#35298) -- Fix subtle bug in mocking processor_agent in our tests (#35221) -- Bump ``@babel/traverse`` from ``7.16.0 to 7.23.2`` in ``/airflow/www`` (#34988) -- Bump ``undici`` from ``5.19.1 to 5.26.3`` in ``/airflow/www`` (#34971) -- Remove unused set from ``SchedulerJobRunner`` (#34810) -- Remove warning about ``max_tis per query > parallelism`` (#34742) -- Improve modules import in Airflow core by moving some of them into a type-checking block (#33755) -- Fix tests to respond to Python 3.12 handling of utcnow in sentry-sdk (#34946) -- Add ``connexion<3.0`` upper bound (#35218) -- Limit Airflow to ``< 3.12`` (#35123) -- update moto version (#34938) -- Limit WTForms to below ``3.1.0`` (#34943) - -Doc Only Changes -"""""""""""""""" -- Fix variables substitution in Airflow Documentation (#34462) -- Added example for defaults in ``conn.extras`` (#35165) -- Update datasets.rst issue with running example code (#35035) -- Remove ``mysql-connector-python`` from recommended MySQL driver (#34287) -- Fix 
syntax error in task dependency ``set_downstream`` example (#35075) -- Update documentation to enable test connection (#34905) -- Update docs errors.rst - Mention sentry "transport" configuration option (#34912) -- Update dags.rst to put SubDag deprecation note right after the SubDag section heading (#34925) -- Add info on getting variables and config in custom secrets backend (#34834) -- Document BaseExecutor interface in more detail to help users in writing custom executors (#34324) -- Fix broken link to ``airflow_local_settings.py`` template (#34826) -- Fixes python_callable function assignment context kwargs example in params.rst (#34759) -- Add missing multiple_outputs=True param in the TaskFlow example (#34812) -- Remove extraneous ``'>'`` in provider section name (#34813) -- Fix imports in extra link documentation (#34547) - - - -Airflow 2.7.2 (2023-10-12) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -No significant changes - - -Bug Fixes -""""""""" -- Check if the lower of provided values are sensitives in config endpoint (#34712) -- Add support for ZoneInfo and generic UTC to fix datetime serialization (#34683, #34804) -- Fix AttributeError: 'Select' object has no attribute 'count' during the airflow db migrate command (#34348) -- Make dry run optional for patch task instance (#34568) -- Fix non deterministic datetime deserialization (#34492) -- Use iterative loop to look for mapped parent (#34622) -- Fix is_parent_mapped value by checking if any of the parent ``taskgroup`` is mapped (#34587) -- Avoid top-level airflow import to avoid circular dependency (#34586) -- Add more exemptions to lengthy metric list (#34531) -- Fix dag warning endpoint permissions (#34355) -- Fix task instance access issue in the batch endpoint (#34315) -- Correcting wrong time showing in grid view (#34179) -- Fix www ``cluster_activity`` view not loading due to ``standaloneDagProcessor`` templating (#34274) -- Set ``loglevel=DEBUG`` in 'Not syncing ``DAG-level`` permissions' (#34268) -- Make param validation consistent for DAG validation and triggering (#34248) -- Ensure details panel is shown when any tab is selected (#34136) -- Fix issues related to ``access_control={}`` (#34114) -- Fix not found ``ab_user`` table in the CLI session (#34120) -- Fix FAB-related logging format interpolation (#34139) -- Fix query bug in ``next_run_datasets_summary`` endpoint (#34143) -- Fix for TaskGroup toggles for duplicated labels (#34072) -- Fix the required permissions to clear a TI from the UI (#34123) -- Reuse ``_run_task_session`` in mapped ``render_template_fields`` (#33309) -- Fix scheduler logic to plan new dag runs by ignoring manual runs (#34027) -- Add missing audit logs for Flask actions add, edit and delete (#34090) -- Hide Irrelevant Dag Processor from Cluster Activity Page (#33611) -- Remove infinite animation for pinwheel, spin for 1.5s (#34020) -- Restore rendering of provider configuration with ``version_added`` (#34011) - -Doc Only Changes -"""""""""""""""" -- Clarify audit log permissions (#34815) -- Add explanation for Audit log users (#34814) -- Import ``AUTH_REMOTE_USER`` from FAB in WSGI middleware example (#34721) -- Add information about drop support MsSQL as DB Backend in the future (#34375) -- Document how to use the system's timezone database (#34667) -- Clarify what landing time means in doc (#34608) -- Fix screenshot in dynamic task mapping docs (#34566) -- Fix class reference in Public Interface documentation (#34454) -- Clarify var.value.get and var.json.get 
usage (#34411) -- Schedule default value description (#34291) -- Docs for triggered_dataset_event (#34410) -- Add DagRun events (#34328) -- Provide tabular overview about trigger form param types (#34285) -- Add link to Amazon Provider Configuration in Core documentation (#34305) -- Add "security infrastructure" paragraph to security model (#34301) -- Change links to SQLAlchemy 1.4 (#34288) -- Add SBOM entry in security documentation (#34261) -- Added more example code for XCom push and pull (#34016) -- Add state utils to Public Airflow Interface (#34059) -- Replace markdown style link with rst style link (#33990) -- Fix broken link to the "UPDATING.md" file (#33583) - -Misc/Internal -""""""""""""" -- Update min-sqlalchemy version to account for latest features used (#34293) -- Fix SesssionExemptMixin spelling (#34696) -- Restrict ``astroid`` version < 3 (#34658) -- Fail dag test if defer without triggerer (#34619) -- Fix connections exported output (#34640) -- Don't run isort when creating new alembic migrations (#34636) -- Deprecate numeric type python version in PythonVirtualEnvOperator (#34359) -- Refactor ``os.path.splitext`` to ``Path.*`` (#34352, #33669) -- Replace = by is for type comparison (#33983) -- Refactor integer division (#34180) -- Refactor: Simplify comparisons (#34181) -- Refactor: Simplify string generation (#34118) -- Replace unnecessary dict comprehension with dict() in core (#33858) -- Change "not all" to "any" for ease of readability (#34259) -- Replace assert by if...raise in code (#34250, #34249) -- Move default timezone to except block (#34245) -- Combine similar if logic in core (#33988) -- Refactor: Consolidate import and usage of random (#34108) -- Consolidate importing of os.path.* (#34060) -- Replace sequence concatenation by unpacking in Airflow core (#33934) -- Refactor unneeded 'continue' jumps around the repo (#33849, #33845, #33846, #33848, #33839, #33844, #33836, #33842) -- Remove [project] section from ``pyproject.toml`` (#34014) -- Move the try outside the loop when this is possible in Airflow core (#33975) -- Replace loop by any when looking for a positive value in core (#33985) -- Do not create lists we don't need (#33519) -- Remove useless string join from core (#33969) -- Add TCH001 and TCH002 rules to pre-commit to detect and move type checking modules (#33865) -- Add cancel_trigger_ids to to_cancel dequeue in batch (#33944) -- Avoid creating unnecessary list when parsing stats datadog tags (#33943) -- Replace dict.items by dict.values when key is not used in core (#33940) -- Replace lambdas with comprehensions (#33745) -- Improve modules import in Airflow core by some of them into a type-checking block (#33755) -- Refactor: remove unused state - SHUTDOWN (#33746, #34063, #33893) -- Refactor: Use in-place .sort() (#33743) -- Use literal dict instead of calling dict() in Airflow core (#33762) -- remove unnecessary map and rewrite it using list in Airflow core (#33764) -- Replace lambda by a def method in Airflow core (#33758) -- Replace type func by ``isinstance`` in fab_security manager (#33760) -- Replace single quotes by double quotes in all Airflow modules (#33766) -- Merge multiple ``isinstance`` calls for the same object in a single call (#33767) -- Use a single statement with multiple contexts instead of nested statements in core (#33769) -- Refactor: Use f-strings (#33734, #33455) -- Refactor: Use random.choices (#33631) -- Use ``str.splitlines()`` to split lines (#33592) -- Refactor: Remove useless str() calls (#33629) -- Refactor: 
Improve detection of duplicates and list sorting (#33675)
-- Simplify conditions on ``len()`` (#33454)
-
-
-Airflow 2.7.1 (2023-09-07)
---------------------------
-
-Significant Changes
-^^^^^^^^^^^^^^^^^^^
-
-CronTriggerTimetable is now less aggressive when trying to skip a run (#33404)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-When setting ``catchup=False``, CronTriggerTimetable no longer skips a run if
-the scheduler does not query the timetable immediately after the previous run
-has been triggered.
-
-This should not affect scheduling in most cases, but it can change the behaviour if
-a DAG is paused and unpaused to manually skip a run. Previously, the timetable (with
-``catchup=False``) would only start a run after the DAG is unpaused, but with this
-change, the scheduler will try to look a little bit back to schedule the
-previous run that covers a part of the period when the DAG was paused. This
-means you will need to keep a DAG paused longer (namely, for the entire cron
-period to pass) to really skip a run.
-
-Note that this is also the behaviour exhibited by various other cron-based
-scheduling tools, such as ``anacron``.
-
-``conf.set()`` becomes case insensitive to match ``conf.get()`` behavior (#33452)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-Also, ``conf.get()`` will now break if used with non-string parameters.
-
-``conf.set(section, key, value)`` used to be case sensitive, i.e. ``conf.set("SECTION", "KEY", value)``
-and ``conf.set("section", "key", value)`` were stored as two distinct configurations.
-This was inconsistent with the behavior of ``conf.get(section, key)``, which always converted the section and key to lower case.
-
-As a result, configuration options set with upper case characters in the section or key were unreachable.
-That's why we are now converting the section and key to lower case in ``conf.set`` too.
-
-We also changed the behavior of ``conf.get()`` a bit. It used to allow objects that are not strings in the section or key.
-Doing this will now result in an exception. For instance, ``conf.get("section", 123)`` needs to be replaced with ``conf.get("section", "123")``.
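-
-A short sketch of the new behaviour (the ``webserver`` / ``dag_default_view`` option is only an
-arbitrary example here):
-
-.. code-block:: python
-
-    from airflow.configuration import conf
-
-    # Section and key are now lower-cased on write, matching conf.get()
-    conf.set("WEBSERVER", "DAG_DEFAULT_VIEW", "graph")
-    assert conf.get("webserver", "dag_default_view") == "graph"
-
-    # Non-string sections/keys are no longer accepted
-    conf.get("webserver", "123")  # correct
-    # conf.get("webserver", 123)  # now raises an exception
-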
- -Bug Fixes -""""""""" -- Ensure that tasks wait for running indirect setup (#33903) -- Respect "soft_fail" for core async sensors (#33403) -- Differentiate 0 and unset as a default param values (#33965) -- Raise 404 from Variable PATCH API if variable is not found (#33885) -- Fix ``MappedTaskGroup`` tasks not respecting upstream dependency (#33732) -- Add limit 1 if required first value from query result (#33672) -- Fix UI DAG counts including deleted DAGs (#33778) -- Fix cleaning zombie RESTARTING tasks (#33706) -- ``SECURITY_MANAGER_CLASS`` should be a reference to class, not a string (#33690) -- Add back ``get_url_for_login`` in security manager (#33660) -- Fix ``2.7.0 db`` migration job errors (#33652) -- Set context inside templates (#33645) -- Treat dag-defined access_control as authoritative if defined (#33632) -- Bind engine before attempting to drop archive tables (#33622) -- Add a fallback in case no first name and last name are set (#33617) -- Sort data before ``groupby`` in TIS duration calculation (#33535) -- Stop adding values to rendered templates UI when there is no dagrun (#33516) -- Set strict to True when parsing dates in webserver views (#33512) -- Use ``dialect.name`` in custom SA types (#33503) -- Do not return ongoing dagrun when a ``end_date`` is less than ``utcnow`` (#33488) -- Fix a bug in ``formatDuration`` method (#33486) -- Make ``conf.set`` case insensitive (#33452) -- Allow timetable to slightly miss catchup cutoff (#33404) -- Respect ``soft_fail`` argument when ``poke`` is called (#33401) -- Create a new method used to resume the task in order to implement specific logic for operators (#33424) -- Fix DagFileProcessor interfering with dags outside its ``processor_subdir`` (#33357) -- Remove the unnecessary ``
`` text in Provider's view (#33326) -- Respect ``soft_fail`` argument when ExternalTaskSensor runs in deferrable mode (#33196) -- Fix handling of default value and serialization of Param class (#33141) -- Check if the dynamically-added index is in the table schema before adding (#32731) -- Fix rendering the mapped parameters when using ``expand_kwargs`` method (#32272) -- Fix dependencies for celery and opentelemetry for Python 3.8 (#33579) - -Misc/Internal -""""""""""""" -- Bring back ``Pydantic`` 1 compatibility (#34081, #33998) -- Use a trimmed version of README.md for PyPI (#33637) -- Upgrade to ``Pydantic`` 2 (#33956) -- Reorganize ``devel_only`` extra in Airflow's setup.py (#33907) -- Bumping ``FAB`` to ``4.3.4`` in order to fix issues with filters (#33931) -- Add minimum requirement for ``sqlalchemy to 1.4.24`` (#33892) -- Update version_added field for configs in config file (#33509) -- Replace ``OrderedDict`` with plain dict (#33508) -- Consolidate import and usage of itertools (#33479) -- Static check fixes (#33462) -- Import utc from datetime and normalize its import (#33450) -- D401 Support (#33352, #33339, #33337, #33336, #33335, #33333, #33338) -- Fix some missing type hints (#33334) -- D205 Support - Stragglers (#33301, #33298, #33297) -- Refactor: Simplify code (#33160, #33270, #33268, #33267, #33266, #33264, #33292, #33453, #33476, #33567, - #33568, #33480, #33753, #33520, #33623) -- Fix ``Pydantic`` warning about ``orm_mode`` rename (#33220) -- Add MySQL 8.1 to supported versions. (#33576) -- Remove ``Pydantic`` limitation for version < 2 (#33507) - -Doc only changes -""""""""""""""""" -- Add documentation explaining template_ext (and how to override it) (#33735) -- Explain how users can check if python code is top-level (#34006) -- Clarify that DAG authors can also run code in DAG File Processor (#33920) -- Fix broken link in Modules Management page (#33499) -- Fix secrets backend docs (#33471) -- Fix config description for base_log_folder (#33388) - - -Airflow 2.7.0 (2023-08-18) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Remove Python 3.7 support (#30963) -"""""""""""""""""""""""""""""""""" -As of now, Python 3.7 is no longer supported by the Python community. -Therefore, to use Airflow 2.7.0, you must ensure your Python version is -either 3.8, 3.9, 3.10, or 3.11. - -Old Graph View is removed (#32958) -"""""""""""""""""""""""""""""""""" -The old Graph View is removed. The new Graph View is the default view now. - -The trigger UI form is skipped in web UI if no parameters are defined in a DAG (#33351) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -If you are using ``dag_run.conf`` dictionary and web UI JSON entry to run your DAG you should either: - -* `Add params to your DAG `_ -* Enable the new configuration ``show_trigger_form_if_no_params`` to bring back old behaviour - -The "db init", "db upgrade" commands and "[database] load_default_connections" configuration options are deprecated (#33136). -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Instead, you should use "airflow db migrate" command to create or upgrade database. This command will not create default connections. -In order to create default connections you need to run "airflow connections create-default-connections" explicitly, -after running "airflow db migrate". 
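-
-The replacement flow described above, sketched as shell commands:
-
-.. code-block:: bash
-
-    # Creates or upgrades the database schema; does not create default connections
-    airflow db migrate
-
-    # Create the default connections explicitly, if you still need them
-    airflow connections create-default-connections
-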
-
-In case of SMTP SSL connection, the context now uses the "default" context (#33070)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The "default" context is Python's ``default_ssl_context`` instead of the previously used "none". The
-``default_ssl_context`` provides a balance between security and compatibility, but in some cases,
-when certificates are old, self-signed or misconfigured, it might not work. This can be configured
-by setting "ssl_context" in the "email" configuration of Airflow.
-
-Setting it to "none" brings back the "none" setting that was used in Airflow 2.6 and before,
-but it is not recommended for security reasons, as this setting disables validation of certificates and allows MITM attacks.
-
-Disable default allowing the testing of connections in UI, API and CLI (#32052)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-For security reasons, the test connection functionality is disabled by default across Airflow UI,
-API and CLI. The availability of the functionality can be controlled by the
-``test_connection`` flag in the ``core`` section of the Airflow
-configuration (``airflow.cfg``). It can also be controlled by the
-environment variable ``AIRFLOW__CORE__TEST_CONNECTION``.
-
-The following values are accepted for this config param:
-
-1. ``Disabled``: Disables the test connection functionality and
-   disables the Test Connection button in the UI. This is also the
-   default value set in the Airflow configuration.
-2. ``Enabled``: Enables the test connection functionality and
-   activates the Test Connection button in the UI.
-3. ``Hidden``: Disables the test connection functionality and
-   hides the Test Connection button in the UI.
-
-For more information on the capabilities of users, see the documentation:
-https://airflow.apache.org/docs/apache-airflow/stable/security/security_model.html#capabilities-of-authenticated-ui-users
-It is strongly advised to **not** enable the feature until you make sure that only
-highly trusted UI/API users have "edit connection" permissions.
-
-The ``xcomEntries`` API disables support for the ``deserialize`` flag by default (#32176)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-For security reasons, the ``/dags/*/dagRuns/*/taskInstances/*/xcomEntries/*``
-API endpoint now disables the ``deserialize`` option to deserialize arbitrary
-XCom values in the webserver. For backward compatibility, server admins may set
-the ``[api] enable_xcom_deserialize_support`` config to *True* to enable the
-flag and restore backward compatibility.
-
-However, it is strongly advised to **not** enable the feature, and to perform
-deserialization at the client side instead.
-
-Change of the default Celery application name (#32526)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The default name of the Celery application changed from ``airflow.executors.celery_executor`` to ``airflow.providers.celery.executors.celery_executor``.
-
-You should change both your configuration and Health check command to use the new name:
- * in configuration (the ``celery_app_name`` configuration in the ``celery`` section) use ``airflow.providers.celery.executors.celery_executor``
- * in your Health check command use ``airflow.providers.celery.executors.celery_executor.app``
-
-
-The default value for ``scheduler.max_tis_per_query`` is changed from 512 to 16 (#32572)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-This change is expected to make the Scheduler more responsive.
-
-``scheduler.max_tis_per_query`` needs to be lower than ``core.parallelism``.
-If both were previously left at their default values, the effective default value of ``scheduler.max_tis_per_query`` was 32
-(because it was capped at ``core.parallelism``).
-
-To keep the behavior as close as possible to the old config, one can set ``scheduler.max_tis_per_query = 0``,
-in which case it will always use the value of ``core.parallelism`` (see the configuration sketch at the end of this section).
-
-Some executors have been moved to corresponding providers (#32767)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-In order to use the executors, you need to install the providers:
-
-* for Celery executors you need to install the ``apache-airflow-providers-celery`` package >= 3.3.0
-* for Kubernetes executors you need to install the ``apache-airflow-providers-cncf-kubernetes`` package >= 7.4.0
-* for Dask executors you need to install the ``apache-airflow-providers-daskexecutor`` package in any version
-
-You can also achieve this by installing airflow with the ``[celery]``, ``[cncf.kubernetes]``, ``[daskexecutor]`` extras respectively.
-
-Users who base their images on the ``apache/airflow`` reference image (not slim) should be unaffected - the base
-reference image comes with all three providers installed.
-
-Improvement Changes
-^^^^^^^^^^^^^^^^^^^
-
-PostgreSQL only improvement: Added index on taskinstance table (#30762)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-This index seems to have a great positive effect in a setup with tens of millions of such rows.
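-
-For illustration, the ``scheduler.max_tis_per_query`` change described above as a minimal
-``airflow.cfg`` sketch:
-
-.. code-block:: ini
-
-    [scheduler]
-    # New default is 16 (was 512). 0 means: always use the value of
-    # core.parallelism, which is closest to the pre-2.7.0 behaviour.
-    max_tis_per_query = 0
-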
- -New Features -"""""""""""" -- Add OpenTelemetry to Airflow (`AIP-49 `_) -- Trigger Button - Implement Part 2 of AIP-50 (#31583) -- Removing Executor Coupling from Core Airflow (`AIP-51 `_) -- Automatic setup and teardown tasks (`AIP-52 `_) -- OpenLineage in Airflow (`AIP-53 `_) -- Experimental: Add a cache to Variable and Connection when called at dag parsing time (#30259) -- Enable pools to consider deferred tasks (#32709) -- Allows to choose SSL context for SMTP connection (#33070) -- New gantt tab (#31806) -- Load plugins from providers (#32692) -- Add ``BranchExternalPythonOperator`` (#32787, #33360) -- Add option for storing configuration description in providers (#32629) -- Introduce Heartbeat Parameter to Allow ``Per-LocalTaskJob`` Configuration (#32313) -- Add Executors discovery and documentation (#32532) -- Add JobState for job state constants (#32549) -- Add config to disable the 'deserialize' XCom API flag (#32176) -- Show task instance in web UI by custom operator name (#31852) -- Add default_deferrable config (#31712) -- Introducing ``AirflowClusterPolicySkipDag`` exception (#32013) -- Use ``reactflow`` for datasets graph (#31775) -- Add an option to load the dags from db for command tasks run (#32038) -- Add version of ``chain`` which doesn't require matched lists (#31927) -- Use operator_name instead of task_type in UI (#31662) -- Add ``--retry`` and ``--retry-delay`` to ``airflow db check`` (#31836) -- Allow skipped task state task_instance_schema.py (#31421) -- Add a new config for celery result_backend engine options (#30426) -- UI Add Cluster Activity Page (#31123, #32446) -- Adding keyboard shortcuts to common actions (#30950) -- Adding more information to kubernetes executor logs (#29929) -- Add support for configuring custom alembic file (#31415) -- Add running and failed status tab for DAGs on the UI (#30429) -- Add multi-select, proposals and labels for trigger form (#31441) -- Making webserver config customizable (#29926) -- Render DAGCode in the Grid View as a tab (#31113) -- Add rest endpoint to get option of configuration (#31056) -- Add ``section`` query param in get config rest API (#30936) -- Create metrics to track ``Scheduled->Queued->Running`` task state transition times (#30612) -- Mark Task Groups as Success/Failure (#30478) -- Add CLI command to list the provider trigger info (#30822) -- Add Fail Fast feature for DAGs (#29406) - -Improvements -"""""""""""" -- Improve graph nesting logic (#33421) -- Configurable health check threshold for triggerer (#33089, #33084) -- add dag_run_ids and task_ids filter for the batch task instance API endpoint (#32705) -- Ensure DAG-level references are filled on unmap (#33083) -- Add support for arrays of different data types in the Trigger Form UI (#32734) -- Always show gantt and code tabs (#33029) -- Move listener success hook to after SQLAlchemy commit (#32988) -- Rename ``db upgrade`` to ``db migrate`` and add ``connections create-default-connections`` (#32810, #33136) -- Remove old gantt chart and redirect to grid views gantt tab (#32908) -- Adjust graph zoom based on selected task (#32792) -- Call listener on_task_instance_running after rendering templates (#32716) -- Display execution_date in graph view task instance tooltip. 
(#32527) -- Allow configuration to be contributed by providers (#32604, #32755, #32812) -- Reduce default for max TIs per query, enforce ``<=`` parallelism (#32572) -- Store config description in Airflow configuration object (#32669) -- Use ``isdisjoint`` instead of ``not intersection`` (#32616) -- Speed up calculation of leaves and roots for task groups (#32592) -- Kubernetes Executor Load Time Optimizations (#30727) -- Save DAG parsing time if dag is not schedulable (#30911) -- Updates health check endpoint to include ``dag_processor`` status. (#32382) -- Disable default allowing the testing of connections in UI, API and CLI (#32052, #33342) -- Fix config var types under the scheduler section (#32132) -- Allow to sort Grid View alphabetically (#32179) -- Add hostname to triggerer metric ``[triggers.running]`` (#32050) -- Improve DAG ORM cleanup code (#30614) -- ``TriggerDagRunOperator``: Add ``wait_for_completion`` to ``template_fields`` (#31122) -- Open links in new tab that take us away from Airflow UI (#32088) -- Only show code tab when a task is not selected (#31744) -- Add descriptions for celery and dask cert configs (#31822) -- ``PythonVirtualenvOperator`` termination log in alert (#31747) -- Migration of all DAG details to existing grid view dag details panel (#31690) -- Add a diagram to help visualize timer metrics (#30650) -- Celery Executor load time optimizations (#31001) -- Update code style for ``airflow db`` commands to SQLAlchemy 2.0 style (#31486) -- Mark uses of md5 as "not-used-for-security" in FIPS environments (#31171) -- Add pydantic support to serde (#31565) -- Enable search in note column in DagRun and TaskInstance (#31455) -- Save scheduler execution time by adding new Index idea for dag_run (#30827) -- Save scheduler execution time by caching dags (#30704) -- Support for sorting DAGs by Last Run Date in the web UI (#31234) -- Better typing for Job and JobRunners (#31240) -- Add sorting logic by created_date for fetching triggers (#31151) -- Remove DAGs.can_create on access control doc, adjust test fixture (#30862) -- Split Celery logs into stdout/stderr (#30485) -- Decouple metrics clients and ``validators`` into their own modules (#30802) -- Description added for pagination in ``get_log`` api (#30729) -- Optimize performance of scheduling mapped tasks (#30372) -- Add sentry transport configuration option (#30419) -- Better message on deserialization error (#30588) - -Bug Fixes -""""""""" -- Remove user sessions when resetting password (#33347) -- ``Gantt chart:`` Use earliest/oldest ti dates if different than dag run start/end (#33215) -- Fix ``virtualenv`` detection for Python ``virtualenv`` operator (#33223) -- Correctly log when there are problems trying to ``chmod`` ``airflow.cfg`` (#33118) -- Pass app context to webserver_config.py (#32759) -- Skip served logs for non-running task try (#32561) -- Fix reload gunicorn workers (#32102) -- Fix future DagRun rarely triggered by race conditions when ``max_active_runs`` reached its upper limit. 
(#31414) -- Fix BaseOperator ``get_task_instances`` query (#33054) -- Fix issue with using the various state enum value in logs (#33065) -- Use string concatenation to prepend base URL for log_url (#33063) -- Update graph nodes with operator style attributes (#32822) -- Affix webserver access_denied warning to be configurable (#33022) -- Only load task action modal if user can edit (#32992) -- OpenAPI Spec fix nullable alongside ``$ref`` (#32887) -- Make the decorators of ``PythonOperator`` sub-classes extend its decorator (#32845) -- Fix check if ``virtualenv`` is installed in ``PythonVirtualenvOperator`` (#32939) -- Unwrap Proxy before checking ``__iter__`` in is_container() (#32850) -- Override base log folder by using task handler's base_log_folder (#32781) -- Catch arbitrary exception from run_job to prevent zombie scheduler (#32707) -- Fix depends_on_past work for dynamic tasks (#32397) -- Sort extra_links for predictable order in UI. (#32762) -- Fix prefix group false graph (#32764) -- Fix bad delete logic for dagruns (#32684) -- Fix bug in prune_dict where empty dict and list would be removed even in strict mode (#32573) -- Add explicit browsers list and correct rel for blank target links (#32633) -- Handle returned None when multiple_outputs is True (#32625) -- Fix returned value when ShortCircuitOperator condition is falsy and there is not downstream tasks (#32623) -- Fix returned value when ShortCircuitOperator condition is falsy (#32569) -- Fix rendering of ``dagRunTimeout`` (#32565) -- Fix permissions on ``/blocked`` endpoint (#32571) -- Bugfix, prevent force of unpause on trigger DAG (#32456) -- Fix data interval in ``cli.dags.trigger`` command output (#32548) -- Strip ``whitespaces`` from airflow connections form (#32292) -- Add timedelta support for applicable arguments of sensors (#32515) -- Fix incorrect default on ``readonly`` property in our API (#32510) -- Add xcom map_index as a filter to xcom endpoint (#32453) -- Fix CLI commands when custom timetable is used (#32118) -- Use WebEncoder to encode DagRun.conf in DagRun's list view (#32385) -- Fix logic of the skip_all_except method (#31153) -- Ensure dynamic tasks inside dynamic task group only marks the (#32354) -- Handle the cases that webserver.expose_config is set to non-sensitive-only instead of boolean value (#32261) -- Add retry functionality for handling process termination caused by database network issues (#31998) -- Adapt Notifier for sla_miss_callback (#31887) -- Fix XCOM view (#31807) -- Fix for "Filter dags by tag" flickering on initial load of dags.html (#31578) -- Fix where expanding ``resizer`` would not expanse grid view (#31581) -- Fix MappedOperator-BaseOperator attr sync check (#31520) -- Always pass named ``type_`` arg to drop_constraint (#31306) -- Fix bad ``drop_constraint`` call in migrations (#31302) -- Resolving problems with redesigned grid view (#31232) -- Support ``requirepass`` redis sentinel (#30352) -- Fix webserver crash when calling get ``/config`` (#31057) - -Misc/Internal -""""""""""""" -- Modify pathspec version restriction (#33349) -- Refactor: Simplify code in ``dag_processing`` (#33161) -- For now limit ``Pydantic`` to ``< 2.0.0`` (#33235) -- Refactor: Simplify code in models (#33181) -- Add elasticsearch group to pre-2.7 defaults (#33166) -- Refactor: Simplify dict manipulation in airflow/cli (#33159) -- Remove redundant dict.keys() call (#33158) -- Upgrade ruff to latest 0.0.282 version in pre-commits (#33152) -- Move openlineage configuration to provider (#33124) -- Replace 
State by TaskInstanceState in Airflow executors (#32627) -- Get rid of Python 2 numeric relics (#33050) -- Remove legacy dag code (#33058) -- Remove legacy task instance modal (#33060) -- Remove old graph view (#32958) -- Move CeleryExecutor to the celery provider (#32526, #32628) -- Move all k8S classes to ``cncf.kubernetes`` provider (#32767, #32891) -- Refactor existence-checking SQL to helper (#32790) -- Extract Dask executor to new daskexecutor provider (#32772) -- Remove atlas configuration definition (#32776) -- Add Redis task handler (#31855) -- Move writing configuration for webserver to main (webserver limited) (#32766) -- Improve getting the query count in Airflow API endpoints (#32630) -- Remove click upper bound (#32634) -- Add D400 ``pydocstyle`` check - core Airflow only (#31297) -- D205 Support (#31742, #32575, #32213, #32212, #32591, #32449, #32450) -- Bump word-wrap from ``1.2.3 to 1.2.4`` in ``/airflow/www`` (#32680) -- Strong-type all single-state enum values (#32537) -- More strong typed state conversion (#32521) -- SQL query improvements in utils/db.py (#32518) -- Bump semver from ``6.3.0 to 6.3.1`` in ``/airflow/www`` (#32506) -- Bump jsonschema version to ``4.18.0`` (#32445) -- Bump ``stylelint`` from ``13.13.1 to 15.10.1`` in ``/airflow/www`` (#32435) -- Bump tough-cookie from ``4.0.0 to 4.1.3`` in ``/airflow/www`` (#32443) -- upgrade flask-appbuilder (#32054) -- Support ``Pydantic`` 2 (#32366) -- Limit click until we fix mypy issues (#32413) -- A couple of minor cleanups (#31890) -- Replace State usages with strong-typed ``enums`` (#31735) -- Upgrade ruff to ``0.272`` (#31966) -- Better error message when serializing callable without name (#31778) -- Improve the views module a bit (#31661) -- Remove ``asynctest`` (#31664) -- Refactor sqlalchemy queries to ``2.0`` style (#31569, #31772, #32350, #32339, #32474, #32645) -- Remove Python ``3.7`` support (#30963) -- Bring back min-airflow-version for preinstalled providers (#31469) -- Docstring improvements (#31375) -- Improve typing in SchedulerJobRunner (#31285) -- Upgrade ruff to ``0.0.262`` (#30809) -- Upgrade to MyPy ``1.2.0`` (#30687) - -Docs only changes -""""""""""""""""" -- Clarify UI user types in security model (#33021) -- Add links to ``DAGRun / DAG / Task`` in templates-ref.rst (#33013) -- Add docs of how to test for DAG Import Errors (#32811) -- Clean-up of our new security page (#32951) -- Cleans up Extras reference page (#32954) -- Update Dag trigger API and command docs (#32696) -- Add deprecation info to the Airflow modules and classes docstring (#32635) -- Formatting installation doc to improve readability (#32502) -- Fix triggerer HA doc (#32454) -- Add type annotation to code examples (#32422) -- Document cron and delta timetables (#32392) -- Update index.rst doc to correct grammar (#32315) -- Fixing small typo in python.py (#31474) -- Separate out and clarify policies for providers (#30657) -- Fix docs: add an "apache" prefix to pip install (#30681) - - -Airflow 2.6.3 (2023-07-10) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Default allowed pattern of a run_id has been changed to ``^[A-Za-z0-9_.~:+-]+$`` (#32293). -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Previously, there was no validation on the run_id string. There is now a validation regex that -can be set by configuring ``allowed_run_id_pattern`` in ``scheduler`` section. 
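-
-A minimal ``airflow.cfg`` sketch of overriding the new validation (the pattern shown is the new
-default; adjust it to your own run_id conventions):
-
-.. code-block:: ini
-
-    [scheduler]
-    allowed_run_id_pattern = ^[A-Za-z0-9_.~:+-]+$
-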
- -Bug Fixes -""""""""" -- Use linear time regular expressions (#32303) -- Fix triggerers alive check and add a new conf for triggerer heartbeat rate (#32123) -- Catch the exception that triggerer initialization failed (#31999) -- Hide sensitive values from extra in connection edit form (#32309) -- Sanitize ``DagRun.run_id`` and allow flexibility (#32293) -- Add triggerer canceled log (#31757) -- Fix try number shown in the task view (#32361) -- Retry transactions on occasional deadlocks for rendered fields (#32341) -- Fix behaviour of LazyDictWithCache when import fails (#32248) -- Remove ``executor_class`` from Job - fixing backfill for custom executors (#32219) -- Fix bugged singleton implementation (#32218) -- Use ``mapIndex`` to display extra links per mapped task. (#32154) -- Ensure that main triggerer thread exits if the async thread fails (#32092) -- Use ``re2`` for matching untrusted regex (#32060) -- Render list items in rendered fields view (#32042) -- Fix hashing of ``dag_dependencies`` in serialized dag (#32037) -- Return ``None`` if an XComArg fails to resolve in a multiple_outputs Task (#32027) -- Check for DAG ID in query param from url as well as kwargs (#32014) -- Flash an error message instead of failure in ``rendered-templates`` when map index is not found (#32011) -- Fix ``ExternalTaskSensor`` when there is no task group TIs for the current execution date (#32009) -- Fix number param html type in trigger template (#31980, #31946) -- Fix masking nested variable fields (#31964) -- Fix ``operator_extra_links`` property serialization in mapped tasks (#31904) -- Decode old-style nested Xcom value (#31866) -- Add a check for trailing slash in webserver base_url (#31833) -- Fix connection uri parsing when the host includes a scheme (#31465) -- Fix database session closing with ``xcom_pull`` and ``inlets`` (#31128) -- Fix DAG's ``on_failure_callback`` is not invoked when task failed during testing dag. (#30965) -- Fix airflow module version check when using ``ExternalPythonOperator`` and debug logging level (#30367) - -Misc/Internal -""""""""""""" -- Fix ``task.sensor`` annotation in type stub (#31954) -- Limit ``Pydantic`` to ``< 2.0.0`` until we solve ``2.0.0`` incompatibilities (#32312) -- Fix ``Pydantic`` 2 pickiness about model definition (#32307) - -Doc only changes -"""""""""""""""" -- Add explanation about tag creation and cleanup (#32406) -- Minor updates to docs (#32369, #32315, #32310, #31794) -- Clarify Listener API behavior (#32269) -- Add information for users who ask for requirements (#32262) -- Add links to DAGRun / DAG / Task in Templates Reference (#32245) -- Add comment to warn off a potential wrong fix (#32230) -- Add a note that we'll need to restart triggerer to reflect any trigger change (#32140) -- Adding missing hyperlink to the tutorial documentation (#32105) -- Added difference between Deferrable and Non-Deferrable Operators (#31840) -- Add comments explaining need for special "trigger end" log message (#31812) -- Documentation update on Plugin updates. (#31781) -- Fix SemVer link in security documentation (#32320) -- Update security model of Airflow (#32098) -- Update references to restructured documentation from Airflow core (#32282) -- Separate out advanced logging configuration (#32131) -- Add ``™`` to Airflow in prominent places (#31977) - - -Airflow 2.6.2 (2023-06-17) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -No significant changes. 
- -Bug Fixes -^^^^^^^^^ -- Cascade update of TaskInstance to TaskMap table (#31445) -- Fix Kubernetes executors detection of deleted pods (#31274) -- Use keyword parameters for migration methods for mssql (#31309) -- Control permissibility of driver config in extra from airflow.cfg (#31754) -- Fixing broken links in openapi/v1.yaml (#31619) -- Hide old alert box when testing connection with different value (#31606) -- Add TriggererStatus to OpenAPI spec (#31579) -- Resolving issue where Grid won't un-collapse when Details is collapsed (#31561) -- Fix sorting of tags (#31553) -- Add the missing ``map_index`` to the xcom key when skipping downstream tasks (#31541) -- Fix airflow users delete CLI command (#31539) -- Include triggerer health status in Airflow ``/health`` endpoint (#31529) -- Remove dependency already registered for this task warning (#31502) -- Use kube_client over default CoreV1Api for deleting pods (#31477) -- Ensure min backoff in base sensor is at least 1 (#31412) -- Fix ``max_active_tis_per_dagrun`` for Dynamic Task Mapping (#31406) -- Fix error handling when pre-importing modules in DAGs (#31401) -- Fix dropdown default and adjust tutorial to use 42 as default for proof (#31400) -- Fix crash when clearing run with task from normal to mapped (#31352) -- Make BaseJobRunner a generic on the job class (#31287) -- Fix ``url_for_asset`` fallback and 404 on DAG Audit Log (#31233) -- Don't present an undefined execution date (#31196) -- Added spinner activity while the logs load (#31165) -- Include rediss to the list of supported URL schemes (#31028) -- Optimize scheduler by skipping "non-schedulable" DAGs (#30706) -- Save scheduler execution time during search for queued dag_runs (#30699) -- Fix ExternalTaskSensor to work correctly with task groups (#30742) -- Fix DAG.access_control can't sync when clean access_control (#30340) -- Fix failing get_safe_url tests for latest Python 3.8 and 3.9 (#31766) -- Fix typing for POST user endpoint (#31767) -- Fix wrong update for nested group default args (#31776) -- Fix overriding ``default_args`` in nested task groups (#31608) -- Mark ``[secrets] backend_kwargs`` as a sensitive config (#31788) -- Executor events are not always "exited" here (#30859) -- Validate connection IDs (#31140) - -Misc/Internal -""""""""""""" -- Add Python 3.11 support (#27264) -- Replace unicodecsv with standard csv library (#31693) -- Bring back unicodecsv as dependency of Airflow (#31814) -- Remove found_descendents param from get_flat_relative_ids (#31559) -- Fix typing in external task triggers (#31490) -- Wording the next and last run DAG columns better (#31467) -- Skip auto-document things with :meta private: (#31380) -- Add an example for sql_alchemy_connect_args conf (#31332) -- Convert dask upper-binding into exclusion (#31329) -- Upgrade FAB to 4.3.1 (#31203) -- Added metavar and choices to --state flag in airflow dags list-jobs CLI for suggesting valid state arguments. 
(#31308)
-- Use only one line for tmp dir log (#31170)
-- Rephrase comment in setup.py (#31312)
-- Add fullname to owner on logging (#30185)
-- Make connection id validation consistent across interface (#31282)
-- Use single source of truth for sensitive config items (#31820)
-
-Doc only changes
-""""""""""""""""
-- Add docstring and signature for _read_remote_logs (#31623)
-- Remove note about triggerer being 3.7+ only (#31483)
-- Fix version support information (#31468)
-- Add missing BashOperator import to documentation example (#31436)
-- Fix task.branch error caused by incorrect initial parameter (#31265)
-- Update callbacks documentation (errors and context) (#31116)
-- Add an example for dynamic task mapping with non-TaskFlow operator (#29762)
-- Few doc fixes - links, grammar and wording (#31719)
-- Add description in a few more places about adding airflow to pip install (#31448)
-- Fix table formatting in docker build documentation (#31472)
-- Update documentation for constraints installation (#31882)
-
-Airflow 2.6.1 (2023-05-16)
---------------------------
-
-Significant Changes
-^^^^^^^^^^^^^^^^^^^
-
-Clarifications of the external Health Check mechanism and using ``Job`` classes (#31277).
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-In the past, ``SchedulerJob`` and the other ``*Job`` classes were sometimes used to perform
-external health checks for Airflow components. Those are, however, Airflow DB ORM classes.
-The DB models and database structure of Airflow are considered an internal implementation detail, following the
-`public interface `_.
-Therefore, they should not be used for external health checks. Instead, use the
-``airflow jobs check`` CLI command (introduced in Airflow 2.1) for that purpose.
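-
-For example, a scheduler liveness probe could run the following (a minimal sketch;
-verify the exact flags against ``airflow jobs check --help`` on your version):
-
-.. code-block:: bash
-
-    # exits non-zero if no recent SchedulerJob heartbeat is found
-    airflow jobs check --job-type SchedulerJob --hostname "$(hostname)"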
-
-Bug Fixes
-^^^^^^^^^
-- Fix calculation of health check threshold for SchedulerJob (#31277)
-- Fix timestamp parse failure for k8s executor pod tailing (#31175)
-- Make sure that DAG processor job row has filled value in ``job_type`` column (#31182)
-- Fix section name reference for ``api_client_retry_configuration`` (#31174)
-- Ensure the KPO runs pod mutation hooks correctly (#31173)
-- Remove worrying log message about redaction from the OpenLineage plugin (#31149)
-- Move ``interleave_timestamp_parser`` config to the logging section (#31102)
-- Ensure that we check worker for served logs if no local or remote logs found (#31101)
-- Fix ``MappedTaskGroup`` import in taskinstance file (#31100)
-- Format DagBag.dagbag_report() Output (#31095)
-- Mask task attribute on task detail view (#31125)
-- Fix template error when iterating None value and fix params documentation (#31078)
-- Fix ``apache-hive`` extra so it installs the correct package (#31068)
-- Fix issue with zip files in DAGs folder when pre-importing Airflow modules (#31061)
-- Move TaskInstanceKey to a separate file to fix circular import (#31033, #31204)
-- Fix deleting DagRuns and TaskInstances that have a note (#30987)
-- Fix ``airflow providers get`` command output (#30978)
-- Fix Pool schema in the OpenAPI spec (#30973)
-- Add support for dynamic tasks with template fields that contain ``pandas.DataFrame`` (#30943)
-- Use the Task Group explicitly passed to 'partial' if any (#30933)
-- Fix ``order_by`` request in list DAG rest api (#30926)
-- Include node height/width in center-on-task logic (#30924)
-- Remove print from dag trigger command (#30921)
-- Improve task group UI in new graph (#30918)
-- Fix mapped states in grid view (#30916)
-- Fix problem with displaying graph (#30765)
-- Fix backfill KeyError when try_number out of sync (#30653)
-- Re-enable clear and setting state in the TaskInstance UI (#30415)
-- Prevent DagRun's ``state`` and ``start_date`` from being reset when clearing a task in a running DagRun (#30125)
-
-Misc/Internal
-"""""""""""""
-- Upper bind dask until they solve a side effect in their test suite (#31259)
-- Show task instances affected by clearing in a table (#30633)
-- Fix missing models in API documentation (#31021)
-
-Doc only changes
-""""""""""""""""
-- Improve description of the ``dag_processing.processes`` metric (#30891)
-- Improve Quick Start instructions (#30820)
-- Add section about missing task logs to the FAQ (#30717)
-- Mount the ``config`` directory in docker compose (#30662)
-- Update ``version_added`` config field for ``might_contain_dag`` and ``metrics_allow_list`` (#30969)
-
-
-Airflow 2.6.0 (2023-04-30)
---------------------------
-
-Significant Changes
-^^^^^^^^^^^^^^^^^^^
-
-Default permissions of file task handler log directories and files have been changed to "owner + group" writeable (#29506).
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The new default handles the case where impersonation is needed and both users (airflow and the impersonated user)
-have the same group set as their main group. Previously, the default was also other-writeable; users who wish to keep
-the other-writeable setting can still opt in by configuring ``file_task_handler_new_folder_permissions``
-and ``file_task_handler_new_file_permissions`` in the ``logging`` section.
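-
-For example, opting back in to other-writeable permissions could look like this
-(the octal modes shown are illustrative values, not a recommendation):
-
-.. code-block:: ini
-
-    [logging]
-    # make newly created log folders and files other-writeable again
-    file_task_handler_new_folder_permissions = 0o777
-    file_task_handler_new_file_permissions = 0o666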
-
-SLA callbacks no longer add files to the dag processor manager's queue (#30076)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-This stops SLA callbacks from keeping the dag processor manager permanently busy. It reduces CPU usage
-and fixes issues where SLAs stopped the system from seeing changes to existing dag files. Additional metrics
-have been added to help track the queue state.
-
-The ``cleanup()`` method in BaseTrigger is now defined as asynchronous, following the async/await pattern (#30152).
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-This is potentially a breaking change for any custom trigger implementations that override the ``cleanup()``
-method using synchronous code. However, using synchronous operations in ``cleanup()`` was technically wrong,
-because the method was executed in the main loop of the Triggerer and introduced unnecessary delays
-impacting other triggers. The change is unlikely to affect any existing trigger implementations.
-
-The gauge ``scheduler.tasks.running`` no longer exists (#30374)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The gauge never worked and its value was always 0. Having an accurate
-value for this metric is complex, so it was decided that removing this gauge makes
-more sense than fixing it with no certainty of the correctness of its value.
-
-Consolidate handling of tasks stuck in queued under new ``task_queued_timeout`` config (#30375)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-Logic for handling tasks stuck in the queued state has been consolidated, and all the configurations
-responsible for timing out stuck queued tasks have been deprecated and merged into
-``[scheduler] task_queued_timeout``. The configurations that have been deprecated are
-``[kubernetes] worker_pods_pending_timeout``, ``[celery] stalled_task_timeout``, and
-``[celery] task_adoption_timeout``. If any of these configurations are set, the longest timeout will be
-respected. For example, if ``[celery] stalled_task_timeout`` is 1200, and ``[scheduler] task_queued_timeout``
-is 600, Airflow will set ``[scheduler] task_queued_timeout`` to 1200.
-
-Improvement Changes
-^^^^^^^^^^^^^^^^^^^
-
-Display only the running configuration in configurations view (#28892)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The configurations view now only displays the running configuration. Previously, the default configuration
-was displayed at the top, but it was not obvious whether this default configuration was overridden or not.
-Subsequently, the non-documented endpoint ``/configuration?raw=true`` is deprecated and will be removed in
-Airflow 3.0. The HTTP response now returns an additional ``Deprecation`` header. The ``/config`` endpoint on
-the REST API is the standard way to fetch Airflow configuration programmatically.
-
-Explicit skipped states list for ExternalTaskSensor (#29933)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-ExternalTaskSensor now has an explicit ``skipped_states`` list.
-
-Miscellaneous Changes
-^^^^^^^^^^^^^^^^^^^^^
-
-Handle OverflowError on exponential backoff in next_run_calculation (#28172)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The maximum task retry delay is set to 24h (86400s) by default. You can change it globally via the ``core.max_task_retry_delay``
-parameter.
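-
-For example, to cap retry delays at one hour instead of the 24-hour default (an
-illustrative value, not a recommendation):
-
-.. code-block:: ini
-
-    [core]
-    # upper bound, in seconds, for exponential-backoff retry delays
-    max_task_retry_delay = 3600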
- -Move Hive macros to the provider (#28538) -""""""""""""""""""""""""""""""""""""""""" -The Hive Macros (``hive.max_partition``, ``hive.closest_ds_partition``) are available only when Hive Provider is -installed. Please install Hive Provider > 5.1.0 when using those macros. - -Updated app to support configuring the caching hash method for FIPS v2 (#30675) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Various updates for FIPS-compliance when running Airflow in Python 3.9+. This includes a new webserver option, ``caching_hash_method``, -for changing the default flask caching method. - -New Features -^^^^^^^^^^^^ -- AIP-50 Trigger DAG UI Extension with Flexible User Form Concept (#27063,#29376) -- Skip PythonVirtualenvOperator task when it returns a provided exit code (#30690) -- rename skip_exit_code to skip_on_exit_code and allow providing multiple codes (#30692) -- Add skip_on_exit_code also to ExternalPythonOperator (#30738) -- Add ``max_active_tis_per_dagrun`` for Dynamic Task Mapping (#29094) -- Add serializer for pandas dataframe (#30390) -- Deferrable ``TriggerDagRunOperator`` (#30292) -- Add command to get DAG Details via CLI (#30432) -- Adding ContinuousTimetable and support for @continuous schedule_interval (#29909) -- Allow customized rules to check if a file has dag (#30104) -- Add a new Airflow conf to specify a SSL ca cert for Kubernetes client (#30048) -- Bash sensor has an explicit retry code (#30080) -- Add filter task upstream/downstream to grid view (#29885) -- Add testing a connection via Airflow CLI (#29892) -- Support deleting the local log files when using remote logging (#29772) -- ``Blocklist`` to disable specific metric tags or metric names (#29881) -- Add a new graph inside of the grid view (#29413) -- Add database ``check_migrations`` config (#29714) -- add output format arg for ``cli.dags.trigger`` (#29224) -- Make json and yaml available in templates (#28930) -- Enable tagged metric names for existing Statsd metric publishing events | influxdb-statsd support (#29093) -- Add arg --yes to ``db export-archived`` command. 
(#29485) -- Make the policy functions pluggable (#28558) -- Add ``airflow db drop-archived`` command (#29309) -- Enable individual trigger logging (#27758) -- Implement new filtering options in graph view (#29226) -- Add triggers for ExternalTask (#29313) -- Add command to export purged records to CSV files (#29058) -- Add ``FileTrigger`` (#29265) -- Emit DataDog statsd metrics with metadata tags (#28961) -- Add some statsd metrics for dataset (#28907) -- Add --overwrite option to ``connections import`` CLI command (#28738) -- Add general-purpose "notifier" concept to DAGs (#28569) -- Add a new conf to wait past_deps before skipping a task (#27710) -- Add Flink on K8s Operator (#28512) -- Allow Users to disable SwaggerUI via configuration (#28354) -- Show mapped task groups in graph (#28392) -- Log FileTaskHandler to work with KubernetesExecutor's multi_namespace_mode (#28436) -- Add a new config for adapting masked secrets to make it easier to prevent secret leakage in logs (#28239) -- List specific config section and its values using the cli (#28334) -- KubernetesExecutor multi_namespace_mode can use namespace list to avoid requiring cluster role (#28047) -- Automatically save and allow restore of recent DAG run configs (#27805) -- Added exclude_microseconds to cli (#27640) - -Improvements -"""""""""""" -- Rename most pod_id usage to pod_name in KubernetesExecutor (#29147) -- Update the error message for invalid use of poke-only sensors (#30821) -- Update log level in scheduler critical section edge case (#30694) -- AIP-51 Removing Executor Coupling from Core Airflow (`AIP-51 `__) -- Add multiple exit code handling in skip logic for BashOperator (#30739) -- Updated app to support configuring the caching hash method for FIPS v2 (#30675) -- Preload airflow imports before dag parsing to save time (#30495) -- Improve task & run actions ``UX`` in grid view (#30373) -- Speed up TaskGroups with caching property of group_id (#30284) -- Use the engine provided in the session (#29804) -- Type related import optimization for Executors (#30361) -- Add more type hints to the code base (#30503) -- Always use self.appbuilder.get_session in security managers (#30233) -- Update SQLAlchemy ``select()`` to new style (#30515) -- Refactor out xcom constants from models (#30180) -- Add exception class name to DAG-parsing error message (#30105) -- Rename statsd_allow_list and statsd_block_list to ``metrics_*_list`` (#30174) -- Improve serialization of tuples and sets (#29019) -- Make cleanup method in trigger an async one (#30152) -- Lazy load serialization modules (#30094) -- SLA callbacks no longer add files to the dag_processing manager queue (#30076) -- Add task.trigger rule to grid_data (#30130) -- Speed up log template sync by avoiding ORM (#30119) -- Separate cli_parser.py into two modules (#29962) -- Explicit skipped states list for ExternalTaskSensor (#29933) -- Add task state hover highlighting to new graph (#30100) -- Store grid tabs in url params (#29904) -- Use custom Connexion resolver to load lazily (#29992) -- Delay Kubernetes import in secret masker (#29993) -- Delay ConnectionModelView init until it's accessed (#29946) -- Scheduler, make stale DAG deactivation threshold configurable instead of using dag processing timeout (#29446) -- Improve grid view height calculations (#29563) -- Avoid importing executor during conf validation (#29569) -- Make permissions for FileTaskHandler group-writeable and configurable (#29506) -- Add colors in help outputs of Airflow CLI commands #28789 (#29116) -- 
Add a param for get_dags endpoint to list only unpaused dags (#28713) -- Expose updated_at filter for dag run and task instance endpoints (#28636) -- Increase length of user identifier columns (#29061) -- Update gantt chart UI to display queued state of tasks (#28686) -- Add index on log.dttm (#28944) -- Display only the running configuration in configurations view (#28892) -- Cap dropdown menu size dynamically (#28736) -- Added JSON linter to connection edit / add UI for field extra. On connection edit screen, existing extra data will be displayed indented (#28583) -- Use labels instead of pod name for pod log read in k8s exec (#28546) -- Use time not tries for queued & running re-checks. (#28586) -- CustomTTYColoredFormatter should inherit TimezoneAware formatter (#28439) -- Improve past depends handling in Airflow CLI tasks.run command (#28113) -- Support using a list of callbacks in ``on_*_callback/sla_miss_callbacks`` (#28469) -- Better table name validation for db clean (#28246) -- Use object instead of array in config.yml for config template (#28417) -- Add markdown rendering for task notes. (#28245) -- Show mapped task groups in grid view (#28208) -- Add ``renamed`` and ``previous_name`` in config sections (#28324) -- Speed up most Users/Role CLI commands (#28259) -- Speed up Airflow role list command (#28244) -- Refactor serialization (#28067, #30819, #30823) -- Allow longer pod names for k8s executor / KPO (#27736) -- Updates health check endpoint to include ``triggerer`` status (#27755) - - -Bug Fixes -""""""""" -- Fix static_folder for cli app (#30952) -- Initialize plugins for cli appbuilder (#30934) -- Fix dag file processor heartbeat to run only if necessary (#30899) -- Fix KubernetesExecutor sending state to scheduler (#30872) -- Count mapped upstream only if all are finished (#30641) -- ExternalTaskSensor: add external_task_group_id to template_fields (#30401) -- Improve url detection for task instance details (#30779) -- Use material icons for dag import error banner (#30771) -- Fix misc grid/graph view UI bugs (#30752) -- Add a collapse grid button (#30711) -- Fix d3 dependencies (#30702) -- Simplify logic to resolve tasks stuck in queued despite stalled_task_timeout (#30375) -- When clearing task instances try to get associated DAGs from database (#29065) -- Fix mapped tasks partial arguments when DAG default args are provided (#29913) -- Deactivate DAGs deleted from within zip files (#30608) -- Recover from ``too old resource version exception`` by retrieving the latest ``resource_version`` (#30425) -- Fix possible race condition when refreshing DAGs (#30392) -- Use custom validator for OpenAPI request body (#30596) -- Fix ``TriggerDagRunOperator`` with deferrable parameter (#30406) -- Speed up dag runs deletion (#30330) -- Do not use template literals to construct html elements (#30447) -- Fix deprecation warning in ``example_sensor_decorator`` DAG (#30513) -- Avoid logging sensitive information in triggerer job log (#30110) -- Add a new parameter for base sensor to catch the exceptions in poke method (#30293) -- Fix dag run conf encoding with non-JSON serializable values (#28777) -- Added fixes for Airflow to be usable on Windows Dask-Workers (#30249) -- Force DAG last modified time to UTC (#30243) -- Fix EmptySkipOperator in example dag (#30269) -- Make the webserver startup respect update_fab_perms (#30246) -- Ignore error when changing log folder permissions (#30123) -- Disable ordering DagRuns by note (#30043) -- Fix reading logs from finished KubernetesExecutor 
worker pod (#28817)
-- Mask out non-access bits when comparing file modes (#29886)
-- Remove Run task action from UI (#29706)
-- Fix log tailing issues with legacy log view (#29496)
-- Fixes to how DebugExecutor handles sensors (#28528)
-- Ensure that pod_mutation_hook is called before logging the pod name (#28534)
-- Handle OverflowError on exponential backoff in next_run_calculation (#28172)
-
-Misc/Internal
-"""""""""""""
-- Make eager upgrade additional dependencies optional (#30811)
-- Upgrade to pip 23.1.1 (#30808)
-- Remove protobuf limitation from eager upgrade (#30182)
-- Deprecate ``skip_exit_code`` in ``BashOperator`` (#30734)
-- Remove gauge ``scheduler.tasks.running`` (#30374)
-- Bump json5 to 1.0.2 and eslint-plugin-import to 2.27.5 in ``/airflow/www`` (#30568)
-- Add tests to PythonOperator (#30362)
-- Add asgiref as a core dependency (#30527)
-- Discovery safe mode toggle comment clarification (#30459)
-- Upgrade moment-timezone package to fix Tehran tz (#30455)
-- Bump loader-utils from 2.0.0 to 2.0.4 in ``/airflow/www`` (#30319)
-- Bump babel-loader from 8.1.0 to 9.1.0 in ``/airflow/www`` (#30316)
-- DagBag: Use ``dag.fileloc`` instead of ``dag.full_filepath`` in exception message (#30610)
-- Change log level of serialization information (#30239)
-- Minor DagRun helper method cleanup (#30092)
-- Improve type hinting in stats.py (#30024)
-- Limit ``importlib-metadata`` backport to < 5.0.0 (#29924)
-- Align cncf provider file names with AIP-21 (#29905)
-- Upgrade FAB to 4.3.0 (#29766)
-- Clear ExecutorLoader cache in tests (#29849)
-- Lazy load Task Instance logs in UI (#29827)
-- added warning log for max page limit exceeding api calls (#29788)
-- Aggressively cache entry points in process (#29625)
-- Don't use ``importlib.metadata`` to get Version for speed (#29723)
-- Upgrade Mypy to 1.0 (#29468)
-- Rename ``db export-cleaned`` to ``db export-archived`` (#29450)
-- listener: simplify API by replacing SQLAlchemy event-listening by direct calls (#29289)
-- No multi-line log entry for bash env vars (#28881)
-- Switch to ruff for faster static checks (#28893)
-- Remove horizontal lines in TI logs (#28876)
-- Make allowed_deserialization_classes more intuitive (#28829)
-- Propagate logs to stdout when in k8s executor pod (#28440, #30860)
-- Fix code readability, add docstrings to json_client (#28619)
-- AIP-51 - Misc.
Compatibility Checks (#28375) -- Fix is_local for LocalKubernetesExecutor (#28288) -- Move Hive macros to the provider (#28538) -- Rerun flaky PinotDB integration test (#28562) -- Add pre-commit hook to check session default value (#28007) -- Refactor get_mapped_group_summaries for web UI (#28374) -- Add support for k8s 1.26 (#28320) -- Replace ``freezegun`` with time-machine (#28193) -- Completed D400 for ``airflow/kubernetes/*`` (#28212) -- Completed D400 for multiple folders (#27969) -- Drop k8s 1.21 and 1.22 support (#28168) -- Remove unused task_queue attr from k8s scheduler class (#28049) -- Completed D400 for multiple folders (#27767, #27768) - - -Doc only changes -"""""""""""""""" -- Add instructions on how to avoid accidental airflow upgrade/downgrade (#30813) -- Add explicit information about how to write task logs (#30732) -- Better explanation on how to log from tasks (#30746) -- Use correct import path for Dataset (#30617) -- Create ``audit_logs.rst`` (#30405) -- Adding taskflow API example for sensors (#30344) -- Add clarification about timezone aware dags (#30467) -- Clarity params documentation (#30345) -- Fix unit for task duration metric (#30273) -- Update dag-run.rst for dead links of cli commands (#30254) -- Add Write efficient Python code section to Reducing DAG complexity (#30158) -- Allow to specify which connection, variable or config are being looked up in the backend using ``*_lookup_pattern`` parameters (#29580) -- Add Documentation for notification feature extension (#29191) -- Clarify that executor interface is public but instances are not (#29200) -- Add Public Interface description to Airflow documentation (#28300) -- Add documentation for task group mapping (#28001) -- Some fixes to metrics doc (#30290) - - -Airflow 2.5.3 (2023-04-01) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -No significant changes. - -Bug Fixes -^^^^^^^^^ -- Fix DagProcessorJob integration for standalone dag-processor (#30278) -- Fix proper termination of gunicorn when it hangs (#30188) -- Fix XCom.get_one exactly one exception text (#30183) -- Correct the VARCHAR size to 250. 
(#30178)
-- Revert fix for on_failure_callback when task receives a SIGTERM (#30165)
-- Move read only property to DagState to fix generated docs (#30149)
-- Ensure that ``dag.partial_subset`` doesn't mutate task group properties (#30129)
-- Fix inconsistent returned value of ``airflow dags next-execution`` cli command (#30117)
-- Fix www/utils.dag_run_link redirection (#30098)
-- Fix ``TriggerRuleDep`` when the mapped tasks count is 0 (#30084)
-- Dag processor manager, add retry_db_transaction to _fetch_callbacks (#30079)
-- Fix db clean command for mysql db (#29999)
-- Avoid considering EmptyOperator in mini scheduler (#29979)
-- Fix some long known Graph View UI problems (#29971, #30355, #30360)
-- Fix dag docs toggle icon initial angle (#29970)
-- Fix tags selection in DAGs UI (#29944)
-- Including airflow/example_dags/sql/sample.sql in MANIFEST.in (#29883)
-- Fixing broken filter in /taskinstance/list view (#29850)
-- Allow generic param dicts (#29782)
-- Fix update_mask in patch variable route (#29711)
-- Strip markup from app_name if instance_name_has_markup = True (#28894)
-
-Misc/Internal
-^^^^^^^^^^^^^
-- Revert "Also limit importlib on Python 3.9 (#30069)" (#30209)
-- Add custom_operator_name to @task.sensor tasks (#30131)
-- Bump webpack from 5.73.0 to 5.76.0 in /airflow/www (#30112)
-- Formatted config (#30103)
-- Remove upper bound limit of astroid (#30033)
-- Remove accidentally merged vendor daemon patch code (#29895)
-- Fix warning in airflow tasks test command regarding absence of data_interval (#27106)
-
-Doc only changes
-^^^^^^^^^^^^^^^^
-- Adding more information regarding top level code (#30040)
-- Update workday example (#30026)
-- Fix some typos in the DAGs docs (#30015)
-- Update set-up-database.rst (#29991)
-- Fix some typos on the kubernetes documentation (#29936)
-- Fix some punctuation and grammar (#29342)
-
-
-Airflow 2.5.2 (2023-03-15)
---------------------------
-
-Significant Changes
-^^^^^^^^^^^^^^^^^^^
-
-The date-time fields passed as API parameters or Params should be RFC3339-compliant (#29395)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-In API calls, it was previously possible for a ``+`` passed as part of a date-time field not to be URL-encoded,
-and such date-time fields could still pass validation. Such date-time parameters should now be URL-encoded (as ``%2B``).
-
-For Params, we still allow ISO 8601-compliant date-times (so, for example, it is possible that a space
-was used instead of ``T`` to separate the date from the time and that no timezone was specified), but we raise
-a deprecation warning.
-
-Default for ``[webserver] expose_hostname`` changed to ``False`` (#29547)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-The default for ``[webserver] expose_hostname`` has been set to ``False``, instead of ``True``. This means administrators must opt in to expose webserver hostnames to end users.
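-
-Administrators who rely on the hostname being shown can opt back in, for example:
-
-.. code-block:: ini
-
-    [webserver]
-    # restore the pre-2.5.2 behaviour of exposing hostnames
-    expose_hostname = True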
- -Bug Fixes -^^^^^^^^^ -- Fix validation of date-time field in API and Parameter schemas (#29395) -- Fix grid logs for large logs (#29390) -- Fix on_failure_callback when task receives a SIGTERM (#29743) -- Update min version of python-daemon to fix containerd file limits (#29916) -- POST ``/dagRuns`` API should 404 if dag not active (#29860) -- DAG list sorting lost when switching page (#29756) -- Fix Scheduler crash when clear a previous run of a normal task that is now a mapped task (#29645) -- Convert moment with timezone to UTC instead of raising an exception (#29606) -- Fix clear dag run ``openapi`` spec responses by adding additional return type (#29600) -- Don't display empty rendered attrs in Task Instance Details page (#29545) -- Remove section check from get-value command (#29541) -- Do not show version/node in UI traceback for unauthenticated user (#29501) -- Make ``prev_logical_date`` variable offset-aware (#29454) -- Fix nested fields rendering in mapped operators (#29451) -- Datasets, next_run_datasets, remove unnecessary timestamp filter (#29441) -- ``Edgemodifier`` refactoring w/ labels in TaskGroup edge case (#29410) -- Fix Rest API update user output (#29409) -- Ensure Serialized DAG is deleted (#29407) -- Persist DAG and task doc values in TaskFlow API if explicitly set (#29399) -- Redirect to the origin page with all the params (#29212) -- Fixing Task Duration view in case of manual DAG runs only (#22015) (#29195) -- Remove poke method to fall back to parent implementation (#29146) -- PR: Introduced fix to run tasks on Windows systems (#29107) -- Fix warning in migrations about old config. (#29092) -- Emit dagrun failed duration when timeout (#29076) -- Handling error on cluster policy itself (#29056) -- Fix kerberos authentication for the REST API. (#29054) -- Fix leak sensitive field via V1EnvVar on exception (#29016) -- Sanitize url_for arguments before they are passed (#29039) -- Fix dag run trigger with a note. 
(#29228) -- Write action log to DB when DAG run is triggered via API (#28998) -- Resolve all variables in pickled XCom iterator (#28982) -- Allow URI without authority and host blocks in ``airflow connections add`` (#28922) -- Be more selective when adopting pods with KubernetesExecutor (#28899) -- KubenetesExecutor sends state even when successful (#28871) -- Annotate KubernetesExecutor pods that we don't delete (#28844) -- Throttle streaming log reads (#28818) -- Introduce dag processor job (#28799) -- Fix #28391 manual task trigger from UI fails for k8s executor (#28394) -- Logging poke info when external dag is not none and task_id and task_ids are none (#28097) -- Fix inconsistencies in checking edit permissions for a DAG (#20346) - -Misc/Internal -^^^^^^^^^^^^^ -- Add a check for not templateable fields (#29821) -- Removed continue for not in (#29791) -- Move extra links position in grid view (#29703) -- Bump ``undici`` from ``5.9.1`` to ``5.19.1`` (#29583) -- Change expose_hostname default to false (#29547) -- Change permissions of config/password files created by airflow (#29495) -- Use newer setuptools ``v67.2.0`` (#29465) -- Increase max height for grid view elements (#29367) -- Clarify description of worker control config (#29247) -- Bump ``ua-parser-js`` from ``0.7.31`` to ``0.7.33`` in ``/airflow/www`` (#29172) -- Remove upper bound limitation for ``pytest`` (#29086) -- Check for ``run_id`` url param when linking to ``graph/gantt`` views (#29066) -- Clarify graph view dynamic task labels (#29042) -- Fixing import error for dataset (#29007) -- Update how PythonSensor returns values from ``python_callable`` (#28932) -- Add dep context description for better log message (#28875) -- Bump ``swagger-ui-dist`` from ``3.52.0`` to ``4.1.3`` in ``/airflow/www`` (#28824) -- Limit ``importlib-metadata`` backport to ``< 5.0.0`` (#29924, #30069) - -Doc only changes -^^^^^^^^^^^^^^^^ -- Update pipeline.rst - Fix query in ``merge_data()`` task (#29158) -- Correct argument name of Workday timetable in timetable.rst (#29896) -- Update ref anchor for env var link in Connection how-to doc (#29816) -- Better description for limit in api (#29773) -- Description of dag_processing.last_duration (#29740) -- Update docs re: template_fields typing and subclasses (#29725) -- Fix formatting of Dataset inlet/outlet note in TaskFlow concepts (#29678) -- Specific use-case: adding packages via requirements.txt in compose (#29598) -- Detect is 'docker-compose' existing (#29544) -- Add Landing Times entry to UI docs (#29511) -- Improve health checks in example docker-compose and clarify usage (#29408) -- Remove ``notes`` param from TriggerDagRunOperator docstring (#29298) -- Use ``schedule`` param rather than ``timetable`` in Timetables docs (#29255) -- Add trigger process to Airflow Docker docs (#29203) -- Update set-up-database.rst (#29104) -- Several improvements to the Params doc (#29062) -- Email Config docs more explicit env var examples (#28845) -- Listener plugin example added (#27905) - - -Airflow 2.5.1 (2023-01-20) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Trigger gevent ``monkeypatching`` via environment variable (#28283) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -If you are using gevent for your webserver deployment and used local settings to ``monkeypatch`` gevent, -you might want to replace local settings patching with an ``_AIRFLOW_PATCH_GEVENT`` environment variable -set to 1 in your webserver. 
This ensures gevent patching is done as early as possible. - -Bug Fixes -^^^^^^^^^ -- Fix masking of non-sensitive environment variables (#28802) -- Remove swagger-ui extra from connexion and install ``swagger-ui-dist`` via npm package (#28788) -- Fix ``UIAlert`` should_show when ``AUTH_ROLE_PUBLIC`` set (#28781) -- Only patch single label when adopting pod (#28776) -- Update CSRF token to expire with session (#28730) -- Fix "airflow tasks render" cli command for mapped task instances (#28698) -- Allow XComArgs for ``external_task_ids`` of ExternalTaskSensor (#28692) -- Row-lock TIs to be removed during mapped task expansion (#28689) -- Handle ConnectionReset exception in Executor cleanup (#28685) -- Fix description of output redirection for access_log for gunicorn (#28672) -- Add back join to zombie query that was dropped in #28198 (#28544) -- Fix calendar view for CronTriggerTimeTable dags (#28411) -- After running the DAG the employees table is empty. (#28353) -- Fix ``DetachedInstanceError`` when finding zombies in Dag Parsing process (#28198) -- Nest header blocks in ``divs`` to fix ``dagid`` copy nit on dag.html (#28643) -- Fix UI caret direction (#28624) -- Guard not-yet-expanded ti in trigger rule dep (#28592) -- Move TI ``setNote`` endpoints under TaskInstance in OpenAPI (#28566) -- Consider previous run in ``CronTriggerTimetable`` (#28532) -- Ensure correct log dir in file task handler (#28477) -- Fix bad pods pickled in executor_config (#28454) -- Add ``ensure_ascii=False`` in trigger dag run API (#28451) -- Add setters to MappedOperator on_*_callbacks (#28313) -- Fix ``ti._try_number`` for deferred and up_for_reschedule tasks (#26993) -- separate ``callModal`` from dag.js (#28410) -- A manual run can't look like a scheduled one (#28397) -- Dont show task/run durations when there is no start_date (#28395) -- Maintain manual scroll position in task logs (#28386) -- Correctly select a mapped task's "previous" task (#28379) -- Trigger gevent ``monkeypatching`` via environment variable (#28283) -- Fix db clean warnings (#28243) -- Make arguments 'offset' and 'length' not required (#28234) -- Make live logs reading work for "other" k8s executors (#28213) -- Add custom pickling hooks to ``LazyXComAccess`` (#28191) -- fix next run datasets error (#28165) -- Ensure that warnings from ``@dag`` decorator are reported in dag file (#28153) -- Do not warn when airflow dags tests command is used (#28138) -- Ensure the ``dagbag_size`` metric decreases when files are deleted (#28135) -- Improve run/task grid view actions (#28130) -- Make BaseJob.most_recent_job favor "running" jobs (#28119) -- Don't emit FutureWarning when code not calling old key (#28109) -- Add ``airflow.api.auth.backend.session`` to backend sessions in compose (#28094) -- Resolve false warning about calling conf.get on moved item (#28075) -- Return list of tasks that will be changed (#28066) -- Handle bad zip files nicely when parsing DAGs. 
(#28011) -- Prevent double loading of providers from local paths (#27988) -- Fix deadlock when chaining multiple empty mapped tasks (#27964) -- fix: current_state method on TaskInstance doesn't filter by map_index (#27898) -- Don't log CLI actions if db not initialized (#27851) -- Make sure we can get out of a faulty scheduler state (#27834) -- dagrun, ``next_dagruns_to_examine``, add MySQL index hint (#27821) -- Handle DAG disappearing mid-flight when dag verification happens (#27720) -- fix: continue checking sla (#26968) -- Allow generation of connection URI to work when no conn type (#26765) - -Misc/Internal -^^^^^^^^^^^^^ -- Remove limit for ``dnspython`` after eventlet got fixed (#29004) -- Limit ``dnspython`` to < ``2.3.0`` until eventlet incompatibility is solved (#28962) -- Add automated version replacement in example dag indexes (#28090) -- Cleanup and do housekeeping with plugin examples (#28537) -- Limit ``SQLAlchemy`` to below ``2.0`` (#28725) -- Bump ``json5`` from ``1.0.1`` to ``1.0.2`` in ``/airflow/www`` (#28715) -- Fix some docs on using sensors with taskflow (#28708) -- Change Architecture and OperatingSystem classes into ``Enums`` (#28627) -- Add doc-strings and small improvement to email util (#28634) -- Fix ``Connection.get_extra`` type (#28594) -- navbar, cap dropdown size, and add scroll bar (#28561) -- Emit warnings for ``conf.get*`` from the right source location (#28543) -- Move MyPY plugins of ours to dev folder (#28498) -- Add retry to ``purge_inactive_dag_warnings`` (#28481) -- Re-enable Plyvel on ARM as it now builds cleanly (#28443) -- Add SIGUSR2 handler for LocalTaskJob and workers to aid debugging (#28309) -- Convert ``test_task_command`` to Pytest and ``unquarantine`` tests in it (#28247) -- Make invalid characters exception more readable (#28181) -- Bump decode-uri-component from ``0.2.0`` to ``0.2.2`` in ``/airflow/www`` (#28080) -- Use asserts instead of exceptions for executor not started (#28019) -- Simplify dataset ``subgraph`` logic (#27987) -- Order TIs by ``map_index`` (#27904) -- Additional info about Segmentation Fault in ``LocalTaskJob`` (#27381) - -Doc only changes -^^^^^^^^^^^^^^^^ -- Mention mapped operator in cluster policy doc (#28885) -- Slightly improve description of Dynamic DAG generation preamble (#28650) -- Restructure Docs (#27235) -- Update scheduler docs about low priority tasks (#28831) -- Clarify that versioned constraints are fixed at release time (#28762) -- Clarify about docker compose (#28729) -- Adding an example dag for dynamic task mapping (#28325) -- Use docker compose v2 command (#28605) -- Add AIRFLOW_PROJ_DIR to docker-compose example (#28517) -- Remove outdated Optional Provider Feature outdated documentation (#28506) -- Add documentation for [core] mp_start_method config (#27993) -- Documentation for the LocalTaskJob return code counter (#27972) -- Note which versions of Python are supported (#27798) - - -Airflow 2.5.0 (2022-12-02) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -``airflow dags test`` no longer performs a backfill job (#26400) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -In order to make ``airflow dags test`` more useful as a testing and debugging tool, we no -longer run a backfill job and instead run a "local task runner". Users can still backfill -their DAGs using the ``airflow dags backfill`` command. 
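-
-For example (``my_dag`` is a placeholder dag id; verify the exact flags with
-``--help`` on your version):
-
-.. code-block:: bash
-
-    # single in-process run of the dag for one logical date, with local logging
-    airflow dags test my_dag 2022-12-01
-
-    # backfills still go through the dedicated command
-    airflow dags backfill my_dag --start-date 2022-12-01 --end-date 2022-12-03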
-
-Airflow config section ``kubernetes`` renamed to ``kubernetes_executor`` (#26873)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-KubernetesPodOperator no longer considers any core kubernetes config params, so this section now only applies to the kubernetes executor. Renaming it reduces potential for confusion.
-
-``AirflowException`` is now thrown as soon as any dependent task of ExternalTaskSensor fails (#27190)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-``ExternalTaskSensor`` no longer hangs indefinitely when ``failed_states`` is set, an ``execution_date_fn`` is used, and some but not all of the dependent tasks fail.
-Instead, an ``AirflowException`` is thrown as soon as any of the dependent tasks fail.
-Any code handling this failure in addition to timeouts should move to catching the ``AirflowException`` base class and not only the ``AirflowSensorTimeout`` subclass.
-
-The Airflow config option ``scheduler.deactivate_stale_dags_interval`` has been renamed to ``scheduler.parsing_cleanup_interval`` (#27828).
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-The old option will continue to work but will issue deprecation warnings, and will be removed entirely in Airflow 3.
-
-New Features
-^^^^^^^^^^^^
-- ``TaskRunner``: notify of component start and finish (#27855)
-- Add DagRun state change to the Listener plugin system (#27113)
-- Metric for raw task return codes (#27155)
-- Add logic for XComArg to pull specific map indexes (#27771)
-- Clear TaskGroup (#26658, #28003)
-- Add critical section query duration metric (#27700)
-- Add: #23880 :: Audit log for ``AirflowModelViews(Variables/Connection)`` (#24079, #27994, #27923)
-- Add postgres 15 support (#27444)
-- Expand tasks in mapped group at run time (#27491)
-- reset commits, clean submodules (#27560)
-- scheduler_job, add metric for scheduler loop timer (#27605)
-- Allow datasets to be used in taskflow (#27540)
-- Add expanded_ti_count to ti context (#27680)
-- Add user comment to task instance and dag run (#26457, #27849, #27867)
-- Enable copying DagRun JSON to clipboard (#27639)
-- Implement extra controls for SLAs (#27557)
-- add dag parsed time in DAG view (#27573)
-- Add max_wait for exponential_backoff in BaseSensor (#27597)
-- Expand tasks in mapped group at parse time (#27158)
-- Add disable retry flag on backfill (#23829)
-- Adding sensor decorator (#22562)
-- Api endpoint update ti (#26165)
-- Filtering datasets by recent update events (#26942)
-- Support ``Is /not`` Null filter for value is None on ``webui`` (#26584)
-- Add search to datasets list (#26893)
-- Split out and handle 'params' in mapped operator (#26100)
-- Add authoring API for TaskGroup mapping (#26844)
-- Add ``one_done`` trigger rule (#26146)
-- Create a more efficient airflow dag test command that also has better local logging (#26400)
-- Support add/remove permissions to roles commands (#26338)
-- Auto tail file logs in Web UI (#26169)
-- Add triggerer info to task instance in API (#26249)
-- Flag to deserialize value on custom XCom backend (#26343)
-
-Improvements
-^^^^^^^^^^^^
-- Allow depth-first execution (#27827)
-- UI: Update offset height if data changes (#27865)
-- Improve TriggerRuleDep typing and readability (#27810)
-- Make views requiring session, keyword only args (#27790)
-- Optimize ``TI.xcom_pull()`` with explicit task_ids and map_indexes
(#27699) -- Allow hyphens in pod id used by k8s executor (#27737) -- optimise task instances filtering (#27102) -- Use context managers to simplify log serve management (#27756) -- Fix formatting leftovers (#27750) -- Improve task deadlock messaging (#27734) -- Improve "sensor timeout" messaging (#27733) -- Replace urlparse with ``urlsplit`` (#27389) -- Align TaskGroup semantics to AbstractOperator (#27723) -- Add new files to parsing queue on every loop of dag processing (#27060) -- Make Kubernetes Executor & Scheduler resilient to error during PMH execution (#27611) -- Separate dataset deps into individual graphs (#27356) -- Use log.exception where more economical than log.error (#27517) -- Move validation ``branch_task_ids`` into ``SkipMixin`` (#27434) -- Coerce LazyXComAccess to list when pushed to XCom (#27251) -- Update cluster-policies.rst docs (#27362) -- Add warning if connection type already registered within the provider (#27520) -- Activate debug logging in commands with --verbose option (#27447) -- Add classic examples for Python Operators (#27403) -- change ``.first()`` to ``.scalar()`` (#27323) -- Improve reset_dag_run description (#26755) -- Add examples and ``howtos`` about sensors (#27333) -- Make grid view widths adjustable (#27273) -- Sorting plugins custom menu links by category before name (#27152) -- Simplify DagRun.verify_integrity (#26894) -- Add mapped task group info to serialization (#27027) -- Correct the JSON style used for Run config in Grid View (#27119) -- No ``extra__conn_type__`` prefix required for UI behaviors (#26995) -- Improve dataset update blurb (#26878) -- Rename kubernetes config section to kubernetes_executor (#26873) -- decode params for dataset searches (#26941) -- Get rid of the DAGRun details page & rely completely on Grid (#26837) -- Fix scheduler ``crashloopbackoff`` when using ``hostname_callable`` (#24999) -- Reduce log verbosity in KubernetesExecutor. (#26582) -- Don't iterate tis list twice for no reason (#26740) -- Clearer code for PodGenerator.deserialize_model_file (#26641) -- Don't import kubernetes unless you have a V1Pod (#26496) -- Add updated_at column to DagRun and Ti tables (#26252) -- Move the deserialization of custom XCom Backend to 2.4.0 (#26392) -- Avoid calculating all elements when one item is needed (#26377) -- Add ``__future__``.annotations automatically by isort (#26383) -- Handle list when serializing expand_kwargs (#26369) -- Apply PEP-563 (Postponed Evaluation of Annotations) to core airflow (#26290) -- Add more weekday operator and sensor examples #26071 (#26098) -- Align TaskGroup semantics to AbstractOperator (#27723) - -Bug Fixes -^^^^^^^^^ -- Gracefully handle whole config sections being renamed (#28008) -- Add allow list for imports during deserialization (#27887) -- Soft delete datasets that are no longer referenced in DAG schedules or task outlets (#27828) -- Redirect to home view when there are no valid tags in the URL (#25715) -- Refresh next run datasets info in dags view (#27839) -- Make MappedTaskGroup depend on its expand inputs (#27876) -- Make DagRun state updates for paused DAGs faster (#27725) -- Don't explicitly set include_examples to False on task run command (#27813) -- Fix menu border color (#27789) -- Fix backfill queued task getting reset to scheduled state. 
(#23720) -- Fix clearing child dag mapped tasks from parent dag (#27501) -- Handle json encoding of ``V1Pod`` in task callback (#27609) -- Fix ExternalTaskSensor can't check zipped dag (#27056) -- Avoid re-fetching DAG run in TriggerDagRunOperator (#27635) -- Continue on exception when retrieving metadata (#27665) -- External task sensor fail fix (#27190) -- Add the default None when pop actions (#27537) -- Display parameter values from serialized dag in trigger dag view. (#27482, #27944) -- Move TriggerDagRun conf check to execute (#27035) -- Resolve trigger assignment race condition (#27072) -- Update google_analytics.html (#27226) -- Fix some bug in web ui dags list page (auto-refresh & jump search null state) (#27141) -- Fixed broken URL for docker-compose.yaml (#26721) -- Fix xcom arg.py .zip bug (#26636) -- Fix 404 ``taskInstance`` errors and split into two tables (#26575) -- Fix browser warning of improper thread usage (#26551) -- template rendering issue fix (#26390) -- Clear ``autoregistered`` DAGs if there are any import errors (#26398) -- Fix ``from airflow import version`` lazy import (#26239) -- allow scroll in triggered dag runs modal (#27965) - -Misc/Internal -^^^^^^^^^^^^^ -- Remove ``is_mapped`` attribute (#27881) -- Simplify FAB table resetting (#27869) -- Fix old-style typing in Base Sensor (#27871) -- Switch (back) to late imports (#27730) -- Completed D400 for multiple folders (#27748) -- simplify notes accordion test (#27757) -- completed D400 for ``airflow/callbacks/* airflow/cli/*`` (#27721) -- Completed D400 for ``airflow/api_connexion/* directory`` (#27718) -- Completed D400 for ``airflow/listener/* directory`` (#27731) -- Completed D400 for ``airflow/lineage/* directory`` (#27732) -- Update API & Python Client versions (#27642) -- Completed D400 & D401 for ``airflow/api/*`` directory (#27716) -- Completed D400 for multiple folders (#27722) -- Bump ``minimatch`` from ``3.0.4 to 3.0.8`` in ``/airflow/www`` (#27688) -- Bump loader-utils from ``1.4.1 to 1.4.2 ``in ``/airflow/www`` (#27697) -- Disable nested task mapping for now (#27681) -- bump alembic minimum version (#27629) -- remove unused code.html (#27585) -- Enable python string normalization everywhere (#27588) -- Upgrade dependencies in order to avoid backtracking (#27531) -- Strengthen a bit and clarify importance of triaging issues (#27262) -- Deduplicate type hints (#27508) -- Add stub 'yield' to ``BaseTrigger.run`` (#27416) -- Remove upper-bound limit to dask (#27415) -- Limit Dask to under ``2022.10.1`` (#27383) -- Update old style typing (#26872) -- Enable string normalization for docs (#27269) -- Slightly faster up/downgrade tests (#26939) -- Deprecate use of core get_kube_client in PodManager (#26848) -- Add ``memray`` files to ``gitignore / dockerignore`` (#27001) -- Bump sphinx and ``sphinx-autoapi`` (#26743) -- Simplify ``RTIF.delete_old_records()`` (#26667) -- migrate last react files to typescript (#26112) -- Work around ``pyupgrade`` edge cases (#26384) - -Doc only changes -^^^^^^^^^^^^^^^^ -- Document dag_file_processor_timeouts metric as deprecated (#27067) -- Drop support for PostgreSQL 10 (#27594) -- Update index.rst (#27529) -- Add note about pushing the lazy XCom proxy to XCom (#27250) -- Fix BaseOperator link (#27441) -- [docs] best-practices add use variable with template example. 
(#27316) -- docs for custom view using plugin (#27244) -- Update graph view and grid view on overview page (#26909) -- Documentation fixes (#26819) -- make consistency on markup title string level (#26696) -- Add documentation to dag test function (#26713) -- Fix broken URL for ``docker-compose.yaml`` (#26726) -- Add a note against use of top level code in timetable (#26649) -- Fix example_datasets dag names (#26495) -- Update docs: zip-like effect is now possible in task mapping (#26435) -- changing to task decorator in docs from classic operator use (#25711) - -Airflow 2.4.3 (2022-11-14) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Make ``RotatingFilehandler`` used in ``DagProcessor`` non-caching (#27223) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -In case you want to decrease cache memory when ``CONFIG_PROCESSOR_MANAGER_LOGGER=True``, and you have your local settings created before, -you can update ``processor_manager_handler`` to use ``airflow.utils.log.non_caching_file_handler.NonCachingRotatingFileHandler`` handler instead of ``logging.RotatingFileHandler``. - -Bug Fixes -^^^^^^^^^ -- Fix double logging with some task logging handler (#27591) -- Replace FAB url filtering function with Airflow's (#27576) -- Fix mini scheduler expansion of mapped task (#27506) -- ``SLAMiss`` is nullable and not always given back when pulling task instances (#27423) -- Fix behavior of ``_`` when searching for DAGs (#27448) -- Fix getting the ``dag/task`` ids from BaseExecutor (#27550) -- Fix SQLAlchemy primary key black-out error on DDRQ (#27538) -- Fix IntegrityError during webserver startup (#27297) -- Add case insensitive constraint to username (#27266) -- Fix python external template keys (#27256) -- Reduce extraneous task log requests (#27233) -- Make ``RotatingFilehandler`` used in ``DagProcessor`` non-caching (#27223) -- Listener: Set task on SQLAlchemy TaskInstance object (#27167) -- Fix dags list page auto-refresh & jump search null state (#27141) -- Set ``executor.job_id`` to ``BackfillJob.id`` for backfills (#27020) - -Misc/Internal -^^^^^^^^^^^^^ -- Bump loader-utils from ``1.4.0`` to ``1.4.1`` in ``/airflow/www`` (#27552) -- Reduce log level for k8s ``TCP_KEEPALIVE`` etc warnings (#26981) - -Doc only changes -^^^^^^^^^^^^^^^^ -- Use correct executable in docker compose docs (#27529) -- Fix wording in DAG Runs description (#27470) -- Document that ``KubernetesExecutor`` overwrites container args (#27450) -- Fix ``BaseOperator`` links (#27441) -- Correct timer units to seconds from milliseconds. (#27360) -- Add missed import in the Trigger Rules example (#27309) -- Update SLA wording to reflect it is relative to ``Dag Run`` start. (#27111) -- Add ``kerberos`` environment variables to the docs (#27028) - -Airflow 2.4.2 (2022-10-23) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Default for ``[webserver] expose_stacktrace`` changed to ``False`` (#27059) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The default for ``[webserver] expose_stacktrace`` has been set to ``False``, instead of ``True``. This means administrators must opt-in to expose tracebacks to end users. 
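-
-Administrators who want end users to keep seeing full tracebacks can opt back in:
-
-.. code-block:: ini
-
-    [webserver]
-    # restore the pre-2.4.2 behaviour of showing stacktraces in the UI
-    expose_stacktrace = True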
- -Bug Fixes -^^^^^^^^^ -- Make tracebacks opt-in (#27059) -- Add missing AUTOINC/SERIAL for FAB tables (#26885) -- Add separate error handler for 405(Method not allowed) errors (#26880) -- Don't re-patch pods that are already controlled by current worker (#26778) -- Handle mapped tasks in task duration chart (#26722) -- Fix task duration cumulative chart (#26717) -- Avoid 500 on dag redirect (#27064) -- Filter dataset dependency data on webserver (#27046) -- Remove double collection of dags in ``airflow dags reserialize`` (#27030) -- Fix auto refresh for graph view (#26926) -- Don't overwrite connection extra with invalid json (#27142) -- Fix next run dataset modal links (#26897) -- Change dag audit log sort by date from asc to desc (#26895) -- Bump min version of jinja2 (#26866) -- Add missing colors to ``state_color_mapping`` jinja global (#26822) -- Fix running debuggers inside ``airflow tasks test`` (#26806) -- Fix warning when using xcomarg dependencies (#26801) -- demote Removed state in priority for displaying task summaries (#26789) -- Ensure the log messages from operators during parsing go somewhere (#26779) -- Add restarting state to TaskState Enum in REST API (#26776) -- Allow retrieving error message from data.detail (#26762) -- Simplify origin string cleaning (#27143) -- Remove DAG parsing from StandardTaskRunner (#26750) -- Fix non-hidden cumulative chart on duration view (#26716) -- Remove TaskFail duplicates check (#26714) -- Fix airflow tasks run --local when dags_folder differs from that of processor (#26509) -- Fix yarn warning from d3-color (#27139) -- Fix version for a couple configurations (#26491) -- Revert "No grid auto-refresh for backfill dag runs (#25042)" (#26463) -- Retry on Airflow Schedule DAG Run DB Deadlock (#26347) - -Misc/Internal -^^^^^^^^^^^^^ -- Clean-ups around task-mapping code (#26879) -- Move user-facing string to template (#26815) -- add icon legend to datasets graph (#26781) -- Bump ``sphinx`` and ``sphinx-autoapi`` (#26743) -- Simplify ``RTIF.delete_old_records()`` (#26667) -- Bump FAB to ``4.1.4`` (#26393) - -Doc only changes -^^^^^^^^^^^^^^^^ -- Fixed triple quotes in task group example (#26829) -- Documentation fixes (#26819) -- make consistency on markup title string level (#26696) -- Add a note against use of top level code in timetable (#26649) -- Fix broken URL for ``docker-compose.yaml`` (#26726) - - -Airflow 2.4.1 (2022-09-30) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -No significant changes. 
-
-Bug Fixes
-^^^^^^^^^
-
-- When rendering template, unmap task in context (#26702)
-- Fix scroll overflow for ConfirmDialog (#26681)
-- Resolve deprecation warning re ``Table.exists()`` (#26616)
-- Fix XComArg zip bug (#26636)
-- Use COALESCE when ordering runs to handle NULL (#26626)
-- Check user is active (#26635)
-- No missing user warning for public admin (#26611)
-- Allow MapXComArg to resolve after serialization (#26591)
-- Resolve warning about DISTINCT ON query on dags view (#26608)
-- Log warning when secret backend kwargs is invalid (#26580)
-- Fix grid view log try numbers (#26556)
-- Template rendering issue in passing ``templates_dict`` to task decorator (#26390)
-- Fix Deferrable stuck as ``scheduled`` during backfill (#26205)
-- Suppress SQLALCHEMY_TRACK_MODIFICATIONS warning in db init (#26617)
-- Correctly set ``json_provider_class`` on Flask app so it uses our encoder (#26554)
-- Fix WSGI root app (#26549)
-- Fix deadlock when mapped task with removed upstream is rerun (#26518)
-- ExecutorConfigType should be ``cacheable`` (#26498)
-- Fix proper joining of the path for logs retrieved from celery workers (#26493)
-- DAG Deps extends ``base_template`` (#26439)
-- Don't update backfill run from the scheduler (#26342)
-
-Doc only changes
-^^^^^^^^^^^^^^^^
-
-- Clarify owner links document (#26515)
-- Fix invalid RST in dataset concepts doc (#26434)
-- Document the ``non-sensitive-only`` option for ``expose_config`` (#26507)
-- Fix ``example_datasets`` dag names (#26495)
-- Zip-like effect is now possible in task mapping (#26435)
-- Use task decorator in docs instead of classic operators (#25711)
-
-Airflow 2.4.0 (2022-09-19)
---------------------------
-
-Significant Changes
-^^^^^^^^^^^^^^^^^^^
-
-Data-aware Scheduling and ``Dataset`` concept added to Airflow
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-New to this release of Airflow is the concept of Datasets, and with it a new way of scheduling dags:
-data-aware scheduling.
-
-This allows DAG runs to be automatically created as a result of a task "producing" a dataset. In some ways
-this can be thought of as the inverse of ``TriggerDagRunOperator``, where instead of the producing DAG
-controlling which DAGs get created, the consuming DAGs can "listen" for changes.
-
-A dataset is identified by a URI:
-
-.. code-block:: python
-
-    from airflow import Dataset
-
-    # The URI doesn't have to be absolute
-    dataset = Dataset(uri="my-dataset")
-    # Or you can use a scheme to show where it lives.
-    dataset2 = Dataset(uri="s3://bucket/prefix")
-
-To create a DAG that runs whenever a Dataset is updated, use the new ``schedule`` parameter (see below) and
-pass a list of 1 or more Datasets:
-
-.. code-block:: python
-
-    with DAG(dag_id='dataset-consumer', schedule=[dataset]):
-        ...
-
-And to mark a task as producing a dataset, pass the dataset(s) to the ``outlets`` attribute:
-
-.. code-block:: python
-
-    @task(outlets=[dataset])
-    def my_task(): ...
-
-
-    # Or for classic operators
-    BashOperator(task_id="update-ds", bash_command=..., outlets=[dataset])
-
-If you have the producer and consumer in different files you do not need to use the same Dataset object; two
-``Dataset()``\s created with the same URI are equal.
-
-Datasets represent the abstract concept of a dataset, and (for now) do not have any direct read or write
-capability - in this release we are adding the foundational feature that we will build upon.
-
-For more info on Datasets please see `Datasets documentation `_.
-
-Expanded dynamic task mapping support
-"""""""""""""""""""""""""""""""""""""
-
-Dynamic task mapping now includes support for ``expand_kwargs``, ``zip`` and ``map``.
-
-For more info on dynamic task mapping please see :doc:`/authoring-and-scheduling/dynamic-task-mapping`.
-
-DAGs used in a context manager no longer need to be assigned to a module variable (#23592)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-Previously you had to assign a DAG to a module-level variable in order for Airflow to pick it up. For example this
-
-
-.. code-block:: python
-
-    with DAG(dag_id="example") as dag:
-        ...
-
-
-    @dag
-    def dag_maker(): ...
-
-
-    dag2 = dag_maker()
-
-
-can become
-
-.. code-block:: python
-
-    with DAG(dag_id="example"):
-        ...
-
-
-    @dag
-    def dag_maker(): ...
-
-
-    dag_maker()
-
-If you want to disable the behaviour for any reason then set ``auto_register=False`` on the dag:
-
-.. code-block:: python
-
-    # This dag will not be picked up by Airflow as it's not assigned to a variable
-    with DAG(dag_id="example", auto_register=False):
-        ...
-
-Deprecation of ``schedule_interval`` and ``timetable`` arguments (#25410)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-We added a new DAG argument ``schedule`` that can accept a cron expression, timedelta object, *timetable* object, or list of dataset objects. Arguments ``schedule_interval`` and ``timetable`` are deprecated.
-
-If you previously used the ``@daily`` cron preset, your DAG may have looked like this:
-
-.. code-block:: python
-
-    with DAG(
-        dag_id="my_example",
-        start_date=datetime(2021, 1, 1),
-        schedule_interval="@daily",
-    ):
-        ...
-
-Going forward, you should use the ``schedule`` argument instead:
-
-.. code-block:: python
-
-    with DAG(
-        dag_id="my_example",
-        start_date=datetime(2021, 1, 1),
-        schedule="@daily",
-    ):
-        ...
-
-The same is true if you used a custom timetable. Previously you would have used the ``timetable`` argument:
-
-.. code-block:: python
-
-    with DAG(
-        dag_id="my_example",
-        start_date=datetime(2021, 1, 1),
-        timetable=EventsTimetable(event_dates=[pendulum.datetime(2022, 4, 5)]),
-    ):
-        ...
-
-Now you should use the ``schedule`` argument:
-
-.. code-block:: python
-
-    with DAG(
-        dag_id="my_example",
-        start_date=datetime(2021, 1, 1),
-        schedule=EventsTimetable(event_dates=[pendulum.datetime(2022, 4, 5)]),
-    ):
-        ...
-
-Removal of experimental Smart Sensors (#25507)
-""""""""""""""""""""""""""""""""""""""""""""""
-
-Smart Sensors were added in 2.0 and deprecated in favor of Deferrable operators in 2.2, and have now been removed.
-
-``airflow.contrib`` packages and deprecated modules are dynamically generated (#26153, #26179, #26167)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-The ``airflow.contrib`` packages and deprecated modules from Airflow 1.10 in the ``airflow.hooks``, ``airflow.operators`` and ``airflow.sensors`` packages are now dynamically generated modules. While users can continue using the deprecated contrib classes, these classes are no longer visible to static code check tools and will be reported as missing. It is recommended that users move to the non-deprecated classes.
-
-``DBApiHook`` and ``SQLSensor`` have moved (#24836)
-"""""""""""""""""""""""""""""""""""""""""""""""""""
-
-``DBApiHook`` and ``SQLSensor`` have been moved to the ``apache-airflow-providers-common-sql`` provider.
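-
-In practice, only the import path changes. A minimal sketch of the update, assuming the
-``apache-airflow-providers-common-sql`` package is installed (the provider module paths shown are an assumption
-about the common-sql layout and are worth verifying against the provider's documentation):
-
-.. code-block:: python
-
-    # Before: core import paths, now deprecated
-    #   from airflow.hooks.dbapi import DbApiHook
-    #   from airflow.sensors.sql import SqlSensor
-
-    # After: import from the common-sql provider
-    from airflow.providers.common.sql.hooks.sql import DbApiHook
-    from airflow.providers.common.sql.sensors.sql import SqlSensor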
- -DAG runs sorting logic changed in grid view (#25090) -"""""""""""""""""""""""""""""""""""""""""""""""""""" - -The ordering of DAG runs in the grid view has been changed to be more "natural". -The new logic generally orders by data interval, but a custom ordering can be -applied by setting the DAG to use a custom timetable. - - -New Features -^^^^^^^^^^^^ -- Add Data-aware Scheduling (`AIP-48 `_) -- Add ``@task.short_circuit`` TaskFlow decorator (#25752) -- Make ``execution_date_or_run_id`` optional in ``tasks test`` command (#26114) -- Automatically register DAGs that are used in a context manager (#23592, #26398) -- Add option of sending DAG parser logs to stdout. (#25754) -- Support multiple ``DagProcessors`` parsing files from different locations. (#25935) -- Implement ``ExternalPythonOperator`` (#25780) -- Make execution_date optional for command ``dags test`` (#26111) -- Implement ``expand_kwargs()`` against a literal list (#25925) -- Add trigger rule tooltip (#26043) -- Add conf parameter to CLI for airflow dags test (#25900) -- Include scheduled slots in pools view (#26006) -- Add ``output`` property to ``MappedOperator`` (#25604) -- Add roles delete command to cli (#25854) -- Add Airflow specific warning classes (#25799) -- Add support for ``TaskGroup`` in ``ExternalTaskSensor`` (#24902) -- Add ``@task.kubernetes`` taskflow decorator (#25663) -- Add a way to import Airflow without side-effects (#25832) -- Let timetables control generated run_ids. (#25795) -- Allow per-timetable ordering override in grid view (#25633) -- Grid logs for mapped instances (#25610, #25621, #25611) -- Consolidate to one ``schedule`` param (#25410) -- DAG regex flag in backfill command (#23870) -- Adding support for owner links in the Dags view UI (#25280) -- Ability to clear a specific DAG Run's task instances via REST API (#23516) -- Possibility to document DAG with a separate markdown file (#25509) -- Add parsing context to DAG Parsing (#25161) -- Implement ``CronTriggerTimetable`` (#23662) -- Add option to mask sensitive data in UI configuration page (#25346) -- Create new databases from the ORM (#24156) -- Implement ``XComArg.zip(*xcom_args)`` (#25176) -- Introduce ``sla_miss`` metric (#23402) -- Implement ``map()`` semantic (#25085) -- Add override method to TaskGroupDecorator (#25160) -- Implement ``expand_kwargs()`` (#24989) -- Add parameter to turn off SQL query logging (#24570) -- Add ``DagWarning`` model, and a check for missing pools (#23317) -- Add Task Logs to Grid details panel (#24249) -- Added small health check server and endpoint in scheduler(#23905) -- Add built-in External Link for ``ExternalTaskMarker`` operator (#23964) -- Add default task retry delay config (#23861) -- Add clear DagRun endpoint. 
(#23451) -- Add support for timezone as string in cron interval timetable (#23279) -- Add auto-refresh to dags home page (#22900, #24770) - -Improvements -^^^^^^^^^^^^ - -- Add more weekday operator and sensor examples #26071 (#26098) -- Add subdir parameter to dags reserialize command (#26170) -- Update zombie message to be more descriptive (#26141) -- Only send an ``SlaCallbackRequest`` if the DAG is scheduled (#26089) -- Promote ``Operator.output`` more (#25617) -- Upgrade API files to typescript (#25098) -- Less ``hacky`` double-rendering prevention in mapped task (#25924) -- Improve Audit log (#25856) -- Remove mapped operator validation code (#25870) -- More ``DAG(schedule=...)`` improvements (#25648) -- Reduce ``operator_name`` dupe in serialized JSON (#25819) -- Make grid view group/mapped summary UI more consistent (#25723) -- Remove useless statement in ``task_group_to_grid`` (#25654) -- Add optional data interval to ``CronTriggerTimetable`` (#25503) -- Remove unused code in ``/grid`` endpoint (#25481) -- Add and document description fields (#25370) -- Improve Airflow logging for operator Jinja template processing (#25452) -- Update core example DAGs to use ``@task.branch`` decorator (#25242) -- Update DAG ``audit_log`` route (#25415) -- Change stdout and stderr access mode to append in commands (#25253) -- Remove ``getTasks`` from Grid view (#25359) -- Improve taskflow type hints with ParamSpec (#25173) -- Use tables in grid details panes (#25258) -- Explicitly list ``@dag`` arguments (#25044) -- More typing in ``SchedulerJob`` and ``TaskInstance`` (#24912) -- Patch ``getfqdn`` with more resilient version (#24981) -- Replace all ``NBSP`` characters by ``whitespaces`` (#24797) -- Re-serialize all DAGs on ``airflow db upgrade`` (#24518) -- Rework contract of try_adopt_task_instances method (#23188) -- Make ``expand()`` error vague so it's not misleading (#24018) -- Add enum validation for ``[webserver]analytics_tool`` (#24032) -- Add ``dttm`` searchable field in audit log (#23794) -- Allow more parameters to be piped through via ``execute_in_subprocess`` (#23286) -- Use ``func.count`` to count rows (#23657) -- Remove stale serialized dags (#22917) -- AIP45 Remove dag parsing in airflow run local (#21877) -- Add support for queued state in DagRun update endpoint. 
(#23481) -- Add fields to dagrun endpoint (#23440) -- Use ``sql_alchemy_conn`` for celery result backend when ``result_backend`` is not set (#24496) - -Bug Fixes -^^^^^^^^^ - -- Have consistent types between the ORM and the migration files (#24044, #25869) -- Disallow any dag tags longer than 100 char (#25196) -- Add the dag_id to ``AirflowDagCycleException`` message (#26204) -- Properly build URL to retrieve logs independently from system (#26337) -- For worker log servers only bind to IPV6 when dual stack is available (#26222) -- Fix ``TaskInstance.task`` not defined before ``handle_failure`` (#26040) -- Undo secrets backend config caching (#26223) -- Fix faulty executor config serialization logic (#26191) -- Show ``DAGs`` and ``Datasets`` menu links based on role permission (#26183) -- Allow setting ``TaskGroup`` tooltip via function docstring (#26028) -- Fix RecursionError on graph view of a DAG with many tasks (#26175) -- Fix backfill occasional deadlocking (#26161) -- Fix ``DagRun.start_date`` not set during backfill with ``--reset-dagruns`` True (#26135) -- Use label instead of id for dynamic task labels in graph (#26108) -- Don't fail DagRun when leaf ``mapped_task`` is SKIPPED (#25995) -- Add group prefix to decorated mapped task (#26081) -- Fix UI flash when triggering with dup logical date (#26094) -- Fix Make items nullable for ``TaskInstance`` related endpoints to avoid API errors (#26076) -- Fix ``BranchDateTimeOperator`` to be ``timezone-awreness-insensitive`` (#25944) -- Fix legacy timetable schedule interval params (#25999) -- Fix response schema for ``list-mapped-task-instance`` (#25965) -- Properly check the existence of missing mapped TIs (#25788) -- Fix broken auto-refresh on grid view (#25950) -- Use per-timetable ordering in grid UI (#25880) -- Rewrite recursion when parsing DAG into iteration (#25898) -- Find cross-group tasks in ``iter_mapped_dependants`` (#25793) -- Fail task if mapping upstream fails (#25757) -- Support ``/`` in variable get endpoint (#25774) -- Use cfg default_wrap value for grid logs (#25731) -- Add origin request args when triggering a run (#25729) -- Operator name separate from class (#22834) -- Fix incorrect data interval alignment due to assumption on input time alignment (#22658) -- Return None if an ``XComArg`` fails to resolve (#25661) -- Correct ``json`` arg help in ``airflow variables set`` command (#25726) -- Added MySQL index hint to use ``ti_state`` on ``find_zombies`` query (#25725) -- Only excluded actually expanded fields from render (#25599) -- Grid, fix toast for ``axios`` errors (#25703) -- Fix UI redirect (#26409) -- Require dag_id arg for dags list-runs (#26357) -- Check for queued states for dags auto-refresh (#25695) -- Fix upgrade code for the ``dag_owner_attributes`` table (#25579) -- Add map index to task logs api (#25568) -- Ensure that zombie tasks for dags with errors get cleaned up (#25550) -- Make extra link work in UI (#25500) -- Sync up plugin API schema and definition (#25524) -- First/last names can be empty (#25476) -- Refactor DAG pages to be consistent (#25402) -- Check ``expand_kwargs()`` input type before unmapping (#25355) -- Filter XCOM by key when calculating map lengths (#24530) -- Fix ``ExternalTaskSensor`` not working with dynamic task (#25215) -- Added exception catching to send default email if template file raises any exception (#24943) -- Bring ``MappedOperator`` members in sync with ``BaseOperator`` (#24034) - - -Misc/Internal -^^^^^^^^^^^^^ - -- Add automatically generated ``ERD`` schema for 
the ``MetaData`` DB (#26217) -- Mark serialization functions as internal (#26193) -- Remove remaining deprecated classes and replace them with ``PEP562`` (#26167) -- Move ``dag_edges`` and ``task_group_to_dict`` to corresponding util modules (#26212) -- Lazily import many modules to improve import speed (#24486, #26239) -- FIX Incorrect typing information (#26077) -- Add missing contrib classes to deprecated dictionaries (#26179) -- Re-configure/connect the ``ORM`` after forking to run a DAG processor (#26216) -- Remove cattrs from lineage processing. (#26134) -- Removed deprecated contrib files and replace them with ``PEP-562`` getattr (#26153) -- Make ``BaseSerialization.serialize`` "public" to other classes. (#26142) -- Change the template to use human readable task_instance description (#25960) -- Bump ``moment-timezone`` from ``0.5.34`` to ``0.5.35`` in ``/airflow/www`` (#26080) -- Fix Flask deprecation warning (#25753) -- Add ``CamelCase`` to generated operations types (#25887) -- Fix migration issues and tighten the CI upgrade/downgrade test (#25869) -- Fix type annotations in ``SkipMixin`` (#25864) -- Workaround setuptools editable packages path issue (#25848) -- Bump ``undici`` from ``5.8.0 to 5.9.1`` in /airflow/www (#25801) -- Add custom_operator_name attr to ``_BranchPythonDecoratedOperator`` (#25783) -- Clarify ``filename_template`` deprecation message (#25749) -- Use ``ParamSpec`` to replace ``...`` in Callable (#25658) -- Remove deprecated modules (#25543) -- Documentation on task mapping additions (#24489) -- Remove Smart Sensors (#25507) -- Fix ``elasticsearch`` test config to avoid warning on deprecated template (#25520) -- Bump ``terser`` from ``4.8.0 to 4.8.1`` in /airflow/ui (#25178) -- Generate ``typescript`` types from rest ``API`` docs (#25123) -- Upgrade utils files to ``typescript`` (#25089) -- Upgrade remaining context file to ``typescript``. 
(#25096) -- Migrate files to ``ts`` (#25267) -- Upgrade grid Table component to ``ts.`` (#25074) -- Skip mapping against mapped ``ti`` if it returns None (#25047) -- Refactor ``js`` file structure (#25003) -- Move mapped kwargs introspection to separate type (#24971) -- Only assert stuff for mypy when type checking (#24937) -- Bump ``moment`` from ``2.29.3 to 2.29.4`` in ``/airflow/www`` (#24885) -- Remove "bad characters" from our codebase (#24841) -- Remove ``xcom_push`` flag from ``BashOperator`` (#24824) -- Move Flask hook registration to end of file (#24776) -- Upgrade more javascript files to ``typescript`` (#24715) -- Clean up task decorator type hints and docstrings (#24667) -- Preserve original order of providers' connection extra fields in UI (#24425) -- Rename ``charts.css`` to ``chart.css`` (#24531) -- Rename ``grid.css`` to ``chart.css`` (#24529) -- Misc: create new process group by ``set_new_process_group`` utility (#24371) -- Airflow UI fix Prototype Pollution (#24201) -- Bump ``moto`` version (#24222) -- Remove unused ``[github_enterprise]`` from ref docs (#24033) -- Clean up ``f-strings`` in logging calls (#23597) -- Add limit for ``JPype1`` (#23847) -- Simply json responses (#25518) -- Add min attrs version (#26408) - -Doc only changes -^^^^^^^^^^^^^^^^ -- Add url prefix setting for ``Celery`` Flower (#25986) -- Updating deprecated configuration in examples (#26037) -- Fix wrong link for taskflow tutorial (#26007) -- Reorganize tutorials into a section (#25890) -- Fix concept doc for dynamic task map (#26002) -- Update code examples from "classic" operators to taskflow (#25845, #25657) -- Add instructions on manually fixing ``MySQL`` Charset problems (#25938) -- Prefer the local Quick Start in docs (#25888) -- Fix broken link to ``Trigger Rules`` (#25840) -- Improve docker documentation (#25735) -- Correctly link to Dag parsing context in docs (#25722) -- Add note on ``task_instance_mutation_hook`` usage (#25607) -- Note that TaskFlow API automatically passes data between tasks (#25577) -- Update DAG run to clarify when a DAG actually runs (#25290) -- Update tutorial docs to include a definition of operators (#25012) -- Rewrite the Airflow documentation home page (#24795) -- Fix ``task-generated mapping`` example (#23424) -- Add note on subtle logical date change in ``2.2.0`` (#24413) -- Add missing import in best-practices code example (#25391) - - - -Airflow 2.3.4 (2022-08-23) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ - -Added new config ``[logging]log_formatter_class`` to fix timezone display for logs on UI (#24811) -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -If you are using a custom Formatter subclass in your ``[logging]logging_config_class``, please inherit from ``airflow.utils.log.timezone_aware.TimezoneAware`` instead of ``logging.Formatter``. -For example, in your ``custom_config.py``: - -.. code-block:: python - - from airflow.utils.log.timezone_aware import TimezoneAware - - - # before - class YourCustomFormatter(logging.Formatter): ... - - - # after - class YourCustomFormatter(TimezoneAware): ... - - - AIRFLOW_FORMATTER = LOGGING_CONFIG["formatters"]["airflow"] - AIRFLOW_FORMATTER["class"] = "somewhere.your.custom_config.YourCustomFormatter" - # or use TimezoneAware class directly. If you don't have custom Formatter. 
-    AIRFLOW_FORMATTER["class"] = "airflow.utils.log.timezone_aware.TimezoneAware"
-
-Bug Fixes
-^^^^^^^^^
-
-- Disable ``attrs`` state management on ``MappedOperator`` (#24772)
-- Serialize ``pod_override`` to JSON before pickling ``executor_config`` (#24356)
-- Fix ``pid`` check (#24636)
-- Rotate session id during login (#25771)
-- Fix mapped sensor with reschedule mode (#25594)
-- Cache the custom secrets backend so the same instance gets reused (#25556)
-- Add right padding (#25554)
-- Fix reducing mapped length of a mapped task at runtime after a clear (#25531)
-- Fix ``airflow db reset`` when dangling tables exist (#25441)
-- Change ``disable_verify_ssl`` behaviour (#25023)
-- Set default task group in dag.add_task method (#25000)
-- Removed interfering force of index. (#25404)
-- Remove useless logging line (#25347)
-- Adding mysql index hint to use index on ``task_instance.state`` in critical section query (#25673)
-- Configurable umask to all daemonized processes. (#25664)
-- Fix the errors raised when None is passed to template filters (#25593)
-- Allow wildcarded CORS origins (#25553)
-- Fix "This Session's transaction has been rolled back" (#25532)
-- Fix Serialization error in ``TaskCallbackRequest`` (#25471)
-- fix - resolve bash by absolute path (#25331)
-- Add ``__repr__`` to ParamsDict class (#25305)
-- Only load distribution of a name once (#25296)
-- convert ``TimeSensorAsync`` ``target_time`` to utc on call time (#25221)
-- call ``updateNodeLabels`` after ``expandGroup`` (#25217)
-- Stop SLA callbacks gazumping other callbacks and DOS'ing the ``DagProcessorManager`` queue (#25147)
-- Fix ``invalidateQueries`` call (#25097)
-- ``airflow/www/package.json``: Add name, version fields. (#25065)
-- No grid auto-refresh for backfill dag runs (#25042)
-- Fix tag link on dag detail page (#24918)
-- Fix zombie task handling with multiple schedulers (#24906)
-- Bind log server on worker to ``IPv6`` address (#24755) (#24846)
-- Add ``%z`` for ``%(asctime)s`` to fix timezone for logs on UI (#24811)
-- ``TriggerDagRunOperator.operator_extra_links`` is attr (#24676)
-- Send DAG timeout callbacks to processor outside of ``prohibit_commit`` (#24366)
-- Don't rely on current ORM structure for db clean command (#23574)
-- Clear next method when clearing TIs (#23929)
-- Two typing fixes (#25690)
-
-Doc only changes
-^^^^^^^^^^^^^^^^
-
-- Update set-up-database.rst (#24983)
-- Fix syntax in mysql setup documentation (#24893, #24939)
-- Note how DAG policy works with default_args (#24804)
-- Update PythonVirtualenvOperator Howto (#24782)
-- Doc: Add hyperlinks to GitHub PRs for Release Notes (#24532)
-
-Misc/Internal
-^^^^^^^^^^^^^
-
-- Remove deprecation warning when using default remote task logging handlers (#25764)
-- Clearer method name in scheduler_job.py (#23702)
-- Bump cattrs version (#25689)
-- Include missing mention of ``external_executor_id`` in ``sql_engine_collation_for_ids`` docs (#25197)
-- Refactor ``DR.task_instance_scheduling_decisions`` (#24774)
-- Sort operator extra links (#24992)
-- Extends ``resolve_xcom_backend`` function level documentation (#24965)
-- Upgrade FAB to 4.1.3 (#24884)
-- Limit Flask to <2.3 in the wake of 2.2 breaking our tests (#25511)
-- Limit astroid version to < 2.12 (#24982)
-- Move javascript compilation to host (#25169)
-- Bump typing-extensions and mypy for ParamSpec (#25088)
-
-
-Airflow 2.3.3 (2022-07-09)
---------------------------
-
-Significant Changes
-^^^^^^^^^^^^^^^^^^^
-
-We've upgraded Flask App Builder to a major version 4.* (#24399)
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Flask App Builder is one of the important components of Airflow Webserver, as -it uses a lot of dependencies that are essential to run the webserver and integrate it -in enterprise environments - especially authentication. - -The FAB 4.* upgrades a number of dependencies to major releases, which upgrades them to versions -that have a number of security issues fixed. A lot of tests were performed to bring the dependencies -in a backwards-compatible way, however the dependencies themselves implement breaking changes in their -internals so it might be that some of those changes might impact the users in case they are using the -libraries for their own purposes. - -One important change that you likely will need to apply to Oauth configuration is to add -``server_metadata_url`` or ``jwks_uri`` and you can read about it more -in `this issue `_. - -Here is the list of breaking changes in dependencies that comes together with FAB 4: - - * ``Flask`` from 1.X to 2.X `breaking changes `__ - - * ``flask-jwt-extended`` 3.X to 4.X `breaking changes: `__ - - * ``Jinja2`` 2.X to 3.X `breaking changes: `__ - - * ``Werkzeug`` 1.X to 2.X `breaking changes `__ - - * ``pyJWT`` 1.X to 2.X `breaking changes: `__ - - * ``Click`` 7.X to 8.X `breaking changes: `__ - - * ``itsdangerous`` 1.X to 2.X `breaking changes `__ - -Bug Fixes -^^^^^^^^^ - -- Fix exception in mini task scheduler (#24865) -- Fix cycle bug with attaching label to task group (#24847) -- Fix timestamp defaults for ``sensorinstance`` (#24638) -- Move fallible ``ti.task.dag`` assignment back inside ``try/except`` block (#24533) (#24592) -- Add missing types to ``FSHook`` (#24470) -- Mask secrets in ``stdout`` for ``airflow tasks test`` (#24362) -- ``DebugExecutor`` use ``ti.run()`` instead of ``ti._run_raw_task`` (#24357) -- Fix bugs in ``URI`` constructor for ``MySQL`` connection (#24320) -- Missing ``scheduleinterval`` nullable true added in ``openapi`` (#24253) -- Unify ``return_code`` interface for task runner (#24093) -- Handle occasional deadlocks in trigger with retries (#24071) -- Remove special serde logic for mapped ``op_kwargs`` (#23860) -- ``ExternalTaskSensor`` respects ``soft_fail`` if the external task enters a ``failed_state`` (#23647) -- Fix ``StatD`` timing metric units (#21106) -- Add ``cache_ok`` flag to sqlalchemy TypeDecorators. 
(#24499) -- Allow for ``LOGGING_LEVEL=DEBUG`` (#23360) -- Fix grid date ticks (#24738) -- Debounce status highlighting in Grid view (#24710) -- Fix Grid vertical scrolling (#24684) -- don't try to render child rows for closed groups (#24637) -- Do not calculate grid root instances (#24528) -- Maintain grid view selection on filtering upstream (#23779) -- Speed up ``grid_data`` endpoint by 10x (#24284) -- Apply per-run log templates to log handlers (#24153) -- Don't crash scheduler if exec config has old k8s objects (#24117) -- ``TI.log_url`` fix for ``map_index`` (#24335) -- Fix migration ``0080_2_0_2`` - Replace null values before setting column not null (#24585) -- Patch ``sql_alchemy_conn`` if old Postgres schemes used (#24569) -- Seed ``log_template`` table (#24511) -- Fix deprecated ``log_id_template`` value (#24506) -- Fix toast messages (#24505) -- Add indexes for CASCADE deletes for ``task_instance`` (#24488) -- Return empty dict if Pod JSON encoding fails (#24478) -- Improve grid rendering performance with a custom tooltip (#24417, #24449) -- Check for ``run_id`` for grid group summaries (#24327) -- Optimize calendar view for cron scheduled DAGs (#24262) -- Use ``get_hostname`` instead of ``socket.getfqdn`` (#24260) -- Check that edge nodes actually exist (#24166) -- Fix ``useTasks`` crash on error (#24152) -- Do not fail re-queued TIs (#23846) -- Reduce grid view API calls (#24083) -- Rename Permissions to Permission Pairs. (#24065) -- Replace ``use_task_execution_date`` with ``use_task_logical_date`` (#23983) -- Grid fix details button truncated and small UI tweaks (#23934) -- Add TaskInstance State ``REMOVED`` to finished states and success states (#23797) -- Fix mapped task immutability after clear (#23667) -- Fix permission issue for dag that has dot in name (#23510) -- Fix closing connection ``dbapi.get_pandas_df`` (#23452) -- Check bag DAG ``schedule_interval`` match timetable (#23113) -- Parse error for task added to multiple groups (#23071) -- Fix flaky order of returned dag runs (#24405) -- Migrate ``jsx`` files that affect run/task selection to ``tsx`` (#24509) -- Fix links to sources for examples (#24386) -- Set proper ``Content-Type`` and ``chartset`` on ``grid_data`` endpoint (#24375) - -Doc only changes -^^^^^^^^^^^^^^^^ - -- Update templates doc to mention ``extras`` and format Airflow ``Vars`` / ``Conns`` (#24735) -- Document built in Timetables (#23099) -- Alphabetizes two tables (#23923) -- Clarify that users should not use Maria DB (#24556) -- Add imports to deferring code samples (#24544) -- Add note about image regeneration in June 2022 (#24524) -- Small cleanup of ``get_current_context()`` chapter (#24482) -- Fix default 2.2.5 ``log_id_template`` (#24455) -- Update description of installing providers separately from core (#24454) -- Mention context variables and logging (#24304) - -Misc/Internal -^^^^^^^^^^^^^ - -- Remove internet explorer support (#24495) -- Removing magic status code numbers from ``api_connexion`` (#24050) -- Upgrade FAB to ``4.1.2`` (#24619) -- Switch Markdown engine to ``markdown-it-py`` (#19702) -- Update ``rich`` to latest version across the board. (#24186) -- Get rid of ``TimedJSONWebSignatureSerializer`` (#24519) -- Update flask-appbuilder ``authlib``/ ``oauth`` dependency (#24516) -- Upgrade to ``webpack`` 5 (#24485) -- Add ``typescript`` (#24337) -- The JWT claims in the request to retrieve logs have been standardized: we use ``nbf`` and ``aud`` claims for - maturity and audience of the requests. 
Also "filename" payload field is used to keep log name. (#24519) -- Address all ``yarn`` test warnings (#24722) -- Upgrade to react 18 and chakra 2 (#24430) -- Refactor ``DagRun.verify_integrity`` (#24114) -- Upgrade FAB to ``4.1.1`` (#24399) -- We now need at least ``Flask-WTF 0.15`` (#24621) - - -Airflow 2.3.2 (2022-06-04) --------------------------- - -No significant changes. - -Bug Fixes -^^^^^^^^^ - -- Run the ``check_migration`` loop at least once -- Fix grid view for mapped tasks (#24059) -- Icons in grid view for different DAG run types (#23970) -- Faster grid view (#23951) -- Disallow calling expand with no arguments (#23463) -- Add missing ``is_mapped`` field to Task response. (#23319) -- DagFileProcessorManager: Start a new process group only if current process not a session leader (#23872) -- Mask sensitive values for not-yet-running TIs (#23807) -- Add cascade to ``dag_tag`` to ``dag`` foreign key (#23444) -- Use ``--subdir`` argument value for standalone dag processor. (#23864) -- Highlight task states by hovering on legend row (#23678) -- Fix and speed up grid view (#23947) -- Prevent UI from crashing if grid task instances are null (#23939) -- Remove redundant register exit signals in ``dag-processor`` command (#23886) -- Add ``__wrapped__`` property to ``_TaskDecorator`` (#23830) -- Fix UnboundLocalError when ``sql`` is empty list in DbApiHook (#23816) -- Enable clicking on DAG owner in autocomplete dropdown (#23804) -- Simplify flash message for ``_airflow_moved`` tables (#23635) -- Exclude missing tasks from the gantt view (#23627) - -Doc only changes -^^^^^^^^^^^^^^^^ - -- Add column names for DB Migration Reference (#23853) - -Misc/Internal -^^^^^^^^^^^^^ - -- Remove pinning for xmltodict (#23992) - - -Airflow 2.3.1 (2022-05-25) --------------------------- - -Significant Changes -^^^^^^^^^^^^^^^^^^^ -No significant changes. - -Bug Fixes -^^^^^^^^^ - -- Automatically reschedule stalled queued tasks in ``CeleryExecutor`` (#23690) -- Fix expand/collapse all buttons (#23590) -- Grid view status filters (#23392) -- Expand/collapse all groups (#23487) -- Fix retrieval of deprecated non-config values (#23723) -- Fix secrets rendered in UI when task is not executed. (#22754) -- Fix provider import error matching (#23825) -- Fix regression in ignoring symlinks (#23535) -- Fix ``dag-processor`` fetch metadata database config (#23575) -- Fix auto upstream dep when expanding non-templated field (#23771) -- Fix task log is not captured (#23684) -- Add ``reschedule`` to the serialized fields for the ``BaseSensorOperator`` (#23674) -- Modify db clean to also catch the ProgrammingError exception (#23699) -- Remove titles from link buttons (#23736) -- Fix grid details header text overlap (#23728) -- Ensure ``execution_timeout`` as timedelta (#23655) -- Don't run pre-migration checks for downgrade (#23634) -- Add index for event column in log table (#23625) -- Implement ``send_callback`` method for ``CeleryKubernetesExecutor`` and ``LocalKubernetesExecutor`` (#23617) -- Fix ``PythonVirtualenvOperator`` templated_fields (#23559) -- Apply specific ID collation to ``root_dag_id`` too (#23536) -- Prevent ``KubernetesJobWatcher`` getting stuck on resource too old (#23521) -- Fix scheduler crash when expanding with mapped task that returned none (#23486) -- Fix broken dagrun links when many runs start at the same time (#23462) -- Fix: Exception when parsing log #20966 (#23301) -- Handle invalid date parsing in webserver views. 
(#23161) -- Pools with negative open slots should not block other pools (#23143) -- Move around overflow, position and padding (#23044) -- Change approach to finding bad rows to LEFT OUTER JOIN. (#23528) -- Only count bad refs when ``moved`` table exists (#23491) -- Visually distinguish task group summary (#23488) -- Remove color change for highly nested groups (#23482) -- Optimize 2.3.0 pre-upgrade check queries (#23458) -- Add backward compatibility for ``core__sql_alchemy_conn__cmd`` (#23441) -- Fix literal cross product expansion (#23434) -- Fix broken task instance link in xcom list (#23367) -- Fix connection test button (#23345) -- fix cli ``airflow dags show`` for mapped operator (#23339) -- Hide some task instance attributes (#23338) -- Don't show grid actions if server would reject with permission denied (#23332) -- Use run_id for ``ti.mark_success_url`` (#23330) -- Fix update user auth stats (#23314) -- Use ``
` documentation.
-* Import Connection lazily in hooks to avoid cycles (#15361)
-* Rename last_scheduler_run into last_parsed_time, and ensure it's updated in DB (#14581)
-* Make TaskInstance.pool_slots not nullable with a default of 1 (#14406)
-* Log migrations info in consistent way (#14158)
+REST API: DAG Trigger Behavior Updated
+""""""""""""""""""""""""""""""""""""""

-Airflow 2.0.1 (2021-02-08)
---------------------------
+The behavior of the ``POST /dags/{dag_id}/dagRuns`` endpoint has changed. If a ``logical_date`` is not explicitly
+provided when triggering a DAG via the REST API, it now defaults to ``None``.

-Significant Changes
-^^^^^^^^^^^^^^^^^^^
+This aligns with event-driven DAGs and manual runs in Airflow 3.0, but may break backward compatibility with scripts or
+tools that previously relied on Airflow auto-generating a timestamped ``logical_date``.

-Permission to view Airflow Configurations has been removed from ``User`` and ``Viewer`` role
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+Removed CLI Flags and Commands
+""""""""""""""""""""""""""""""

-Previously, Users with ``User`` or ``Viewer`` role were able to get/view configurations using
-the REST API or in the Webserver. From Airflow 2.0.1, only users with ``Admin`` or ``Op`` role would be able
-to get/view Configurations.
+Several deprecated CLI arguments and commands that were marked for removal in earlier versions have now been cleaned up
+in Airflow 3.0. Run ``airflow --help`` to review the current set of available commands and arguments.

-To allow users with other roles to view configuration, add ``can read on Configurations`` permissions to that role.
+- The deprecated ``--ignore-depends-on-past`` CLI option has been replaced by ``--depends-on-past ignore``.

-Note that if ``[webserver] expose_config`` is set to ``False``\ , the API will throw a ``403`` response even if
-the user has role with ``can read on Configurations`` permission.
+- The ``--tree`` flag for the ``airflow tasks list`` command has been removed. The output produced with that flag can be
+  expensive to generate and extremely large, depending on the DAG. ``airflow dags show`` is a better way to
+  visualize the relationship of tasks in a DAG.

-Default ``[celery] worker_concurrency`` is changed to ``16``
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+- The ``dag_id`` argument of the ``dags list-runs`` CLI command has changed from a flag (``-d``, ``--dag-id``) to a positional argument.

-The default value for ``[celery] worker_concurrency`` was ``16`` for Airflow <2.0.0.
-However, it was unintentionally changed to ``8`` in 2.0.0.
+- The ``airflow db init`` and ``airflow db upgrade`` commands have been removed. Use ``airflow db migrate`` instead
+  to initialize or migrate the metadata database. If you would like to create default connections, use
+  ``airflow connections create-default-connections``.

-From Airflow 2.0.1, we revert to the old default of ``16``.
+- The ``airflow api-server`` command has replaced the ``airflow webserver`` CLI command.

-Default ``[scheduler] min_file_process_interval`` is changed to ``30``
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-The default value for ``[scheduler] min_file_process_interval`` was ``0``\ ,
-due to which the CPU Usage mostly stayed around 100% as the DAG files are parsed
-constantly.
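+
+As a quick sketch, the replacements above map old invocations to new ones as follows (the commands shown are
+exactly those named in the bullets; no additional flags are implied):
+
+.. code-block:: bash
+
+    # Initialize or migrate the metadata database (replaces "db init" / "db upgrade")
+    airflow db migrate
+
+    # Create the default connections, which is no longer part of database initialization
+    airflow connections create-default-connections
+
+    # Start the API server (replaces "airflow webserver")
+    airflow api-server
+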
+Provider Refactor & Standardization +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -From Airflow 2.0.0, the scheduling decisions have been moved from -DagFileProcessor to Scheduler, so we can keep the default a bit higher: ``30``. +Airflow 3.0 completes the migration of several core operators, sensors, and hooks into the new +``apache-airflow-providers-standard`` package. This package now includes commonly used components such as: -Bug Fixes -^^^^^^^^^ +- ``PythonOperator`` +- ``BashOperator`` +- ``EmailOperator`` +- ``ShortCircuitOperator`` -- Bugfix: Return XCom Value in the XCom Endpoint API (#13684) -- Bugfix: Import error when using custom backend and ``sql_alchemy_conn_secret`` (#13260) -- Allow PID file path to be relative when daemonize a process (scheduler, kerberos, etc) (#13232) -- Bugfix: no generic ``DROP CONSTRAINT`` in MySQL during ``airflow db upgrade`` (#13239) -- Bugfix: Sync Access Control defined in DAGs when running ``sync-perm`` (#13377) -- Stop sending Callback Requests if no callbacks are defined on DAG (#13163) -- BugFix: Dag-level Callback Requests were not run (#13651) -- Stop creating duplicate Dag File Processors (#13662) -- Filter DagRuns with Task Instances in removed State while Scheduling (#13165) -- Bump ``datatables.net`` from 1.10.21 to 1.10.22 in /airflow/www (#13143) -- Bump ``datatables.net`` JS to 1.10.23 (#13253) -- Bump ``dompurify`` from 2.0.12 to 2.2.6 in /airflow/www (#13164) -- Update minimum ``cattrs`` version (#13223) -- Remove inapplicable arg 'output' for CLI pools import/export (#13071) -- Webserver: Fix the behavior to deactivate the authentication option and add docs (#13191) -- Fix: add support for no-menu plugin views (#11742) -- Add ``python-daemon`` limit for Python 3.8+ to fix daemon crash (#13540) -- Change the default celery ``worker_concurrency`` to 16 (#13612) -- Audit Log records View should not contain link if ``dag_id`` is None (#13619) -- Fix invalid ``continue_token`` for cleanup list pods (#13563) -- Switches to latest version of snowflake connector (#13654) -- Fix backfill crash on task retry or reschedule (#13712) -- Setting ``max_tis_per_query`` to ``0`` now correctly removes the limit (#13512) -- Fix race conditions in task callback invocations (#10917) -- Fix webserver exiting when gunicorn master crashes (#13518)(#13780) -- Fix SQL syntax to check duplicate connections (#13783) -- ``BaseBranchOperator`` will push to xcom by default (#13704) (#13763) -- Fix Deprecation for ``configuration.getsection`` (#13804) -- Fix TaskNotFound in log endpoint (#13872) -- Fix race condition when using Dynamic DAGs (#13893) -- Fix: Linux/Chrome window bouncing in Webserver -- Fix db shell for sqlite (#13907) -- Only compare updated time when Serialized DAG exists (#13899) -- Fix dag run type enum query for mysqldb driver (#13278) -- Add authentication to lineage endpoint for experimental API (#13870) -- Do not add User role perms to custom roles. (#13856) -- Do not add ``Website.can_read`` access to default roles. 
(#13923) -- Fix invalid value error caused by long Kubernetes pod name (#13299) -- Fix DB Migration for SQLite to upgrade to 2.0 (#13921) -- Bugfix: Manual DagRun trigger should not skip scheduled runs (#13963) -- Stop loading Extra Operator links in Scheduler (#13932) -- Added missing return parameter in read function of ``FileTaskHandler`` (#14001) -- Bugfix: Do not try to create a duplicate Dag Run in Scheduler (#13920) -- Make ``v1/config`` endpoint respect webserver ``expose_config`` setting (#14020) -- Disable row level locking for Mariadb and MySQL <8 (#14031) -- Bugfix: Fix permissions to triggering only specific DAGs (#13922) -- Fix broken SLA Mechanism (#14056) -- Bugfix: Scheduler fails if task is removed at runtime (#14057) -- Remove permissions to read Configurations for User and Viewer roles (#14067) -- Fix DB Migration from 2.0.1rc1 +These operators were previously bundled inside ``airflow-core`` but are now treated as provider-managed components to +improve modularity, testability, and lifecycle independence. -Improvements -^^^^^^^^^^^^ +This change enables more consistent versioning across providers and prepares Airflow for a future where all integrations +— including "standard" ones — follow the same interface model. -- Increase the default ``min_file_process_interval`` to decrease CPU Usage (#13664) -- Dispose connections when running tasks with ``os.fork`` & ``CeleryExecutor`` (#13265) -- Make function purpose clearer in ``example_kubernetes_executor`` example dag (#13216) -- Remove unused libraries - ``flask-swagger``, ``funcsigs`` (#13178) -- Display alternative tooltip when a Task has yet to run (no TI) (#13162) -- User werkzeug's own type conversion for request args (#13184) -- UI: Add ``queued_by_job_id`` & ``external_executor_id`` Columns to TI View (#13266) -- Make ``json-merge-patch`` an optional library and unpin it (#13175) -- Adds missing LDAP "extra" dependencies to ldap provider. 
(#13308) -- Refactor ``setup.py`` to better reflect changes in providers (#13314) -- Pin ``pyjwt`` and Add integration tests for Apache Pinot (#13195) -- Removes provider-imposed requirements from ``setup.cfg`` (#13409) -- Replace deprecated decorator (#13443) -- Streamline & simplify ``__eq__`` methods in models Dag and BaseOperator (#13449) -- Additional properties should be allowed in provider schema (#13440) -- Remove unused dependency - ``contextdecorator`` (#13455) -- Remove 'typing' dependency (#13472) -- Log migrations info in consistent way (#13458) -- Unpin ``mysql-connector-python`` to allow ``8.0.22`` (#13370) -- Remove thrift as a core dependency (#13471) -- Add ``NotFound`` response for DELETE methods in OpenAPI YAML (#13550) -- Stop Log Spamming when ``[core] lazy_load_plugins`` is ``False`` (#13578) -- Display message and docs link when no plugins are loaded (#13599) -- Unpin restriction for ``colorlog`` dependency (#13176) -- Add missing Dag Tag for Example DAGs (#13665) -- Support tables in DAG docs (#13533) -- Add ``python3-openid`` dependency (#13714) -- Add ``__repr__`` for Executors (#13753) -- Add description to hint if ``conn_type`` is missing (#13778) -- Upgrade Azure blob to v12 (#12188) -- Add extra field to ``get_connnection`` REST endpoint (#13885) -- Make Smart Sensors DB Migration idempotent (#13892) -- Improve the error when DAG does not exist when running dag pause command (#13900) -- Update ``airflow_local_settings.py`` to fix an error message (#13927) -- Only allow passing JSON Serializable conf to ``TriggerDagRunOperator`` (#13964) -- Bugfix: Allow getting details of a DAG with null ``start_date`` (REST API) (#13959) -- Add params to the DAG details endpoint (#13790) -- Make the role assigned to anonymous users customizable (#14042) -- Retry critical methods in Scheduler loop in case of ``OperationalError`` (#14032) +To maintain compatibility with existing DAGs, the ``apache-airflow-providers-standard`` package is installable on both +Airflow 2.x and 3.x. Users upgrading from Airflow 2.x are encouraged to begin updating import paths and testing provider +installation in advance of the upgrade. -Doc only changes -^^^^^^^^^^^^^^^^ +Legacy imports such as ``airflow.operators.python.PythonOperator`` are deprecated and will be removed soon. 
They should be
+replaced with:

-- Add Missing StatsD Metrics in Docs (#13708)
-- Add Missing Email configs in Configuration doc (#13709)
-- Add quick start for Airflow on Docker (#13660)
-- Describe which Python versions are supported (#13259)
-- Add note block to 2.x migration docs (#13094)
-- Add documentation about webserver_config.py (#13155)
-- Add missing version information to recently added configs (#13161)
-- API: Use generic information in UpdateMask component (#13146)
-- Add Airflow 2.0.0 to requirements table (#13140)
-- Avoid confusion in doc for CeleryKubernetesExecutor (#13116)
-- Update docs link in REST API spec (#13107)
-- Add link to PyPI Repository to provider docs (#13064)
-- Fix link to Airflow master branch documentation (#13179)
-- Minor enhancements to Sensors docs (#13381)
-- Use 2.0.0 in Airflow docs & Breeze (#13379)
-- Improves documentation regarding providers and custom connections (#13375)(#13410)
-- Fix malformed table in production-deployment.rst (#13395)
-- Update celery.rst to fix broken links (#13400)
-- Remove reference to scheduler run_duration param in docs (#13346)
-- Set minimum SQLite version supported (#13412)
-- Fix installation doc (#13462)
-- Add docs about mocking variables and connections (#13502)
-- Add docs about Flask CLI (#13500)
-- Fix Upgrading to 2 guide to use ``rbac`` UI (#13569)
-- Make docs clear that Auth can not be disabled for Stable API (#13568)
-- Remove archived links from docs & add link for AIPs (#13580)
-- Minor fixes in upgrading-to-2.rst (#13583)
-- Fix Link in Upgrading to 2.0 guide (#13584)
-- Fix heading for Mocking section in best-practices.rst (#13658)
-- Add docs on how to use custom operators within plugins folder (#13186)
-- Update docs to register Operator Extra Links (#13683)
-- Improvements for database setup docs (#13696)
-- Replace module path to Class with just Class Name (#13719)
-- Update DAG Serialization docs (#13722)
-- Fix link to Apache Airflow docs in webserver (#13250)
-- Clarifies differences between extras and provider packages (#13810)
-- Add information about all access methods to the environment (#13940)
-- Docs: Fix FAQ on scheduler latency (#13969)
-- Updated taskflow api doc to show dependency with sensor (#13968)
-- Add deprecated config options to docs (#13883)
-- Added a FAQ section to the Upgrading to 2 doc (#13979)
-
-Airflow 2.0.0 (2020-12-18)
---------------------------
+.. code-block:: python

-The full changelog is about 3,000 lines long (already excluding everything backported to 1.10)
-so please check `Airflow 2.0.0 Highlights Blog Post `_
-instead.
+    from airflow.providers.standard.operators.python import PythonOperator

-Significant Changes
-^^^^^^^^^^^^^^^^^^^
+The ``SimpleHttpOperator`` has been migrated to the ``apache-airflow-providers-http`` provider and renamed to
+``HttpOperator``.

-The 2.0 release of Airflow is a significant upgrade, and includes substantial major changes,
-and some of them may be breaking. Existing code written for earlier versions of this project may require updates
-to use this version. Sometimes necessary configuration changes are also required.
-This document describes the changes that have been made, and what you need to do to update your usage.
+UI & Usability Improvements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^

-If you experience issues or have questions, please file `an issue `_.
+Airflow 3.0 introduces a modernized user experience that complements the new React-based UI architecture (see
Several areas of the interface have been enhanced to improve visibility, consistency, and +navigability. -Major changes +New Home Page """"""""""""" -This section describes the major changes that have been made in this release. - -The experimental REST API is disabled by default -"""""""""""""""""""""""""""""""""""""""""""""""" - -The experimental REST API is disabled by default. To restore these APIs while migrating to -the stable REST API, set ``enable_experimental_api`` option in ``[api]`` section to ``True``. - -Please note that the experimental REST API do not have access control. -The authenticated user has full access. - -SparkJDBCHook default connection -"""""""""""""""""""""""""""""""" - -For SparkJDBCHook default connection was ``spark-default``\ , and for SparkSubmitHook it was -``spark_default``. Both hooks now use the ``spark_default`` which is a common pattern for the connection -names used across all providers. +The Airflow Home page now provides a high-level operational overview of your environment. It includes health checks for +core components (Scheduler, Triggerer, DAG Processor), summary stats for DAG and task instance states, and a real-time +feed of asset-triggered events. This view helps users quickly identify pipeline health, recent activity, and potential +failures. -Changes to output argument in commands -"""""""""""""""""""""""""""""""""""""" +Unified DAG List View +"""""""""""""""""""""" -From Airflow 2.0, We are replacing `tabulate `_ with `rich `_ to render commands output. Due to this change, the ``--output`` argument -will no longer accept formats of tabulate tables. Instead, it now accepts: - - -* ``table`` - will render the output in predefined table -* ``json`` - will render the output as a json -* ``yaml`` - will render the output as yaml - -By doing this we increased consistency and gave users possibility to manipulate the -output programmatically (when using json or yaml). - -Affected commands: - - -* ``airflow dags list`` -* ``airflow dags report`` -* ``airflow dags list-runs`` -* ``airflow dags list-jobs`` -* ``airflow connections list`` -* ``airflow connections get`` -* ``airflow pools list`` -* ``airflow pools get`` -* ``airflow pools set`` -* ``airflow pools delete`` -* ``airflow pools import`` -* ``airflow pools export`` -* ``airflow role list`` -* ``airflow providers list`` -* ``airflow providers get`` -* ``airflow providers hooks`` -* ``airflow tasks states-for-dag-run`` -* ``airflow users list`` -* ``airflow variables list`` - -Azure Wasb Hook does not work together with Snowflake hook -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The WasbHook in Apache Airflow use a legacy version of Azure library. While the conflict is not -significant for most of the Azure hooks, it is a problem for Wasb Hook because the ``blob`` folders -for both libraries overlap. Installing both Snowflake and Azure extra will result in non-importable -WasbHook. - -Rename ``all`` to ``devel_all`` extra -""""""""""""""""""""""""""""""""""""""""""""" +The DAG List page has been refreshed with a cleaner layout and improved responsiveness. Users can browse DAGs by name, +tags, or owners. While full-text search has not yet been integrated, filters and navigation have been refined for +clarity in large deployments. -The ``all`` extras were reduced to include only user-facing dependencies. This means -that this extra does not contain development dependencies. 
If you were relying on -``all`` extra then you should use now ``devel_all`` or figure out if you need development -extras at all. +Version-Aware Graph and Grid Views +""""""""""""""""""""""""""""""""""" -Context variables ``prev_execution_date_success`` and ``prev_execution_date_success`` are now ``pendulum.DateTime`` -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +The Graph and Grid views now display task information in the context of the DAG version that was used at runtime. This +improves traceability for DAGs that evolve over time and provides more accurate debugging of historical runs. -Rename policy to task_policy -"""""""""""""""""""""""""""" +Expanded DAG Graph Visualization +"""""""""""""""""""""""""""""""" -Because Airflow introduced DAG level policy (\ ``dag_policy``\ ) we decided to rename existing ``policy`` -function to ``task_policy`` to make the distinction more profound and avoid any confusion. +The Graph view now supports visualizing the full chain of asset and task dependencies, including assets consumed or +produced across DAG boundaries. This allows users to inspect upstream and downstream lineage in a unified view, making +it easier to trace data flows, debug triggering behavior, and understand conditional dependencies between assets and +tasks. -Users using cluster policy need to rename their ``policy`` functions in ``airflow_local_settings.py`` -to ``task_policy``. +DAG Code View +""""""""""""" -Default value for ``[celery] operation_timeout`` has changed to ``1.0`` -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +The "Code" tab now displays the exact DAG source as parsed by the scheduler for the selected DAG version. This allows +users to inspect the precise code that was executed, even for historical runs, and helps debug issues related to +versioned DAG changes. -From Airflow 2, by default Airflow will retry 3 times to publish task to Celery broker. This is controlled by -``[celery] task_publish_max_retries``. Because of this we can now have a lower Operation timeout that raises -``AirflowTaskTimeout``. This generally occurs during network blips or intermittent DNS issues. +Improved Task Log Access +""""""""""""""""""""""""" -Adding Operators and Sensors via plugins is no longer supported -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +Task log access has been streamlined across views. Logs are now easier to access from both the Grid and Task Instance +pages, with cleaner formatting and reduced visual noise. -Operators and Sensors should no longer be registered or imported via Airflow's plugin mechanism -- these types of classes are just treated as plain python classes by Airflow, so there is no need to register them with Airflow. +Enhanced Asset and Backfill Views +"""""""""""""""""""""""""""""""""" -If you previously had a ``plugins/my_plugin.py`` and you used it like this in a DAG: +New UI components support asset-centric DAGs and backfill workflows: -.. code-block:: +- Asset definitions are now visible from the DAG details page, allowing users to inspect upstream and downstream asset relationships. +- Backfills can be triggered and monitored directly from the UI, including support for scheduler-managed backfills introduced in Airflow 3.0. 
-    from airflow.operators.my_plugin import MyOperator
+These improvements make Airflow more accessible to operators, data engineers, and stakeholders working across both
+time-based and event-driven workflows.

-You should instead import it as:
+Deprecations & Removals
+^^^^^^^^^^^^^^^^^^^^^^^^

-.. code-block::
+A number of deprecated features, modules, and interfaces have been removed in Airflow 3.0, completing long-standing
+migrations and cleanups.

-    from my_plugin import MyOperator
+Users are encouraged to review the following removals to ensure compatibility:

-The name under ``airflow.operators.`` was the plugin name, whereas in the second example it is the Python module name where the operator is defined.
+- **SubDag support has been removed** entirely, including the ``SubDagOperator`` and the related CLI and API interfaces. TaskGroups are now the recommended alternative for nested DAG structures.

-See https://airflow.apache.org/docs/apache-airflow/stable/howto/custom-operator.html for more info.
+- **SLAs have been removed**: The legacy SLA feature, including SLA callbacks and metrics, has been removed. A more flexible replacement mechanism, ``DeadlineAlerts``, is planned for a future version of Airflow. Users who relied on SLA-based notifications should consider implementing custom alerting using task-level success/failure hooks or external monitoring integrations.

-Importing Hooks via plugins is no longer supported
-""""""""""""""""""""""""""""""""""""""""""""""""""
+- **Pickling support has been removed**: All legacy features related to DAG pickling have been fully removed. This includes the ``PickleDag`` CLI/API, as well as implicit behaviors around ``store_serialized_dags = False``. DAGs must now be serialized using the JSON-based serialization system. Ensure any custom Python objects used in DAGs are JSON-serializable.

-Importing hooks added in plugins via ``airflow.hooks.`` is no longer supported, and hooks should just be imported as regular Python modules.
+- **Context parameter cleanup**: Several previously available context variables have been removed from the task execution context, including ``conf``, ``execution_date``, and ``dag_run.external_trigger``. These values are either no longer applicable or have been renamed (e.g., use ``dag_run.logical_date`` instead of ``execution_date``). DAG authors should ensure that templated fields and Python callables do not reference these deprecated keys.

-.. code-block::
+- **Deprecated core imports** have been fully removed. Any use of ``airflow.operators.*``, ``airflow.hooks.*``, or similar legacy import paths should be updated to import from their respective providers.

-    from airflow.hooks.my_plugin import MyHook
+- **Configuration cleanup**: Several legacy config options have been removed, including:

-You should instead import it as:
+  - ``scheduler.allow_trigger_in_future``: DAG runs can no longer be triggered with a future logical date. Use ``logical_date=None`` instead.
+  - ``scheduler.use_job_schedule`` and ``scheduler.use_local_tz`` have also been removed. These options were deprecated and no longer had any effect.

-.. code-block::
+- **Deprecated utility methods** such as those in ``airflow.utils.helpers``, ``airflow.utils.process_utils``, and ``airflow.utils.timezone`` have been removed. Equivalent functionality can now be found in the standard Python library or Airflow provider modules.
- from my_plugin import MyHook +- **Removal of deprecated CLI flags and behavior**: Several CLI entrypoints and arguments that were marked for removal in earlier versions have been cleaned up. -It is still possible (but not required) to "register" hooks in plugins. This is to allow future support for dynamically populating the Connections form in the UI. +To assist with the upgrade, tools like ``ruff`` (e.g., rule ``AIR302``) and ``airflow config lint`` can help identify +obsolete imports and configuration keys. These utilities are recommended for locating and resolving common +incompatibilities during migration. Please see :doc:`Upgrade Guide ` for more +information. -See https://airflow.apache.org/docs/apache-airflow/stable/howto/custom-operator.html for more info. +Summary of Removed Features +""""""""""""""""""""""""""" -The default value for ``[core] enable_xcom_pickling`` has been changed to ``False`` -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +The following table summarizes user-facing features removed in 3.0 and their recommended replacements. Not all of these +are called out individually above. -The pickle type for XCom messages has been replaced to JSON by default to prevent RCE attacks. -Note that JSON serialization is stricter than pickling, so for example if you want to pass -raw bytes through XCom you must encode them using an encoding like ``base64``. -If you understand the risk and still want to use `pickling `_\ , -set ``enable_xcom_pickling = True`` in your Airflow config's ``core`` section. ++-------------------------------------------+----------------------------------------------------------+ +| **Feature** | **Replacement / Notes** | ++===========================================+==========================================================+ +| SubDagOperator / SubDAGs | Use TaskGroups | ++-------------------------------------------+----------------------------------------------------------+ +| SLA callbacks / metrics | Deadline Alerts (planned post-3.0) | ++-------------------------------------------+----------------------------------------------------------+ +| DAG Pickling | Use JSON serialization; pickling is no longer supported | ++-------------------------------------------+----------------------------------------------------------+ +| Xcom Pickling | Use custom Xcom backend; pickling is no longer supported | ++-------------------------------------------+----------------------------------------------------------+ +| ``execution_date`` context var | Use ``dag_run.logical_date`` | ++-------------------------------------------+----------------------------------------------------------+ +| ``conf`` and ``dag_run.external_trigger`` | Removed from context; use DAG params or ``dag_run`` APIs | ++-------------------------------------------+----------------------------------------------------------+ +| Core ``EmailOperator`` | Use ``EmailOperator`` from the ``smtp`` provider | ++-------------------------------------------+----------------------------------------------------------+ +| ``none_failed_or_skipped`` rule | Use ``none_failed_min_one_success`` | ++-------------------------------------------+----------------------------------------------------------+ +| ``dummy`` trigger rule | Use ``always`` | ++-------------------------------------------+----------------------------------------------------------+ +| ``fail_stop`` argument | Use ``fail_fast`` | 
++-------------------------------------------+----------------------------------------------------------+
+| ``store_serialized_dags=False``           | DAGs are always serialized; config has no effect         |
++-------------------------------------------+----------------------------------------------------------+
+| Deprecated core imports                   | Import from appropriate provider package                 |
++-------------------------------------------+----------------------------------------------------------+
+| ``SequentialExecutor`` & ``DebugExecutor``| Use LocalExecutor for testing                            |
++-------------------------------------------+----------------------------------------------------------+
+| ``.airflowignore`` regex                  | Uses glob syntax by default                              |
++-------------------------------------------+----------------------------------------------------------+
-Airflowignore of base path
-""""""""""""""""""""""""""
+Migration Tooling & Upgrade Process
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-There was a bug fixed in https://github.com/apache/airflow/pull/11993 that the "airflowignore" checked
-the base path of the dag folder for forbidden dags, not only the relative part. This had the effect
-that if the base path contained the excluded word the whole dag folder could have been excluded. For
-example if the airflowignore file contained x, and the dags folder was '/var/x/dags', then all dags in
-the folder would be excluded. The fix only matches the relative path only now which means that if you
-previously used full path as ignored, you should change it to relative one. For example if your dag
-folder was '/var/dags/' and your airflowignore contained '/var/dag/excluded/', you should change it
-to 'excluded/'.
+Airflow 3 was designed with migration in mind. Many Airflow 2 DAGs will work without changes, especially if deprecation
+warnings were addressed in earlier releases. To support the upgrade, Airflow 3 includes validation tools such as ``ruff``
+and ``airflow config update``, as well as a simplified startup model.
-``ExternalTaskSensor`` provides all task context variables to ``execution_date_fn`` as keyword arguments
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+For a step-by-step upgrade process, see the :doc:`Upgrade Guide `.
-The old syntax of passing ``context`` as a dictionary will continue to work with the caveat that the argument must be named ``context``. The following will break. To fix it, change ``ctx`` to ``context``.
+Minimum Supported Versions
+"""""""""""""""""""""""""""
-.. code-block:: python
+To upgrade to Airflow 3.0, you must be running **Airflow 2.7 or later**.
-    def execution_date_fn(execution_date, ctx): ...
+Airflow 3.0 supports the following Python versions:
-``execution_date_fn`` can take in any number of keyword arguments available in the task context dictionary. The following forms of ``execution_date_fn`` are all supported:
+- Python 3.9
+- Python 3.10
+- Python 3.11
+- Python 3.12
-.. code-block:: python
+Earlier versions of Airflow or Python are not supported due to architectural changes and updated dependency requirements.
-    def execution_date_fn(dt): ...
+DAG Compatibility Checks
+"""""""""""""""""""""""""
+Airflow now includes a Ruff-based linter with custom rules to detect DAG patterns and interfaces that are no longer
+compatible with Airflow 3.0. These checks are packaged under the ``AIR30x`` rule series. Example usage:
-    def execution_date_fn(execution_date): ...
+.. code-block:: bash
+
+    ruff check dags/ --select AIR301 --preview
+    ruff check dags/ --select AIR301 --fix --preview
-    def execution_date_fn(execution_date, ds_nodash): ...
+These checks can automatically fix many common issues such as renamed arguments, removed imports, or legacy context
+variable usage.
+Configuration Migration
+"""""""""""""""""""""""
-    def execution_date_fn(execution_date, ds_nodash, dag): ...
+Airflow 3.0 introduces a new utility to validate and upgrade your Airflow configuration file:
-The default value for ``[webserver] cookie_samesite`` has been changed to ``Lax``
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+.. code-block:: bash
+
-As `recommended `_ by Flask, the
-``[webserver] cookie_samesite`` has been changed to ``Lax`` from ``''`` (empty string) .
-
-Changes to import paths
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib
-package was supported by the community. The project was passed to the Apache community and currently the
-entire code is maintained by the community, so now the division has no justification, and it is only due
-to historical reasons. In Airflow 2.0, we want to organize packages and move integrations
-with third party services to the ``airflow.providers`` package.
-
-All changes made are backward compatible, but if you use the old import paths you will
-see a deprecation warning. The old import paths can be abandoned in the future.
-
-According to `AIP-21 `_
-``_operator`` suffix has been removed from operators. A deprecation warning has also been raised for paths
-importing with the suffix.
-
-The following table shows changes in import paths.
-
-.. list-table::
-   :header-rows: 1
-
-   * - Old path
-     - New path
-   * - ``airflow.hooks.base_hook.BaseHook``
-     - ``airflow.hooks.base.BaseHook``
-   * - ``airflow.hooks.dbapi_hook.DbApiHook``
-     - ``airflow.hooks.dbapi.DbApiHook``
-   * - ``airflow.operators.dummy_operator.DummyOperator``
-     - ``airflow.operators.dummy.DummyOperator``
-   * - ``airflow.operators.dagrun_operator.TriggerDagRunOperator``
-     - ``airflow.operators.trigger_dagrun.TriggerDagRunOperator``
-   * - ``airflow.operators.branch_operator.BaseBranchOperator``
-     - ``airflow.operators.branch.BaseBranchOperator``
-   * - ``airflow.operators.subdag_operator.SubDagOperator``
-     - ``airflow.operators.subdag.SubDagOperator``
-   * - ``airflow.sensors.base_sensor_operator.BaseSensorOperator``
-     - ``airflow.sensors.base.BaseSensorOperator``
-   * - ``airflow.sensors.date_time_sensor.DateTimeSensor``
-     - ``airflow.sensors.date_time.DateTimeSensor``
-   * - ``airflow.sensors.external_task_sensor.ExternalTaskMarker``
-     - ``airflow.sensors.external_task.ExternalTaskMarker``
-   * - ``airflow.sensors.external_task_sensor.ExternalTaskSensor``
-     - ``airflow.sensors.external_task.ExternalTaskSensor``
-   * - ``airflow.sensors.sql_sensor.SqlSensor``
-     - ``airflow.sensors.sql.SqlSensor``
-   * - ``airflow.sensors.time_delta_sensor.TimeDeltaSensor``
-     - ``airflow.sensors.time_delta.TimeDeltaSensor``
-   * - ``airflow.contrib.sensors.weekday_sensor.DayOfWeekSensor``
-     - ``airflow.sensors.weekday.DayOfWeekSensor``
-
-
-Database schema changes
-"""""""""""""""""""""""
+    airflow config update
+    airflow config update --fix
-In order to migrate the database, you should use the command ``airflow db upgrade``\ , but in
-some cases manual steps are required.
+This utility detects removed or deprecated configuration options and, if desired, updates them in-place.
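+For illustration, one kind of rewrite ``airflow config update --fix`` performs is moving options that changed
+sections in earlier releases. The following is a hedged sketch (the connection string is hypothetical):
+
+.. code-block:: ini
+
+    # Before: Airflow 2-era location of the metadata DB connection
+    [core]
+    sql_alchemy_conn = postgresql://airflow@localhost/airflow
+
+    # After: the option now lives in the [database] section
+    [database]
+    sql_alchemy_conn = postgresql://airflow@localhost/airflow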
-Unique conn_id in connection table
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Additional validation is available via:
-Previously, Airflow allowed users to add more than one connection with the same ``conn_id`` and on access it would choose one connection randomly. This acted as a basic load balancing and fault tolerance technique, when used in conjunction with retries.
+.. code-block:: bash
+
-This behavior caused some confusion for users, and there was no clear evidence if it actually worked well or not.
+    airflow config lint
-Now the ``conn_id`` will be unique. If you already have duplicates in your metadata database, you will have to manage those duplicate connections before upgrading the database.
+This command surfaces obsolete configuration keys and helps align your environment with Airflow 3.0 requirements.
-Not-nullable conn_type column in connection table
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Metadata Database Upgrade
+"""""""""""""""""""""""""
-The ``conn_type`` column in the ``connection`` table must contain content. Previously, this rule was enforced
-by application logic, but was not enforced by the database schema.
+As with previous major releases, the Airflow 3.0 upgrade includes schema changes to the metadata database. Before
+upgrading, it is strongly recommended that you back up your database and optionally run:
-If you made any modifications to the table directly, make sure you don't have
-null in the ``conn_type`` column.
+.. code-block:: bash
+
-Configuration changes
-"""""""""""""""""""""
+    airflow db clean
-This release contains many changes that require a change in the configuration of this application or
-other application that integrate with it.
+to remove old task instance, log, or XCom data. To apply the new schema:
-This section describes the changes that have been made, and what you need to do to.
+.. code-block:: bash
+
-airflow.contrib.utils.log has been moved
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    airflow db migrate
-Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib
-package was supported by the community. The project was passed to the Apache community and currently the
-entire code is maintained by the community, so now the division has no justification, and it is only due
-to historical reasons. In Airflow 2.0, we want to organize packages and move integrations
-with third party services to the ``airflow.providers`` package.
+Startup Behavior Changes
+"""""""""""""""""""""""""
-To clean up, the following packages were moved:
+Airflow components are now started explicitly. For example:
-.. list-table::
-   :header-rows: 1
+.. code-block:: bash
+
-   * - Old package
-     - New package
-   * - ``airflow.contrib.utils.log``
-     - ``airflow.utils.log``
-   * - ``airflow.utils.log.gcs_task_handler``
-     - ``airflow.providers.google.cloud.log.gcs_task_handler``
-   * - ``airflow.utils.log.wasb_task_handler``
-     - ``airflow.providers.microsoft.azure.log.wasb_task_handler``
-   * - ``airflow.utils.log.stackdriver_task_handler``
-     - ``airflow.providers.google.cloud.log.stackdriver_task_handler``
-   * - ``airflow.utils.log.s3_task_handler``
-     - ``airflow.providers.amazon.aws.log.s3_task_handler``
-   * - ``airflow.utils.log.es_task_handler``
-     - ``airflow.providers.elasticsearch.log.es_task_handler``
-   * - ``airflow.utils.log.cloudwatch_task_handler``
-     - ``airflow.providers.amazon.aws.log.cloudwatch_task_handler``
+    airflow api-server          # Replaces airflow webserver
+    airflow dag-processor       # Required in all environments
-You should update the import paths if you are setting log configurations with the ``logging_config_class`` option.
-The old import paths still works but can be abandoned.
+These changes reflect Airflow's new service-oriented architecture.
-SendGrid emailer has been moved
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Resources
+^^^^^^^^^
-Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib
-package was supported by the community. The project was passed to the Apache community and currently the
-entire code is maintained by the community, so now the division has no justification, and it is only due
-to historical reasons.
+- :doc:`Upgrade Guide `
+- `Airflow AIPs `_
-To clean up, the ``send_mail`` function from the ``airflow.contrib.utils.sendgrid`` module has been moved.
+Airflow 3.0 represents more than a year of collaboration across hundreds of contributors and dozens of organizations. We
+thank everyone who helped shape this release through design discussions, code contributions, testing, documentation, and
+community feedback. For full details, migration guidance, and upgrade best practices, refer to the official Upgrade
+Guide and join the conversation on the Airflow dev and user mailing lists.
-If your configuration file looks like this:
+Airflow 2.11.0 (2025-05-20)
+---------------------------
-.. code-block:: ini
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-    [email]
-    email_backend = airflow.contrib.utils.sendgrid.send_email
+``DeltaTriggerTimetable`` for trigger-based scheduling (#47074)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-It should look like this now:
+This change introduces DeltaTriggerTimetable, a new built-in timetable that complements the existing suite of
+Airflow timetables by supporting delta-based trigger schedules without relying on data intervals.
-.. code-block:: ini
+Airflow currently has two major types of timetables:
+ - Data interval-based (e.g., ``CronDataIntervalTimetable``, ``DeltaDataIntervalTimetable``)
+ - Trigger-based (e.g., ``CronTriggerTimetable``)
-    [email]
-    email_backend = airflow.providers.sendgrid.utils.emailer.send_email
+However, there was no equivalent trigger-based option for delta intervals like ``timedelta(days=1)``.
+As a result, even simple schedules like ``schedule=timedelta(days=1)`` were interpreted through a data interval
+lens—adding unnecessary complexity for users who don't care about upstream/downstream data dependencies.
-The old configuration still works but can be abandoned.
+This feature is backported to Airflow 2.11.0 to help users begin transitioning before upgrading to Airflow 3.0.
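+As a minimal sketch, the new timetable can also be set explicitly (assuming it is importable from
+``airflow.timetables.trigger``, alongside ``CronTriggerTimetable``; the DAG id is illustrative):
+
+.. code-block:: python
+
+    from datetime import timedelta
+
+    from airflow import DAG
+    from airflow.timetables.trigger import DeltaTriggerTimetable
+
+    with DAG(
+        dag_id="daily_delta_trigger",
+        # Fires every 24 hours without attaching a data interval to each run
+        schedule=DeltaTriggerTimetable(timedelta(days=1)),
+    ):
+        ...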
-Unify ``hostname_callable`` option in ``core`` section
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ - In Airflow 2.11, ``schedule=timedelta(...)`` still defaults to ``DeltaDataIntervalTimetable``.
+ - A new config option ``[scheduler] create_delta_data_intervals`` (default: ``True``) allows opting in to ``DeltaTriggerTimetable``.
+ - In Airflow 3.0, this config defaults to ``False``, meaning ``DeltaTriggerTimetable`` becomes the default for timedelta schedules.
-The previous option used a colon(\ ``:``\ ) to split the module from function. Now the dot(\ ``.``\ ) is used.
+By flipping this config in 2.11, users can preview and adopt the new scheduling behavior in advance — minimizing surprises during upgrade.
-The change aims to unify the format of all options that refer to objects in the ``airflow.cfg`` file.
-Custom executors is loaded using full import path
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Consistent timing metrics across all backends (#39908, #43966)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-In previous versions of Airflow it was possible to use plugins to load custom executors. It is still
-possible, but the configuration has changed. Now you don't have to create a plugin to configure a
-custom executor, but you need to provide the full path to the module in the ``executor`` option
-in the ``core`` section. The purpose of this change is to simplify the plugin mechanism and make
-it easier to configure executor.
+Previously, Airflow reported timing metrics in milliseconds for ``StatsD`` but in seconds for other backends
+such as ``OpenTelemetry`` and ``Datadog``. This inconsistency made it difficult to interpret or compare
+timing metrics across systems.
-If your module was in the path ``my_acme_company.executors.MyCustomExecutor`` and the plugin was
-called ``my_plugin`` then your configuration looks like this
+Airflow 2.11 introduces a new config option:
-.. code-block:: ini
+ - ``[metrics] timer_unit_consistency`` (default: ``False`` in 2.11; permanently ``True`` in Airflow 3.0, where the option is removed).
-    [core]
-    executor = my_plugin.MyCustomExecutor
+When enabled, all timing metrics are consistently reported in milliseconds, regardless of the backend.
-And now it should look like this:
+This setting has become mandatory and always ``True`` in Airflow 3.0 (the config will be removed), so
+enabling it in 2.11 allows users to migrate early and avoid surprises during upgrade.
-.. code-block:: ini
+Ease migration to Airflow 3
+"""""""""""""""""""""""""""
+This release introduces several changes to help users prepare for upgrading to Airflow 3:
-    [core]
-    executor = my_acme_company.executors.MyCustomExecutor
+ - All models using ``execution_date`` now also include a ``logical_date`` field. Airflow 3 drops ``execution_date`` entirely in favor of ``logical_date`` (#44283)
+ - Added ``airflow config lint`` and ``airflow config update`` commands in 2.11 to help audit and migrate configs for Airflow 3.0. (#45736, #50353, #46757)
-The old configuration is still works but can be abandoned at any time.
+Python 3.8 support removed
+""""""""""""""""""""""""""
+Support for Python 3.8 has been removed, as it has reached end-of-life.
+Airflow 2.11 requires Python 3.9, 3.10, 3.11, or 3.12.
-Use ``CustomSQLAInterface`` instead of ``SQLAInterface`` for custom data models.
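+Taken together, the 2.11 opt-ins described in this section can be previewed with configuration along these
+lines (a sketch using the option names listed above):
+
+.. code-block:: ini
+
+    [scheduler]
+    # Opt in to DeltaTriggerTimetable for plain timedelta schedules (the 3.0 default)
+    create_delta_data_intervals = False
+
+    [metrics]
+    # Report all timing metrics in milliseconds, as Airflow 3.0 always does
+    timer_unit_consistency = True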
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+New Features
+""""""""""""
-From Airflow 2.0, if you want to define your own Flask App Builder data models you need to use CustomSQLAInterface
-instead of SQLAInterface.
+- Introduce ``DeltaTriggerTimetable`` (#47074)
+- Backport ``airflow config update`` and ``airflow config lint`` changes to ease migration to Airflow 3 (#45736, #50353)
+- Add link to show task in a DAG in DAG Dependencies view (#47721)
+- Align timers and timing metrics (ms) across all metrics loggers (#39908, #43966)
-For Non-RBAC replace:
+Bug Fixes
+"""""""""
-.. code-block:: python
+- Don't resolve path for DAGs folder (#46877)
+- Fix ``ti.log_url`` timestamp format from ``"%Y-%m-%dT%H:%M:%S%z"`` to ``"%Y-%m-%dT%H:%M:%S.%f%z"`` (#50306)
+- Ensure that the generated ``airflow.cfg`` contains a random ``fernet_key`` and ``secret_key`` (#47755)
+- Fixed setting ``rendered_map_index`` via internal api (#49057)
+- Store rendered_map_index from ``TaskInstancePydantic`` into ``TaskInstance`` (#48571)
+- Allow using ``log_url`` property on ``TaskInstancePydantic`` (Internal API) (#50560)
+- Fix Trigger Form with Empty Object Default (#46872)
+- Fix ``TypeError`` when deserializing task with ``execution_timeout`` set to ``None`` (#46822)
+- Always populate mapped tasks (#46790)
+- Ensure ``check_query_exists`` returns a bool (#46707)
+- UI: ``/xcom/list`` got exception when applying filter on the ``value`` column (#46053)
+- Allow to set note field via the experimental internal api (#47769)
-    from flask_appbuilder.models.sqla.interface import SQLAInterface
+Miscellaneous
+"""""""""""""
-    datamodel = SQLAInterface(your_data_model)
+- Add ``logical_date`` to models using ``execution_date`` (#44283)
+- Drop support for Python 3.8 (#49980, #50015)
+- Emit warning for deprecated ``BaseOperatorLink.get_link`` signature (#46448)
-with RBAC (in 1.10):
+Doc Only Changes
+""""""""""""""""
+- Unquote executor ``airflow.cfg`` variable (#48084)
+- Update ``XCom`` docs to show examples of pushing multiple ``XComs`` (#46284, #47068)
-.. code-block:: python
+Airflow 2.10.5 (2025-02-10)
+---------------------------
-    from airflow.www_rbac.utils import CustomSQLAInterface
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-    datamodel = CustomSQLAInterface(your_data_model)
+Ensure teardown tasks are executed when DAG run is set to failed (#45530)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-and in 2.0:
+Previously, when a DAG run was manually set to "failed" or "success", that terminal state was applied to all of its tasks.
+This left a gap when setup and teardown tasks were defined: if a teardown was used to clean up infrastructure
+or other resources, it was skipped as well, and resources could stay allocated.
-.. code-block:: python
+Now, if setup tasks have already executed and the DAG run is manually set to "failed" or "success", the teardown
+tasks are executed. Teardown tasks are still skipped if the setup was also skipped.
-    from airflow.www.utils import CustomSQLAInterface
+As a side effect, if the DAG contains teardown tasks, manually marking the DAG run as "failed" or "success" keeps
+the DAG in a running state until the teardown tasks have been scheduled; they would not be scheduled
+if the DAG were set directly to "failed" or "success".
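+For context, a minimal sketch of the setup/teardown pattern this change affects (task and DAG names are
+illustrative):
+
+.. code-block:: python
+
+    from airflow import DAG
+    from airflow.decorators import setup, task, teardown
+
+    with DAG(dag_id="cleanup_example"):
+
+        @setup
+        def create_infra(): ...
+
+        @task
+        def run_job(): ...
+
+        @teardown
+        def delete_infra(): ...
+
+        # With this fix, delete_infra is still scheduled when the run is manually
+        # marked "failed" or "success" after create_infra has executed.
+        create_infra() >> run_job() >> delete_infra()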
-    datamodel = CustomSQLAInterface(your_data_model)
-Drop plugin support for stat_name_handler
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Bug Fixes
+"""""""""
-In previous version, you could use plugins mechanism to configure ``stat_name_handler``. You should now use the ``stat_name_handler``
-option in ``[scheduler]`` section to achieve the same effect.
+- Prevent using ``trigger_rule=TriggerRule.ALWAYS`` in a task-generated mapping within bare tasks (#44751)
+- Fix ShortCircuitOperator mapped tasks (#44912)
+- Fix premature evaluation of tasks with certain trigger rules (e.g. ``ONE_DONE``) in a mapped task group (#44937)
+- Fix task_id validation in BaseOperator (#44938) (#44938)
+- Allow fetching XCom with forward slash from the API and escape it in the UI (#45134)
+- Fix ``FileTaskHandler`` only read from default executor (#46000)
+- Fix empty task instance for log (#45702) (#45703)
+- Remove ``skip_if`` and ``run_if`` decorators before TaskFlow virtualenv tasks are run (#41832) (#45680)
+- Fix request body for json requests in event log (#45546) (#45560)
+- Ensure teardown tasks are executed when DAG run is set to failed (#45530) (#45581)
+- Do not update DR on TI update after task execution (#45348)
+- Fix object and array DAG params that have a None default (#45313) (#45315)
+- Fix endless sensor rescheduling (#45224) (#45250)
+- Evaluate None in SQLAlchemy's extended JSON type decorator (#45119) (#45120)
+- Allow dynamic tasks to be filtered by ``rendered_map_index`` (#45109) (#45122)
+- Handle relative paths when sanitizing URLs (#41995) (#45080)
+- Set Autocomplete Off on Login Form (#44929) (#44940)
+- Add Webserver parameters ``max_form_parts``, ``max_form_memory_size`` (#46243) (#45749)
+- Fixed accessing thread local variable in BaseOperators ``execute`` safeguard mechanism (#44646) (#46280)
+- Add map_index parameter to extra links API (#46337)
-If your plugin looked like this and was available through the ``test_plugin`` path:
-.. code-block:: python
+Miscellaneous
+"""""""""""""
-    def my_stat_name_handler(stat):
-        return stat
+- Add traceback log output when SIGTERMs was sent (#44880) (#45077)
+- Removed the ability for Operators to specify their own "scheduling deps" (#45713) (#45742)
+- Deprecate ``conf`` from Task Context (#44993)
-    class AirflowTestPlugin(AirflowPlugin):
-        name = "test_plugin"
-        stat_name_handler = my_stat_name_handler
+Airflow 2.10.4 (2024-12-16)
+---------------------------
-then your ``airflow.cfg`` file should look like this:
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-.. code-block:: ini
+TaskInstance ``priority_weight`` is capped in 32-bit signed integer ranges (#43611)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-    [scheduler]
-    stat_name_handler=test_plugin.my_stat_name_handler
+Some database engines are limited to 32-bit integer values. Because some users reported errors where
+weights rolled over to negative values, ``priority_weight`` is now capped to the 32-bit signed integer
+range. Even though Python internally supports smaller or larger 64-bit values, ``priority_weight``
+only stores values from -2147483648 to 2147483647.
-This change is intended to simplify the statsd configuration.
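+Conceptually, the cap clamps values into the 32-bit signed range before they are stored (a sketch, not
+Airflow source):
+
+.. code-block:: python
+
+    INT32_MIN, INT32_MAX = -(2**31), 2**31 - 1  # -2147483648 .. 2147483647
+
+    def capped_priority_weight(value: int) -> int:
+        # Weights outside the portable database range are clamped, not rejected
+        return max(INT32_MIN, min(INT32_MAX, value))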
+Bug Fixes
+^^^^^^^^^
-Logging configuration has been moved to new section
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+- Fix stats of dynamic mapped tasks after automatic retries of failed tasks (#44300)
+- Fix wrong display of multi-line messages in the log after filtering (#44457)
+- Allow "/" in metrics validator (#42934) (#44515)
+- Fix gantt flickering (#44488) (#44517)
+- Fix problem with inability to remove fields from Connection form (#40421) (#44442)
+- Check pool_slots on partial task import instead of execution (#39724) (#42693)
+- Avoid grouping task instance stats by try_number for dynamic mapped tasks (#44300) (#44319)
+- Re-queue task when they are stuck in queued (#43520) (#44158)
+- Suppress the warnings where we check for sensitive values (#44148) (#44167)
+- Fix get_task_instance_try_details to return appropriate schema (#43830) (#44133)
+- Log message source details are grouped (#43681) (#44070)
+- Fix duplication of Task tries in the UI (#43891) (#43950)
+- Add correct mime-type in OpenAPI spec (#43879) (#43901)
+- Disable extra links button if link is null or empty (#43844) (#43851)
+- Disable XCom list ordering by execution_date (#43680) (#43696)
+- Fix venv numpy example which needs to be 1.26 at least to be working in Python 3.12 (#43659)
+- Fix Try Selector in Mapped Tasks also on Index 0 (#43590) (#43591)
+- Prevent using ``trigger_rule="always"`` in a dynamic mapped task (#43810)
+- Prevent using ``trigger_rule=TriggerRule.ALWAYS`` in a task-generated mapping within bare tasks (#44751)
-The following configurations have been moved from ``[core]`` to the new ``[logging]`` section.
+Doc Only Changes
+""""""""""""""""
+- Update XCom docs around containers/helm (#44570) (#44573)
+Miscellaneous
+"""""""""""""
+- Raise deprecation warning when accessing inlet or outlet events through str (#43922)
-* ``base_log_folder``
-* ``remote_logging``
-* ``remote_log_conn_id``
-* ``remote_base_log_folder``
-* ``encrypt_s3_logs``
-* ``logging_level``
-* ``fab_logging_level``
-* ``logging_config_class``
-* ``colored_console_log``
-* ``colored_log_format``
-* ``colored_formatter_class``
-* ``log_format``
-* ``simple_log_format``
-* ``task_log_prefix_template``
-* ``log_filename_template``
-* ``log_processor_filename_template``
-* ``dag_processor_manager_log_location``
-* ``task_log_reader``
-Metrics configuration has been moved to new section
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Airflow 2.10.3 (2024-11-05)
+---------------------------
-The following configurations have been moved from ``[scheduler]`` to the new ``[metrics]`` section.
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
+No significant changes.
-* ``statsd_on``
-* ``statsd_host``
-* ``statsd_port``
-* ``statsd_prefix``
-* ``statsd_allow_list``
-* ``stat_name_handler``
-* ``statsd_datadog_enabled``
-* ``statsd_datadog_tags``
-* ``statsd_custom_client_path``
+Bug Fixes
+"""""""""
+- Improves the handling of value masking when setting Airflow variables for enhanced security. (#43123) (#43278)
+- Adds support for task_instance_mutation_hook to handle mapped operators with index 0. (#42661) (#43089)
+- Fixes executor cleanup to properly handle zombie tasks when task instances are terminated. (#43065)
+- Adds retry logic for HTTP 502 and 504 errors in internal API calls to handle webserver startup issues. (#42994) (#43044)
+- Restores the use of separate sessions for writing and deleting RTIF data to prevent StaleDataError. (#42928) (#43012)
+- Fixes PythonOperator error by replacing hyphens with underscores in DAG names. (#42993)
+- Improving validation of task retries to handle None values (#42532) (#42915)
+- Fixes error handling in dataset managers when resolving dataset aliases into new datasets (#42733)
+- Enables clicking on task names in the DAG Graph View to correctly select the corresponding task. (#38782) (#42697)
+- Prevent redirect loop on /home with tags/last run filters (#42607) (#42609) (#42628)
+- Support of host.name in OTEL metrics and usage of OTEL_RESOURCE_ATTRIBUTES in metrics (#42428) (#42604)
+- Reduce eyestrain in dark mode with reduced contrast and saturation (#42567) (#42583)
+- Handle ENTER key correctly in trigger form and allow manual JSON (#42525) (#42535)
+- Ensure DAG trigger form submits with updated parameters upon keyboard submit (#42487) (#42499)
+- Do not attempt to provide not ``stringified`` objects to UI via xcom if pickling is active (#42388) (#42486)
+- Fix the span link of task instance to point to the correct span in the scheduler_job_loop (#42430) (#42480)
+- Bugfix task execution from runner in Windows (#42426) (#42478)
+- Allows overriding the hardcoded OTEL_SERVICE_NAME with an environment variable (#42242) (#42441)
+- Improves trigger performance by using ``selectinload`` instead of ``joinedload`` (#40487) (#42351)
+- Suppress warnings when masking sensitive configs (#43335) (#43337)
+- Masking configuration values irrelevant to DAG author (#43040) (#43336)
+- Execute templated bash script as file in BashOperator (#43191)
+- Fixes schedule_downstream_tasks to include upstream tasks for one_success trigger rule (#42582) (#43299)
+- Add retry logic in the scheduler for updating trigger timeouts in case of deadlocks. (#41429) (#42651)
+- Mark all tasks as skipped when failing a dag_run manually (#43572)
+- Fix ``TrySelector`` for Mapped Tasks in Logs and Details Grid Panel (#43566)
+- Conditionally add OTEL events when processing executor events (#43558) (#43567)
+- Fix broken stat ``scheduler_loop_duration`` (#42886) (#43544)
+- Ensure total_entries in /api/v1/dags (#43377) (#43429)
+- Include limit and offset in request body schema for List task instances (batch) endpoint (#43479)
+- Don't raise a warning in ExecutorSafeguard when execute is called from an extended operator (#42849) (#43577)
-Changes to Elasticsearch logging provider
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Miscellaneous
+"""""""""""""
+- Deprecate session auth backend (#42911)
+- Removed unicodecsv dependency for providers with Airflow version 2.8.0 and above (#42765) (#42970)
+- Remove the referrer from Webserver to Scarf (#42901) (#42942)
+- Bump ``dompurify`` from 2.2.9 to 2.5.6 in /airflow/www (#42263) (#42270)
+- Correct docstring format in _get_template_context (#42244) (#42272)
+- Backport: Bump Flask-AppBuilder to ``4.5.2`` (#43309) (#43318)
+- Check python version that was used to install pre-commit venvs (#43282) (#43310)
+- Resolve warning in Dataset Alias migration (#43425)
-When JSON output to stdout is enabled, log lines will now contain the ``log_id`` & ``offset`` fields, this should make reading task logs from elasticsearch on the webserver work out of the box. Example configuration:
+Doc Only Changes
+""""""""""""""""
+- Clarifying PLUGINS_FOLDER permissions by DAG authors (#43022) (#43029)
+- Add templating info to TaskFlow tutorial (#42992)
+- Airflow local settings no longer importable from dags folder (#42231) (#42603)
+- Fix documentation for cpu and memory usage (#42147) (#42256)
+- Fix instruction for docker compose (#43119) (#43321)
+- Updates documentation to reflect that dag_warnings is returned instead of import_errors. (#42858) (#42888)
-.. code-block:: ini
-    [logging]
-    remote_logging = True
-    [elasticsearch]
-    host = http://es-host:9200
-    write_stdout = True
-    json_format = True
+Airflow 2.10.2 (2024-09-18)
+---------------------------
-Note that the webserver expects the log line data itself to be present in the ``message`` field of the document.
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-Remove gcp_service_account_keys option in airflow.cfg file
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+No significant changes.
-This option has been removed because it is no longer supported by the Google Kubernetes Engine. The new
-recommended service account keys for the Google Cloud management method is
-`Workload Identity `_.
+Bug Fixes
+"""""""""
+- Revert "Fix: DAGs are not marked as stale if the dags folder change" (#42220, #42217)
+- Add missing open telemetry span and correct scheduled slots documentation (#41985)
+- Fix require_confirmation_dag_change (#42063) (#42211)
+- Only treat null/undefined as falsy when rendering XComEntry (#42199) (#42213)
+- Add extra and ``renderedTemplates`` as keys to skip ``camelCasing`` (#42206) (#42208)
+- Do not ``camelcase`` xcom entries (#42182) (#42187)
+- Fix task_instance and dag_run links from list views (#42138) (#42143)
+- Support multi-line input for Params of type string in trigger UI form (#40414) (#42139)
+- Fix details tab log url detection (#42104) (#42114)
+- Add new type of exception to catch timeout (#42064) (#42078)
+- Rewrite how DAG to dataset / dataset alias are stored (#41987) (#42055)
+- Allow dataset alias to add more than one dataset events (#42189) (#42247)
-Fernet is enabled by default
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Miscellaneous
+"""""""""""""
+- Limit universal-pathlib below ``0.2.4`` as it breaks our integration (#42101)
+- Auto-fix default deferrable with ``LibCST`` (#42089)
+- Deprecate ``--tree`` flag for ``tasks list`` cli command (#41965)
-The fernet mechanism is enabled by default to increase the security of the default installation. In order to
-restore the previous behavior, the user must consciously set an empty key in the ``fernet_key`` option of
-section ``[core]`` in the ``airflow.cfg`` file.
+Doc Only Changes
+""""""""""""""""
+- Update ``security_model.rst`` to clear unauthenticated endpoints exceptions (#42085)
+- Add note about dataclasses and attrs to XComs page (#42056)
+- Improve docs on markdown docs in DAGs (#42013)
+- Add warning that listeners can be dangerous (#41968)
-At the same time, this means that the ``apache-airflow[crypto]`` extra-packages are always installed.
-However, this requires that your operating system has ``libffi-dev`` installed.
-Changes to propagating Kubernetes worker annotations
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Airflow 2.10.1 (2024-09-05)
+---------------------------
-``kubernetes_annotations`` configuration section has been removed.
-A new key ``worker_annotations`` has been added to existing ``kubernetes`` section instead.
-That is to remove restriction on the character set for k8s annotation keys.
-All key/value pairs from ``kubernetes_annotations`` should now go to ``worker_annotations`` as a json. I.e. instead of e.g.
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-.. code-block::
+No significant changes.
-    [kubernetes_annotations]
-    annotation_key = annotation_value
-    annotation_key2 = annotation_value2
+Bug Fixes
+"""""""""
+- Handle Example dags case when checking for missing files (#41874)
+- Fix logout link in "no roles" error page (#41845)
+- Set end_date and duration for triggers completed with end_from_trigger as True. (#41834)
+- DAGs are not marked as stale if the dags folder change (#41829)
+- Fix compatibility with FAB provider versions <1.3.0 (#41809)
+- Don't Fail LocalTaskJob on heartbeat (#41810)
+- Remove deprecation warning for cgitb in Plugins Manager (#41793)
+- Fix log for notifier(instance) without ``__name__`` (#41699)
+- Splitting syspath preparation into stages (#41694)
+- Adding url sanitization for extra links (#41680)
+- Fix InletEventsAccessors type stub (#41607)
+- Fix UI rendering when XCom is INT, FLOAT, BOOL or NULL (#41605)
+- Fix try selector refresh (#41503)
+- Incorrect try number subtraction producing invalid span id for OTEL airflow (#41535)
+- Add WebEncoder for trigger page rendering to avoid render failure (#41485)
+- Adding ``tojson`` filter to example_inlet_event_extra example dag (#41890)
+- Add backward compatibility check for executors that don't inherit BaseExecutor (#41927)
-it should be rewritten to
+Miscellaneous
+"""""""""""""
+- Bump webpack from 5.76.0 to 5.94.0 in /airflow/www (#41879)
+- Adding rel property to hyperlinks in logs (#41783)
+- Field Deletion Warning when editing Connections (#41504)
+- Make Scarf usage reporting in major+minor versions and counters in buckets (#41900)
+- Lower down universal-pathlib minimum to 0.2.2 (#41943)
+- Protect against None components of universal pathlib xcom backend (#41938)
-.. code-block::
+Doc Only Changes
+""""""""""""""""
+- Remove Debian bullseye support (#41569)
+- Add an example for auth with ``keycloak`` (#41791)
-    [kubernetes]
-    worker_annotations = { "annotation_key" : "annotation_value", "annotation_key2" : "annotation_value2" }
-Remove run_duration
-~~~~~~~~~~~~~~~~~~~
+Airflow 2.10.0 (2024-08-15)
+---------------------------
-We should not use the ``run_duration`` option anymore. This used to be for restarting the scheduler from time to time, but right now the scheduler is getting more stable and therefore using this setting is considered bad and might cause an inconsistent state.
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-Rename pool statsd metrics
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+Scarf based telemetry: Airflow now collects telemetry data (#39510)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+Airflow integrates Scarf to collect basic usage data during operation. Deployments can opt out of data collection by
+setting the ``[usage_data_collection]enabled`` option to ``False``, or the ``SCARF_ANALYTICS=false`` environment variable.
-Used slot has been renamed to running slot to make the name self-explanatory
-and the code more maintainable.
+Datasets no longer trigger inactive DAGs (#38891)
+"""""""""""""""""""""""""""""""""""""""""""""""""
-This means ``pool.used_slots.`` metric has been renamed to
-``pool.running_slots.``. The ``Used Slots`` column in Pools Web UI view
-has also been changed to ``Running Slots``.
+Previously, when a DAG was paused or removed, incoming dataset events would still
+trigger it, and the DAG would run once it was unpaused or added back to a DAG
+file. This has been changed; a DAG's dataset schedule can now only be satisfied
+by events that occur while the DAG is active. While this is a breaking change,
+the previous behavior is considered a bug.
-Removal of Mesos Executor
-~~~~~~~~~~~~~~~~~~~~~~~~~
+The behavior of time-based scheduling is unchanged, including the timetable part
+of ``DatasetOrTimeSchedule``.
-The Mesos Executor is removed from the code base as it was not widely used and not maintained. `Mailing List Discussion on deleting it `_.
+``try_number`` is no longer incremented during task execution (#39336)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-Change dag loading duration metric name
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Previously, the try number (``try_number``) was incremented at the beginning of task execution on the worker. This was problematic for many reasons.
+For one, it meant that the try number was incremented when it was not supposed to be, namely when resuming from reschedule or deferral. It also resulted in
+the try number being "wrong" when the task had not yet started. The workarounds for these two issues caused a lot of confusion.
-Change DAG file loading duration metric from
-``dag.loading-duration.`` to ``dag.loading-duration.``. This is to
-better handle the case when a DAG file has multiple DAGs.
+Now, instead, the try number for a task run is determined at the time the task is scheduled, does not change in flight, and is never decremented.
+So after the task runs, the observed try number remains the same as it was when the task was running; only when there is a "new try" will the try number be incremented again.
-Sentry is disabled by default
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+One consequence of this change is that if users run tasks "manually" (e.g. by calling ``ti.run()`` directly, or via the command line ``airflow tasks run``),
+the try number will no longer be incremented. Airflow assumes that tasks are always run after being scheduled by the scheduler, so we do not regard this as a breaking change.
-Sentry is disabled by default. To enable these integrations, you need set ``sentry_on`` option
-in ``[sentry]`` section to ``"True"``.
+``/logout`` endpoint in FAB Auth Manager is now CSRF protected (#40145)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-Simplified GCSTaskHandler configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The ``/logout`` endpoint's method in FAB Auth Manager has been changed from ``GET`` to ``POST`` in all existing
+AuthViews (``AuthDBView``, ``AuthLDAPView``, ``AuthOAuthView``, ``AuthOIDView``, ``AuthRemoteUserView``), and
+now includes CSRF protection to enhance security and prevent unauthorized logouts.
-In previous versions, in order to configure the service account key file, you had to create a connection entry.
-In the current version, you can configure ``google_key_path`` option in ``[logging]`` section to set
-the key file path.
+OpenTelemetry Traces for Apache Airflow (#37948)
+"""""""""""""""""""""""""""""""""""""""""""""""""
+This new feature adds the capability for Apache Airflow to emit 1) Airflow system traces of the scheduler,
+triggerer, executor, and processor, and 2) DAG run traces for deployed DAG runs, in OpenTelemetry format. Previously, only metrics were emitted in OpenTelemetry format.
+This new feature adds richer data that users can emit, via the OpenTelemetry standard, and send to OTLP-compatible endpoints.
-Users using Application Default Credentials (ADC) need not take any action.
+Decorator for Task Flow ``(@skip_if, @run_if)`` to make it simple to apply whether or not to skip a Task. (#41116)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+This feature adds a decorator to make it simple to skip a Task.
-The change aims to simplify the configuration of logging, to prevent corruption of
-the instance configuration by changing the value controlled by the user - connection entry. If you
-configure a backend secret, it also means the webserver doesn't need to connect to it. This
-simplifies setups with multiple GCP projects, because only one project will require the Secret Manager API
-to be enabled.
+Using Multiple Executors Concurrently (#40701)
+""""""""""""""""""""""""""""""""""""""""""""""
+Previously known as hybrid executors, this new feature allows Airflow to use multiple executors concurrently. DAGs, or even individual tasks, can be configured
+to use a specific executor that suits their needs best. A single DAG can contain tasks all using different executors. Please see the Airflow documentation for
+more details. Note: This feature is still experimental. See `documentation on Executor `_ for a more detailed description.
-Changes to the core operators/hooks
-"""""""""""""""""""""""""""""""""""
+New Features
+""""""""""""
+- AIP-61 Hybrid Execution (`AIP-61 `_)
+- AIP-62 Getting Lineage from Hook Instrumentation (`AIP-62 `_)
+- AIP-64 TaskInstance Try History (`AIP-64 `_)
+- AIP-44 Internal API (`AIP-44 `_)
+- Enable ending the task directly from the triggerer without going into the worker. (#40084)
+- Extend dataset dependencies (#40868)
+- Feature/add token authentication to internal api (#40899)
+- Add DatasetAlias to support dynamic Dataset Event Emission and Dataset Creation (#40478)
+- Add example DAGs for inlet_events (#39893)
+- Implement ``accessors`` to read dataset events defined as inlet (#39367)
+- Decorator for Task Flow, to make it simple to apply whether or not to skip a Task. (#41116)
+- Add start execution from triggerer support to dynamic task mapping (#39912)
+- Add try_number to log table (#40739)
+- Added ds_format_locale method in macros which allows localizing datetime formatting using Babel (#40746)
+- Add DatasetAlias to support dynamic Dataset Event Emission and Dataset Creation (#40478, #40723, #40809, #41264, #40830, #40693, #41302)
+- Use sentinel to mark dag as removed on re-serialization (#39825)
+- Add parameter for the last number of queries to the DB in DAG file processing stats (#40323)
+- Add prototype version dark mode for Airflow UI (#39355)
+- Add ability to mark some tasks as successful in ``dag test`` (#40010)
+- Allow use of callable for template_fields (#37028)
+- Filter running/failed and active/paused dags on the home page(#39701)
+- Add metrics about task CPU and memory usage (#39650)
+- UI changes for DAG Re-parsing feature (#39636)
+- Add Scarf based telemetry (#39510, #41318)
+- Add dag re-parsing request endpoint (#39138)
+- Redirect to new DAGRun after trigger from Grid view (#39569)
+- Display ``endDate`` in task instance tooltip. (#39547)
+- Implement ``accessors`` to read dataset events defined as inlet (#39367, #39893)
+- Add color to log lines in UI for error and warnings based on keywords (#39006)
+- Add Rendered k8s pod spec tab to ti details view (#39141)
+- Make audit log before/after filterable (#39120)
+- Consolidate grid collapse actions to a single full screen toggle (#39070)
+- Implement Metadata to emit runtime extra (#38650)
+- Add executor field to the DB and parameter to the operators (#38474)
+- Implement context accessor for DatasetEvent extra (#38481)
+- Add dataset event info to dag graph (#41012)
+- Add button to toggle datasets on/off in dag graph (#41200)
+- Add ``run_if`` & ``skip_if`` decorators (#41116)
+- Add dag_stats rest api endpoint (#41017)
+- Add listeners for Dag import errors (#39739)
+- Allowing DateTimeSensorAsync, FileSensor and TimeSensorAsync to start execution from trigger during dynamic task mapping (#41182)
-We strive to ensure that there are no changes that may affect the end user and your files, but this
-release may contain changes that will require changes to your DAG files.
-This section describes the changes that have been made, and what you need to do to update your DAG File,
-if you use core operators or any other.
+Improvements
+""""""""""""
+- Allow set Dag Run resource into Dag Level permission: extends Dag's access_control feature to allow Dag Run resource permissions. (#40703)
+- Improve security and error handling for the internal API (#40999)
+- Datasets UI Improvements (#40871)
+- Change DAG Audit log tab to Event Log (#40967)
+- Make standalone dag file processor works in DB isolation mode (#40916)
+- Show only the source on the consumer DAG page and only triggered DAG run in the producer DAG page (#41300)
+- Update metrics names to allow multiple executors to report metrics (#40778)
+- Format DAG run count (#39684)
+- Update styles for ``renderedjson`` component (#40964)
+- Improve ATTRIBUTE_REMOVED sentinel to use class and more context (#40920)
+- Make XCom display as react json (#40640)
+- Replace usages of task context logger with the log table (#40867)
+- Rollback for all retry exceptions (#40882) (#40883)
+- Support rendering ObjectStoragePath value (#40638)
+- Add try_number and map_index as params for log event endpoint (#40845)
+- Rotate fernet key in batches to limit memory usage (#40786)
+- Add gauge metric for 'last_num_of_db_queries' parameter (#40833)
+- Set parallelism log messages to warning level for better visibility (#39298)
+- Add error handling for encoding the dag runs (#40222)
+- Use params instead of dag_run.conf in example DAG (#40759)
+- Load Example Plugins with Example DAGs (#39999)
+- Stop deferring TimeDeltaSensorAsync task when the target_dttm is in the past (#40719)
+- Send important executor logs to task logs (#40468)
+- Open external links in new tabs (#40635)
+- Attempt to add ReactJSON view to rendered templates (#40639)
+- Speeding up regex match time for custom warnings (#40513)
+- Refactor DAG.dataset_triggers into the timetable class (#39321)
+- add next_kwargs to StartTriggerArgs (#40376)
+- Improve UI error handling (#40350)
+- Remove double warning in CLI when config value is deprecated (#40319)
+- Implement XComArg concat() (#40172)
+- Added ``get_extra_dejson`` method with nested parameter which allows you to specify if you want the nested json as string to be also deserialized (#39811)
+- Add executor field to the task instance API (#40034)
+- Support checking for db path absoluteness on Windows (#40069)
+- Introduce StartTriggerArgs and prevent start trigger initialization in scheduler (#39585)
+- Add task documentation to details tab in grid view (#39899)
+- Allow executors to be specified with only the class name of the Executor (#40131)
+- Remove obsolete conditional logic related to try_number (#40104)
+- Allow Task Group Ids to be passed as branches in BranchMixIn (#38883)
+- Javascript connection form will apply CodeMirror to all textarea's dynamically (#39812)
+- Determine needs_expansion at time of serialization (#39604)
+- Add indexes on dag_id column in referencing tables to speed up deletion of dag records (#39638)
+- Add task failed dependencies to details page (#38449)
+- Remove webserver try_number adjustment (#39623)
+- Implement slicing in lazy sequence (#39483)
+- Unify lazy db sequence implementations (#39426)
+- Add ``__getattr__`` to task decorator stub (#39425)
+- Allow passing labels to FAB Views registered via Plugins (#39444)
+- Simpler error message when trying to offline migrate with sqlite (#39441)
+- Add soft_fail to TriggerDagRunOperator (#39173)
+- Rename "dataset event" in context to use "outlet" (#39397)
+- Resolve ``RemovedIn20Warning`` in ``airflow task`` command (#39244)
+- Determine fail_stop on client side when db isolated (#39258)
+- Refactor cloudpickle support in Python operators/decorators (#39270)
+- Update trigger kwargs migration to specify existing_nullable (#39361)
+- Allowing tasks to start execution directly from triggerer without going to worker (#38674)
+- Better ``db migrate`` error messages (#39268)
+- Add stacklevel into the ``suppress_and_warn`` warning (#39263)
+- Support searching by dag_display_name (#39008)
+- Allow sort by on all fields in MappedInstances.tsx (#38090)
+- Expose count of scheduled tasks in metrics (#38899)
+- Use ``declarative_base`` from ``sqlalchemy.orm`` instead of ``sqlalchemy.ext.declarative`` (#39134)
+- Add example DAG to demonstrate emitting approaches (#38821)
+- Give ``on_task_instance_failed`` access to the error that caused the failure (#38155)
+- Simplify dataset serialization (#38694)
+- Add heartbeat recovery message to jobs (#34457)
+- Remove select_column option in TaskInstance.get_task_instance (#38571)
+- Don't create session in get_dag if not reading dags from database (#38553)
+- Add a migration script for encrypted trigger kwargs (#38358)
+- Implement render_templates on TaskInstancePydantic (#38559)
+- Handle optional session in _refresh_from_db (#38572)
+- Make type annotation less confusing in task_command.py (#38561)
+- Use fetch_dagrun directly to avoid session creation (#38557)
+- Added ``output_processor`` parameter to ``BashProcessor`` (#40843)
+- Improve serialization for Database Isolation Mode (#41239)
+- Only orphan non-orphaned Datasets (#40806)
+- Adjust gantt width based on task history dates (#41192)
+- Enable scrolling on legend with high number of elements. (#41187)
-BaseSensorOperator now respects the trigger_rule of downstream tasks
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Bug Fixes
+"""""""""
+- Bugfix for get_parsing_context() when ran with LocalExecutor (#40738)
+- Validating provider documentation urls before displaying in views (#40933)
+- Move import to make PythonOperator working on Windows (#40424)
+- Fix dataset_with_extra_from_classic_operator example DAG (#40747)
+- Call listener on_task_instance_failed() after ti state is changed (#41053)
+- Add ``never_fail`` in BaseSensor (#40915)
+- Fix tasks API endpoint when DAG doesn't have ``start_date`` (#40878)
+- Fix and adjust URL generation for UI grid and older runs (#40764)
+- Rotate fernet key optimization (#40758)
+- Fix class instance vs. class type in validate_database_executor_compatibility() call (#40626)
+- Clean up dark mode (#40466)
+- Validate expected types for args for DAG, BaseOperator and TaskGroup (#40269)
+- Exponential Backoff Not Functioning in BaseSensorOperator Reschedule Mode (#39823)
+- local task job: add timeout, to not kill on_task_instance_success listener prematurely (#39890)
+- Move Post Execution Log Grouping behind Exception Print (#40146)
+- Fix triggerer race condition in HA setting (#38666)
+- Pass triggered or existing DAG Run logical date to DagStateTrigger (#39960)
+- Passing ``external_task_group_id`` to ``WorkflowTrigger`` (#39617)
+- ECS Executor: Set tasks to RUNNING state once active (#39212)
+- Only heartbeat if necessary in backfill loop (#39399)
+- Fix trigger kwarg encryption migration (#39246)
+- Fix decryption of trigger kwargs when downgrading. (#38743)
+- Fix wrong link in TriggeredDagRuns (#41166)
+- Pass MapIndex to LogLink component for external log systems (#41125)
+- Add NonCachingRotatingFileHandler for worker task (#41064)
+- Add argument include_xcom in method resolve an optional value (#41062)
+- Sanitizing file names in example_bash_decorator DAG (#40949)
+- Show dataset aliases in dependency graphs (#41128)
+- Render Dataset Conditions in DAG Graph view (#41137)
+- Add task duration plot across dagruns (#40755)
+- Add start execution from trigger support for existing core sensors (#41021)
+- add example dag for dataset_alias (#41037)
+- Add dataset alias unique constraint and remove wrong dataset alias removing logic (#41097)
+- Set "has_outlet_datasets" to true if "dataset alias" exists (#41091)
+- Make HookLineageCollector group datasets by (#41034)
+- Enhance start_trigger_args serialization (#40993)
+- Refactor ``BaseSensorOperator`` introduce ``skip_policy`` parameter (#40924)
+- Fix viewing logs from triggerer when task is deferred (#41272)
+- Refactor how triggered dag run url is replaced (#41259)
+- Added support for additional sql alchemy session args (#41048)
+- Allow empty list in TriggerDagRun failed_state (#41249)
+- Clean up the exception handler when run_as_user is the airflow user (#41241)
+- Collapse docs when click and folded (#41214)
+- Update updated_at when saving to db as session.merge does not trigger on-update (#40782)
+- Fix query count statistics when parsing DAF file (#41149)
+- Method Resolution Order in operators without ``__init__`` (#41086)
+- Ensure try_number incremented for empty operator (#40426)
-Previously, BaseSensorOperator with setting ``soft_fail=True`` skips itself
-and skips all its downstream tasks unconditionally, when it fails i.e the trigger_rule of downstream tasks is not
-respected.
+Miscellaneous +""""""""""""" +- Remove the Experimental flag from ``OTel`` Traces (#40874) +- Bump packaging version to 23.0 in order to fix issue with older otel (#40865) +- Simplify _auth_manager_is_authorized_map function (#40803) +- Use correct unknown executor exception in scheduler job (#40700) +- Add D1 ``pydocstyle`` rules to pyproject.toml (#40569) +- Enable enforcing ``pydocstyle`` rule D213 in ruff. (#40448, #40464) +- Update ``Dag.test()`` to run with an executor if desired (#40205) +- Update jest and babel minor versions (#40203) +- Refactor BashOperator and Bash decorator for consistency and simplicity (#39871) +- Add ``AirflowInternalRuntimeError`` for raise ``non catchable`` errors (#38778) +- ruff version bump 0.4.5 (#39849) +- Bump ``pytest`` to 8.0+ (#39450) +- Remove stale comment about TI index (#39470) +- Configure ``back_populates`` between ``DagScheduleDatasetReference.dag`` and ``DagModel.schedule_dataset_references`` (#39392) +- Remove deprecation warnings in endpoints.py (#39389) +- Fix SQLA deprecations in Airflow core (#39211) +- Use class-bound attribute directly in SA (#39198, #39195) +- Fix stacklevel for TaskContextLogger (#39142) +- Capture warnings during collect DAGs (#39109) +- Resolve ``B028`` (no-explicit-stacklevel) in core (#39123) +- Rename model ``ImportError`` to ``ParseImportError`` for avoid shadowing with builtin exception (#39116) +- Add option to support cloudpickle in PythonVenv/External Operator (#38531) +- Suppress ``SubDagOperator`` examples warnings (#39057) +- Add log for running callback (#38892) +- Use ``model_dump`` instead of ``dict`` for serialize Pydantic V2 model (#38933) +- Widen cheat sheet column to avoid wrapping commands (#38888) +- Update ``hatchling`` to latest version (1.22.5) (#38780) +- bump uv to 0.1.29 (#38758) +- Add missing serializations found during provider tests fixing (#41252) +- Bump ``ws`` from 7.5.5 to 7.5.10 in /airflow/www (#40288) +- Improve typing for allowed/failed_states in TriggerDagRunOperator (#39855) -In the new behavior, the trigger_rule of downstream tasks is respected. -User can preserve/achieve the original behaviour by setting the trigger_rule of each downstream task to ``all_success``. +Doc Only Changes +"""""""""""""""" +- Add ``filesystems`` and ``dataset-uris`` to "how to create your own provider" page (#40801) +- Fix (TM) to (R) in Airflow repository (#40783) +- Set ``otel_on`` to True in example airflow.cfg (#40712) +- Add warning for _AIRFLOW_PATCH_GEVENT (#40677) +- Update multi-team diagram proposal after Airflow 3 discussions (#40671) +- Add stronger warning that MSSQL is not supported and no longer functional (#40565) +- Fix misleading mac menu structure in howto (#40440) +- Update k8s supported version in docs (#39878) +- Add compatibility note for Listeners (#39544) +- Update edge label image in documentation example with the new graph view (#38802) +- Update UI doc screenshots (#38680) +- Add section "Manipulating queued dataset events through REST API" (#41022) +- Add information about lack of security guarantees for docker compose (#41072) +- Add links to example dags in use params section (#41031) +- Change ``task_id`` from ``send_email`` to ``send_email_notification`` in ``taskflow.rst`` (#41060) +- Remove unnecessary nginx redirect rule from reverse proxy documentation (#38953) -BaseOperator uses metaclass -~~~~~~~~~~~~~~~~~~~~~~~~~~~ -``BaseOperator`` class uses a ``BaseOperatorMeta`` as a metaclass. This meta class is based on -``abc.ABCMeta``. 
If your custom operator uses a different metaclass, then you will have to adjust it accordingly (see the sketch above). -Remove SQL support in BaseHook -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Airflow 2.9.3 (2024-07-15) +-------------------------- -Remove ``get_records``, ``get_pandas_df`` and ``run`` from BaseHook; they only apply to SQL-like hooks. -If you want to use them, or your custom hook inherits them, please use ``airflow.hooks.dbapi.DbApiHook`` +Significant Changes +^^^^^^^^^^^^^^^^^^^ -Assigning a task to a DAG using bitwise shift (bit-shift) operators is no longer supported -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Time unit for ``scheduled_duration`` and ``queued_duration`` changed (#37936) +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Previously, you could assign a task to a DAG as follows: +``scheduled_duration`` and ``queued_duration`` metrics are now emitted in milliseconds instead of seconds. -.. code-block:: python +By convention, all statsd metrics should be emitted in milliseconds; this is later expected in e.g. the ``prometheus`` statsd-exporter. - dag = DAG("my_dag") - dummy = DummyOperator(task_id="dummy") - dag >> dummy +Support for OpenTelemetry Metrics is no longer "Experimental" (#40286) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -This is no longer supported. Instead, we recommend using the DAG as a context manager: +Experimental support for OpenTelemetry was added in 2.7.0; since then, fixes and improvements were added, and we now announce the feature as stable. -.. code-block:: python - with DAG("my_dag") as dag: - dummy = DummyOperator(task_id="dummy") -Removed deprecated import mechanism -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Bug Fixes +""""""""" +- Fix calendar view scroll (#40458) +- Validating provider description for urls in provider list view (#40475) +- Fix compatibility with old MySQL 8.0 (#40314) +- Fix dag (un)pausing won't work on environment where dag files are missing (#40345) +- Extra being passed to SQLalchemy (#40391) +- Handle unsupported operand int + str when value of tag is int (job_id) (#40407) +- Fix TriggeredDagRunOperator triggered link (#40336) +- Add ``[webserver]update_fab_perms`` to deprecated configs (#40317) +- Swap dag run link from legacy graph to grid with graph tab (#40241) +- Change ``httpx`` to ``requests`` in ``file_task_handler`` (#39799) +- Fix import future annotations in venv jinja template (#40208) +- Ensures DAG params order regardless of backend (#40156) +- Use a join for TI notes in TI batch API endpoint (#40028) +- Improve trigger UI for string array format validation (#39993) +- Disable jinja2 rendering for doc_md (#40522) +- Skip checking sub dags list if taskinstance state is skipped (#40578) +- Recognize quotes when parsing urls in logs (#40508) -The deprecated import mechanism has been removed so the import of modules becomes more consistent and explicit.
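Returning to the ``BaseHook`` change above, a migration sketch using a ``DbApiHook`` subclass (the connection id ``my_postgres`` and the SQL statements are made-up examples):

.. code-block:: python

    from airflow.providers.postgres.hooks.postgres import PostgresHook  # a DbApiHook subclass

    hook = PostgresHook(postgres_conn_id="my_postgres")
    rows = hook.get_records("SELECT id, name FROM users")          # was BaseHook.get_records
    df = hook.get_pandas_df("SELECT id, name FROM users")          # was BaseHook.get_pandas_df
    hook.run("DELETE FROM users WHERE id = %s", parameters=(42,))  # was BaseHook.run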
+Doc Only Changes +"""""""""""""""" +- Add notes about passing secrets via environment variables (#40519) +- Revamp some confusing log messages (#40334) +- Add more precise description of masking sensitive field names (#40512) +- Add slightly more detailed guidance about upgrading to the docs (#40227) +- Metrics allow_list complete example (#40120) +- Add warning to deprecated api docs that access control isn't applied (#40129) +- Simpler command to check local scheduler is alive (#40074) +- Add a note and an example clarifying the usage of DAG-level params (#40541) +- Fix highlight of example code in dags.rst (#40114) +- Add warning about the PostgresOperator being deprecated (#40662) +- Updating airflow download links to CDN based links (#40618) +- Fix import statement for DatasetOrTimetable example (#40601) +- Further clarify triage process (#40536) +- Fix param order in PythonOperator docstring (#40122) +- Update serializers.rst to mention that bytes are not supported (#40597) -For example: ``from airflow.operators import BashOperator`` -becomes ``from airflow.operators.bash_operator import BashOperator`` +Miscellaneous +""""""""""""" +- Upgrade build installers and dependencies (#40177) +- Bump braces from 3.0.2 to 3.0.3 in /airflow/www (#40180) +- Upgrade to another version of trove-classifier (new CUDA classifiers) (#40564) +- Rename "try_number" increments that are unrelated to the airflow concept (#39317) +- Update trove classifiers to the latest version as build dependency (#40542) +- Upgrade to latest version of ``hatchling`` as build dependency (#40387) +- Fix bug in ``SchedulerJobRunner._process_executor_events`` (#40563) +- Remove logging for "blocked" events (#40446) -Changes to sensor imports -~~~~~~~~~~~~~~~~~~~~~~~~~ -Sensors are now accessible via ``airflow.sensors`` and no longer via ``airflow.operators.sensors``. -For example: ``from airflow.operators.sensors import BaseSensorOperator`` -becomes ``from airflow.sensors.base import BaseSensorOperator`` +Airflow 2.9.2 (2024-06-10) +-------------------------- -Skipped tasks can satisfy wait_for_downstream -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Significant Changes +^^^^^^^^^^^^^^^^^^^ -Previously, a task instance with ``wait_for_downstream=True`` would only run if the downstream task of -the previous task instance was successful. Meanwhile, a task instance with ``depends_on_past=True`` -would run if the previous task instance was either successful or skipped. These two flags are close siblings, -yet they had different behavior. This inconsistency in behavior made the API less intuitive to users. -To maintain consistent behavior, either a successful or a skipped downstream task can now satisfy the -``wait_for_downstream=True`` flag. +No significant changes. -``airflow.utils.helpers.cross_downstream`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Bug Fixes +""""""""" +- Fix bug that makes ``AirflowSecurityManagerV2`` leave transactions in the ``idle in transaction`` state (#39935) +- Fix alembic auto-generation and rename mismatching constraints (#39032) +- Add the existing_nullable to the downgrade side of the migration (#39374) +- Fix Mark Instance state buttons stay disabled if user lacks permission (#37451).
(#38732) +- Use SKIP LOCKED instead of NOWAIT in mini scheduler (#39745) +- Remove DAG Run Add option from FAB view (#39881) +- Add max_consecutive_failed_dag_runs in API spec (#39830) +- Fix example_branch_operator failing in python 3.12 (#39783) +- Fetch served logs also when task attempt is up for retry and no remote logs available (#39496) +- Change dataset URI validation to raise warning instead of error in Airflow 2.9 (#39670) +- Visible DAG RUN doesn't point to the same dag run id (#38365) +- Refactor ``SafeDogStatsdLogger`` to use ``get_validator`` to enable pattern matching (#39370) +- Fix custom actions in security manager ``has_access`` (#39421) +- Fix HTTP 500 Internal Server Error if DAG is triggered with bad params (#39409) +- Fix static file caching is disabled in Airflow Webserver. (#39345) +- Fix TaskHandlerWithCustomFormatter now adds prefix only once (#38502) +- Do not provide deprecated ``execution_date`` in ``@apply_lineage`` (#39327) +- Add missing conn_id to string representation of ObjectStoragePath (#39313) +- Fix ``sql_alchemy_engine_args`` config example (#38971) +- Add Cache-Control "no-store" to all dynamically generated content (#39550) -``airflow.utils.helpers.chain`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Miscellaneous +""""""""""""" +- Limit ``yandex`` provider to avoid ``mypy`` errors (#39990) +- Warn on mini scheduler failures instead of debug (#39760) +- Change type definition for ``provider_info_cache`` decorator (#39750) +- Better typing for BaseOperator ``defer`` (#39742) +- More typing in TimeSensor and TimeSensorAsync (#39696) +- Re-raise exception from strict dataset URI checks (#39719) +- Fix stacklevel for _log_state helper (#39596) +- Resolve SA warnings in migrations scripts (#39418) +- Remove unused index ``idx_last_scheduling_decision`` on ``dag_run`` table (#39275) -The ``chain`` and ``cross_downstream`` methods have been moved to the ``airflow.models.baseoperator`` module from -the ``airflow.utils.helpers`` module. +Doc Only Changes +"""""""""""""""" +- Provide extra tip on labeling DynamicTaskMapping (#39977) +- Improve visibility of links / variables / other configs in Configuration Reference (#39916) +- Remove 'legacy' definition for ``CronDataIntervalTimetable`` (#39780) +- Update plugins.rst examples to use pyproject.toml over setup.py (#39665) +- Fix nit in pg set-up doc (#39628) +- Add Matomo to Tracking User Activity docs (#39611) +- Fix Connection.get -> Connection.get_connection_from_secrets (#39560) +- Adding note for provider dependencies (#39512) +- Update docker-compose command (#39504) +- Update note about restarting triggerer process (#39436) +- Updating S3LogLink with an invalid bucket link (#39424) +- Update testing_packages.rst (#38996) +- Add multi-team diagrams (#38861) -The ``baseoperator`` module seems to be a better choice to keep -closely coupled methods together. The helpers module is supposed to contain standalone helper methods -that can be imported by all classes. -The ``chain`` method and ``cross_downstream`` method both use BaseOperator. If any other package imports -any classes or functions from the helpers module, then it automatically has an -implicit dependency on BaseOperator. That can often lead to cyclic dependencies. -More information in `AIRFLOW-6392 `_ +Airflow 2.9.1 (2024-05-03) +-------------------------- -In Airflow < 2.0 you imported those two methods like this: +Significant Changes +^^^^^^^^^^^^^^^^^^^ -..
code-block:: python +Stackdriver logging bugfix requires Google provider ``10.17.0`` or later (#38071) +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - from airflow.utils.helpers import chain - from airflow.utils.helpers import cross_downstream +If you use Stackdriver logging, you must use Google provider version ``10.17.0`` or later. Airflow ``2.9.1`` now passes ``gcp_log_name`` to the ``StackdriverTaskHandler`` instead of ``name``, and this will fail on earlier provider versions. -In Airflow 2.0 it should be changed to: +This fixes a bug where the log name configured in ``[logging] remote_base_log_folder`` was overridden when Airflow configured logging, resulting in task logs going to the wrong destination. -.. code-block:: python - from airflow.models.baseoperator import chain - from airflow.models.baseoperator import cross_downstream -``airflow.operators.python.BranchPythonOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Bug Fixes +""""""""" +- Make task log messages include run_id (#39280) +- Copy menu_item ``href`` for nav bar (#39282) +- Fix trigger kwarg encryption migration (#39246, #39361, #39374) +- Add workaround for datetime-local input in ``firefox`` (#39261) +- Add Grid button to Task Instance view (#39223) +- Get served logs when remote or executor logs not available for non-running task try (#39177) +- Fixed side effect of menu filtering causing disappearing menus (#39229) +- Use grid view for Task Instance's ``log_url`` (#39183) +- Improve task filtering ``UX`` (#39119) +- Improve rendered_template ``ux`` in react dag page (#39122) +- Graph view improvements (#38940) +- Check that the dataset<>task exists before trying to render graph (#39069) +- Hostname was "redacted", not "redact"; remove it when there is no context (#39037) +- Check whether ``AUTH_ROLE_PUBLIC`` is set in ``check_authentication`` (#39012) +- Move rendering of ``map_index_template`` so it renders for failed tasks as long as it was defined before the point of failure (#38902) +- ``Undeprecate`` ``BaseXCom.get_one`` method for now (#38991) +- Add ``inherit_cache`` attribute for ``CreateTableAs`` custom SA Clause (#38985) +- Don't wait for DagRun lock in mini scheduler (#38914) +- Fix calendar view with no DAG Run (#38964) +- Changed the background color of external task in graph (#38969) +- Fix dag run selection (#38941) +- Fix ``SAWarning`` 'Coercing Subquery object into a select() for use in IN()' (#38926) +- Fix implicit ``cartesian`` product in AirflowSecurityManagerV2 (#38913) +- Fix problem that links in legacy log view can not be clicked (#38882) +- Fix dag run link params (#38873) +- Use async db calls in WorkflowTrigger (#38689) +- Fix audit log events filter (#38719) +- Use ``methodtools.lru_cache`` instead of ``functools.lru_cache`` in class methods (#37757) +- Raise deprecated warning in ``airflow dags backfill`` only if ``-I`` / ``--ignore-first-depends-on-past`` provided (#38676) -``BranchPythonOperator`` will now return a value equal to the ``task_id`` of the chosen branch, -where previously it returned None. Since it inherits from BaseOperator, it will do an -``xcom_push`` of this value if ``do_xcom_push=True``. This is useful for downstream decision-making.
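A minimal sketch of the ``BranchPythonOperator`` return-value behaviour described above (the task ids are made up; the chosen ``task_id`` lands in XCom because ``do_xcom_push`` defaults to ``True``):

.. code-block:: python

    from airflow.operators.python import BranchPythonOperator, PythonOperator


    def choose_branch():
        return "fast_path"  # the returned task_id decides which branch runs


    branch = BranchPythonOperator(task_id="branch", python_callable=choose_branch)


    def report(ti):
        # Downstream decision-making: read which branch was taken from XCom.
        print("branch chose:", ti.xcom_pull(task_ids="branch"))


    reporter = PythonOperator(task_id="report", python_callable=report)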
+Miscellaneous +""""""""""""" +- ``TriggerDagRunOperator`` deprecate ``execution_date`` in favor of ``logical_date`` (#39285) +- Force to use Airflow Deprecation warnings categories on ``@deprecated`` decorator (#39205) +- Add warning about run/import Airflow under the Windows (#39196) +- Update ``is_authorized_custom_view`` from auth manager to handle custom actions (#39167) +- Add in Trove classifiers Python 3.12 support (#39004) +- Use debug level for ``minischeduler`` skip (#38976) +- Bump ``undici`` from ``5.28.3 to 5.28.4`` in ``/airflow/www`` (#38751) -``airflow.sensors.sql_sensor.SqlSensor`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -SqlSensor is now consistent with Python's ``bool()`` function, and the ``allow_null`` parameter has been removed. +Doc Only Changes +"""""""""""""""" +- Fix supported k8s version in docs (#39172) +- Dynamic task mapping ``PythonOperator`` op_kwargs (#39242) +- Add link to ``user`` and ``role`` commands (#39224) +- Add ``k8s 1.29`` to supported version in docs (#39168) +- Data aware scheduling docs edits (#38687) +- Update ``DagBag`` class docstring to include all params (#38814) +- Correcting an example taskflow example (#39015) +- Remove decorator from rendering fields example (#38827) -It will resolve after receiving any value that is cast to ``True`` with Python's ``bool(value)``. This -changes the previous behaviour for ``NULL`` and ``'0'``: earlier, ``'0'`` was treated as a success -criterion, and ``NULL`` was treated depending on the value of the ``allow_null``\ parameter. All of the previous -behaviour is still achievable by setting the param ``success`` to ``lambda x: x is None or str(x) not in ('0', '')``. -``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The TriggerDagRunOperator now takes a ``conf`` argument to which a dict can be provided as conf for the DagRun. -As a result, the ``python_callable`` argument was removed. PR: https://github.com/apache/airflow/pull/6317. +Airflow 2.9.0 (2024-04-08) +-------------------------- -``airflow.operators.python.PythonOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Significant Changes +^^^^^^^^^^^^^^^^^^^ -The ``provide_context`` argument on the PythonOperator was removed. The signature of the callable passed to the PythonOperator is now inferred and argument values are always automatically provided. There is no need to explicitly provide or not provide the context anymore. For example: +The following Listener API methods are considered stable and can be used for production systems (they were an experimental feature in older Airflow versions) (#36376): +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +Lifecycle events: -.. code-block:: python +- ``on_starting`` +- ``before_stopping`` - def myfunc(execution_date): - print(execution_date) +DagRun State Change Events: +- ``on_dag_run_running`` +- ``on_dag_run_success`` +- ``on_dag_run_failed`` - python_operator = PythonOperator(task_id="mytask", python_callable=myfunc, dag=dag) +TaskInstance State Change Events: -Notice you don't have to set ``provide_context=True``; variables from the task context are now automatically detected and provided.
+- ``on_task_instance_running`` +- ``on_task_instance_success`` +- ``on_task_instance_failed`` -All context variables can still be provided with a double-asterisk argument: +Support for Microsoft SQL-Server for Airflow Meta Database has been removed (#36514) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -.. code-block:: python +After `discussion `__ +and a `voting process `__, +the Airflow PMC members and Committers have reached a resolution to no longer maintain MsSQL as a +supported Database Backend. - def myfunc(**context): - print(context) # all variables will be provided to context +As of Airflow 2.9.0, support of MsSQL has been removed for the Airflow Database Backend. +A migration script which can help migrating the database *before* upgrading to Airflow 2.9.0 is available in +`airflow-mssql-migration repo on GitHub `_. +Note that the migration script is provided without support and warranty. - python_operator = PythonOperator(task_id="mytask", python_callable=myfunc) +This does not affect the existing provider packages (operators and hooks); DAGs can still access and process data from MsSQL. -The task context variable names are reserved names in the callable function, hence a clash with ``op_args`` and ``op_kwargs`` results in an exception: +Dataset URIs are now validated on input (#37005) +"""""""""""""""""""""""""""""""""""""""""""""""" -.. code-block:: python +Datasets must use a URI that conforms to the rules laid down in AIP-60, and the value +will be automatically normalized when the DAG file is parsed. See +`documentation on Datasets `_ for +a more detailed description of the rules. - def myfunc(dag): - # raises a ValueError because "dag" is a reserved name - # valid signature example: myfunc(mydag) - print("output") +You may need to change your Dataset identifiers if they look like a URI, but are +used in a less mainstream way, such as relying on the URI's auth section, or +have a case-sensitive protocol name. +The method ``get_permitted_menu_items`` in ``BaseAuthManager`` has been renamed ``filter_permitted_menu_items`` (#37627) +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - python_operator = PythonOperator( - task_id="mytask", - op_args=[1], - python_callable=myfunc, - ) +Add REST API actions to Audit Log events (#37734) +""""""""""""""""""""""""""""""""""""""""""""""""" -The change is backwards compatible; setting ``provide_context`` will add the ``provide_context`` variable to the ``kwargs`` (but won't do anything). +The Audit Log ``event`` name for REST API events will be prepended with ``api.`` or ``ui.``, depending on if it came from the Airflow UI or externally. -PR: `#5990 `_ +Official support for Python 3.12 (#38025) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +There are a few caveats though: -``airflow.providers.standard.sensors.filesystem.FileSensor`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +* Pendulum2 does not support Python 3.12. For Python 3.12 you need to use + `Pendulum 3 `_ -FileSensor now takes a glob pattern, not just a filename. If the filename you are looking for has ``*``\ , ``?``\ , or ``[`` in it then you should replace these with ``[*]``\ , ``[?]``\ , and ``[[]``. +* Minimum SQLAlchemy version supported when Pandas is installed for Python 3.12 is ``1.4.36`` released in April 2022.
Airflow 2.9.0 increases the minimum supported version of SQLAlchemy to ``1.4.36`` for all + Python versions. -``airflow.operators.subdag_operator.SubDagOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Not all Providers support Python 3.12. At the initial release of Airflow 2.9.0 the following providers +are released without support for Python 3.12: -``SubDagOperator`` has been changed to use the Airflow scheduler instead of backfill -to schedule tasks in the subdag. Users no longer need to specify the executor -in ``SubDagOperator``. + * ``apache.beam`` - pending on `Apache Beam support for 3.12 `_ + * ``papermill`` - pending on releasing a Python 3.12 compatible papermill client version + `including this merged issue `_ -``airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Prevent large string objects from being stored in the Rendered Template Fields (#38094) +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +There's now a limit to the length of data that can be stored in the Rendered Template Fields. +The limit is set to 4096 characters. If the data exceeds this limit, it will be truncated. You can change this limit +by setting the ``[core]max_template_field_length`` configuration option in your airflow config. -``airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Change xcom table column value type to longblob for MySQL backend (#38401) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +The Xcom table column ``value`` type has changed from ``blob`` to ``longblob``. This will allow you to store relatively big data in Xcom, but processing can take a significant amount of time if you have a lot of large data stored in Xcom. -``airflow.providers.cncf.kubernetes.operators.kubernetes_pod.KubernetesPodOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +To downgrade from revision: ``b4078ac230a1``, ensure that you don't have Xcom values larger than 65,535 bytes. Otherwise, you'll need to clean those rows or run ``airflow db clean xcom`` to clean the Xcom table. -``airflow.providers.ssh.operators.ssh.SSHOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Stronger validation for key parameter defaults in taskflow context variables (#38015) +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -``airflow.providers.microsoft.winrm.operators.winrm.WinRMOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Because the taskflow implementation, in conjunction with context variable defaults, can generate invalid parameter +orders, it is no longer accepted (and is now validated) that taskflow functions are defined with context key defaults +other than ``None``. If you have done this before, you will most likely see a broken DAG and an error message like +``Error message: Context key parameter my_param can't have a default other than None``.
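A minimal sketch of what this validation rejects and accepts (``my_param`` is an ordinary argument; ``logical_date`` stands in for any injected context key):

.. code-block:: python

    from airflow.decorators import task


    @task
    def fails(logical_date="not-none"):  # rejected: a context key with a non-None default
        ...


    @task
    def works(my_param: str, logical_date=None):  # accepted: context keys default to None
        print(my_param, logical_date)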
-``airflow.operators.bash.BashOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +New Features +"""""""""""" +- Allow users to write dag_id and task_id in their national characters, added display name for dag / task (v2) (#38446) +- Prevent large objects from being stored in the RTIF (#38094) +- Use current time to calculate duration when end date is not present. (#38375) +- Add average duration mark line in task and dagrun duration charts. (#38214, #38434) +- Add button to manually create dataset events (#38305) +- Add ``Matomo`` as an option for analytics_tool. (#38221) +- Experimental: Support custom weight_rule implementation to calculate the TI priority_weight (#38222) +- Adding ability to automatically set DAG to off after X times it failed sequentially (#36935) +- Add dataset conditions to next run datasets modal (#38123) +- Add task log grouping to UI (#38021) +- Add dataset_expression to grid dag details (#38121) +- Introduce mechanism to support multiple executor configuration (#37635) +- Add color formatting for ANSI chars in logs from task executions (#37985) +- Add the dataset_expression as part of DagModel and DAGDetailSchema (#37826) +- Allow longer rendered_map_index (#37798) +- Inherit the run_ordering from DatasetTriggeredTimetable for DatasetOrTimeSchedule (#37775) +- Implement AIP-60 Dataset URI formats (#37005) +- Introducing Logical Operators for dataset conditional logic (#37101) +- Add post endpoint for dataset events (#37570) +- Show custom instance names for a mapped task in UI (#36797) +- Add excluded/included events to get_event_logs api (#37641) +- Add datasets to dag graph (#37604) +- Show dataset events above task/run details in grid view (#37603) +- Introduce new config variable to control whether DAG processor outputs to stdout (#37439) +- Make Datasets ``hashable`` (#37465) +- Add conditional logic for dataset triggering (#37016) +- Implement task duration page in react. (#35863) +- Add ``queuedEvent`` endpoint to get/delete DatasetDagRunQueue (#37176) +- Support multiple XCom output in the BaseOperator (#37297) +- AIP-58: Add object storage backend for xcom (#37058) +- Introduce ``DatasetOrTimeSchedule`` (#36710) +- Add ``on_skipped_callback`` to ``BaseOperator`` (#36374) +- Allow override of hovered navbar colors (#36631) +- Create new Metrics with Tagging (#36528) +- Add support for openlineage to AFS and common.io (#36410) +- Introduce ``@task.bash`` TaskFlow decorator (#30176, #37875) -``airflow.providers.docker.operators.docker.DockerOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Improvements +"""""""""""" +- More human friendly "show tables" output for db cleanup (#38654) +- Improve trigger assign_unassigned by merging alive_triggerer_ids and get_sorted_triggers queries (#38664) +- Add exclude/include events filters to audit log (#38506) +- Clean up unused triggers in a single query for all dialects except MySQL (#38663) +- Update Confirmation Logic for Config Changes on Sensitive Environments Like Production (#38299) +- Improve datasets graph UX (#38476) +- Only show latest dataset event timestamp after last run (#38340) +- Add button to clear only failed tasks in a dagrun. 
(#38217) +- Delete all old dag pages and redirect to grid view (#37988) +- Check task attribute before use in sentry.add_tagging() (#37143) +- Mysql change xcom value col type for MySQL backend (#38401) +- ``ExternalPythonOperator`` use version from ``sys.version_info`` (#38377) +- Replace too broad exceptions into the Core (#38344) +- Add CLI support for bulk pause and resume of DAGs (#38265) +- Implement methods on TaskInstancePydantic and DagRunPydantic (#38295, #38302, #38303, #38297) +- Made filters bar collapsible and add a full screen toggle (#38296) +- Encrypt all trigger attributes (#38233, #38358, #38743) +- Upgrade react-table package. Use with Audit Log table (#38092) +- Show if dag page filters are active (#38080) +- Add try number to mapped instance (#38097) +- Add retries to job heartbeat (#37541) +- Add REST API events to Audit Log (#37734) +- Make current working directory as templated field in BashOperator (#37968) +- Add calendar view to react (#37909) +- Add ``run_id`` column to log table (#37731) +- Add ``tryNumber`` to grid task instance tooltip (#37911) +- Session is not used in _do_render_template_fields (#37856) +- Improve MappedOperator property types (#37870) +- Remove provide_session decorator from TaskInstancePydantic methods (#37853) +- Ensure the "airflow.task" logger used for TaskInstancePydantic and TaskInstance (#37857) +- Better error message for internal api call error (#37852) +- Increase tooltip size of dag grid view (#37782) (#37805) +- Use named loggers instead of root logger (#37801) +- Add Run Duration in React (#37735) +- Avoid non-recommended usage of logging (#37792) +- Improve DateTimeTrigger typing (#37694) +- Make sure all unique run_ids render a task duration bar (#37717) +- Add Dag Audit Log to React (#37682) +- Add log event for auto pause (#38243) +- Better message for exception for templated base operator fields (#37668) +- Clean up webserver endpoints adding to audit log (#37580) +- Filter datasets graph by dag_id (#37464) +- Use new exception type inheriting BaseException for SIGTERMs (#37613) +- Refactor dataset class inheritance (#37590) +- Simplify checks for package versions (#37585) +- Filter Datasets by associated dag_ids (GET /datasets) (#37512) +- Enable "airflow tasks test" to run deferrable operator (#37542) +- Make datasets list/graph width adjustable (#37425) +- Speedup determine installed airflow version in ``ExternalPythonOperator`` (#37409) +- Add more task details from rest api (#37394) +- Add confirmation dialog box for DAG run actions (#35393) +- Added shutdown color to the STATE_COLORS (#37295) +- Remove legacy dag details page and redirect to grid (#37232) +- Order XCom entries by map index in API (#37086) +- Add data_interval_start and data_interval_end in dagrun create API endpoint (#36630) +- Making links in task logs as hyperlinks by preventing HTML injection (#36829) +- Improve ExternalTaskSensor Async Implementation (#36916) +- Make Datasets ``Pathlike`` (#36947) +- Simplify query for orphaned tasks (#36566) +- Add deferrable param in FileSensor (#36840) +- Run Trigger Page: Configurable number of recent configs (#36878) +- Merge ``nowait`` and skip_locked into with_row_locks (#36889) +- Return the specified field when get ``dag/dagRun`` in the REST API (#36641) +- Only iterate over the items if debug is enabled for DagFileProcessorManager (#36761) +- Add a fuzzy/regex pattern-matching for metric allow and block list (#36250) +- Allow custom columns in cli dags list (#35250) +- Make it possible to change the 
default cron timetable (#34851) +- Some improvements to Airflow IO code (#36259) +- Improve TaskInstance typing hints (#36487) +- Remove dependency of ``Connexion`` from auth manager interface (#36209) +- Refactor ExternalDagLink to not create ad hoc TaskInstances (#36135) -``airflow.providers.http.operators.http.SimpleHttpOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Bug Fixes +""""""""" +- Load providers configuration when gunicorn workers start (#38795) +- Fix grid header rendering (#38720) +- Add a task instance dependency for mapped dependencies (#37498) +- Improve stability of remove_task_decorator function (#38649) +- Mark more fields on API as dump-only (#38616) +- Fix ``total_entries`` count on the event logs endpoint (#38625) +- Add padding to bottom of log block. (#38610) +- Properly serialize nested attrs classes (#38591) +- Fixing the ``tz`` in next run ID info (#38482) +- Show abandoned tasks in Grid View (#38511) +- Apply task instance mutation hook consistently (#38440) +- Override ``chakra`` styles to keep ``dropdowns`` in filter bar (#38456) +- Store duration in seconds and scale to handle case when a value in the series has a larger unit than the preceding durations. (#38374) +- Don't allow defaults other than None in context parameters, and improve error message (#38015) +- Make postgresql default engine args comply with SA 2.0 (#38362) +- Add return statement to yield within a while loop in triggers (#38389) +- Ensure ``__exit__`` is called in decorator context managers (#38383) +- Make the method ``BaseAuthManager.is_authorized_custom_view`` abstract (#37915) +- Add upper limit to planned calendar events calculation (#38310) +- Fix Scheduler in daemon mode doesn't create PID at the specified location (#38117) +- Properly serialize TaskInstancePydantic and DagRunPydantic (#37855) +- Fix graph task state border color (#38084) +- Add back methods removed in security manager (#37997) +- Don't log "403" from worker serve-logs as "Unknown error". (#37933) +- Fix execution data validation error in ``/get_logs_with_metadata`` endpoint (#37756) +- Fix task duration selection (#37630) +- Refrain from passing ``encoding`` to the SQL engine in SQLAlchemy v2 (#37545) +- Fix 'implicitly coercing SELECT object to scalar subquery' in latest dag run statement (#37505) +- Clean up typing with max_execution_date query builder (#36958) +- Optimize max_execution_date query in single dag case (#33242) +- Fix list dags command for get_dagmodel is None (#36739) +- Load ``consuming_dags`` attr eagerly before dataset listener (#36247) -The ``do_xcom_push`` flag (a switch to push the result of an operator to xcom or not) appeared in different incarnations in different operators. Its function has been unified under a common name (\ ``do_xcom_push``\ ) on ``BaseOperator``. This way it is also easy to globally disable pushing results to xcom.
+Miscellaneous +""""""""""""" +- Remove display of param from the UI (#38660) +- Update log level to debug from warning about scheduled_duration metric (#38180) +- Use ``importlib_metadata`` with compat to Python 3.10/3.12 ``stdlib`` (#38366) +- Refactored ``__new__`` magic method of BaseOperatorMeta to avoid bad mixing classic and decorated operators (#37937) +- Use ``sys.version_info`` for determine Python Major.Minor (#38372) +- Add missing deprecated Fab auth manager (#38376) +- Remove unused loop variable from airflow package (#38308) +- Adding max consecutive failed dag runs info in UI (#38229) +- Bump minimum version of ``blinker`` add where it requires (#38140) +- Bump follow-redirects from 1.15.4 to 1.15.6 in /airflow/www (#38156) +- Bump Cryptography to ``> 39.0.0`` (#38112) +- Add Python 3.12 support (#36755, #38025, #36595) +- Avoid use of ``assert`` outside of the tests (#37718) +- Update ObjectStoragePath for universal_pathlib>=v0.2.2 (#37930) +- Resolve G004: Logging statement uses f-string (#37873) +- Update build and install dependencies. (#37910) +- Bump sanitize-html from 2.11.0 to 2.12.1 in /airflow/www (#37833) +- Update to latest installer versions. (#37754) +- Deprecate smtp configs in airflow settings / local_settings (#37711) +- Deprecate PY* constants into the airflow module (#37575) +- Remove usage of deprecated ``flask._request_ctx_stack`` (#37522) +- Remove redundant ``login`` attribute in ``airflow.__init__.py`` (#37565) +- Upgrade to FAB 4.3.11 (#37233) +- Remove SCHEDULED_DEPS which is no longer used anywhere since 2.0.0 (#37140) +- Replace ``datetime.datetime.utcnow`` by ``airflow.utils.timezone.utcnow`` in core (#35448) +- Bump aiohttp min version to avoid CVE-2024-23829 and CVE-2024-23334 (#37110) +- Move config related to FAB auth manager to FAB provider (#36232) +- Remove MSSQL support form Airflow core (#36514) +- Remove ``is_authorized_cluster_activity`` from auth manager (#36175) +- Create FAB provider and move FAB auth manager in it (#35926) -The following operators were affected: +Doc Only Changes +"""""""""""""""" +- Improve timetable documentation (#38505) +- Reorder OpenAPI Spec tags alphabetically (#38717) +- Update UI screenshots in the documentation (#38680, #38403, #38438, #38435) +- Remove section as it's no longer true with dataset expressions PR (#38370) +- Refactor DatasetOrTimeSchedule timetable docs (#37771) +- Migrate executor docs to respective providers (#37728) +- Add directive to render a list of URI schemes (#37700) +- Add doc page with providers deprecations (#37075) +- Add a cross reference to security policy (#37004) +- Improve AIRFLOW__WEBSERVER__BASE_URL docs (#37003) +- Update faq.rst with (hopefully) clearer description of start_date (#36846) +- Update public interface doc re operators (#36767) +- Add ``exception`` to templates ref list (#36656) +- Add auth manager interface as public interface (#36312) +- Reference fab provider documentation in Airflow documentation (#36310) +- Create auth manager documentation (#36211) +- Update permission docs (#36120) +- Docstring improvement to _covers_every_hour (#36081) +- Add note that task instance, dag and lifecycle listeners are non-experimental (#36376) -* DatastoreExportOperator (Backwards compatible) -* DatastoreImportOperator (Backwards compatible) -* KubernetesPodOperator (Not backwards compatible) -* SSHOperator (Not backwards compatible) -* WinRMOperator (Not backwards compatible) -* BashOperator (Not backwards compatible) -* DockerOperator (Not backwards compatible) -* 
SimpleHttpOperator (Not backwards compatible) +Airflow 2.8.4 (2024-03-25) +-------------------------- -See `AIRFLOW-3249 `_ for details +Significant Changes +^^^^^^^^^^^^^^^^^^^ -``airflow.operators.latest_only_operator.LatestOnlyOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +No significant changes. -In previous versions, the ``LatestOnlyOperator`` forcefully skipped all (direct and indirect) downstream tasks on its own. From this version on, the operator will **only skip direct downstream** tasks and the scheduler will handle skipping any further downstream dependencies. +Bug Fixes +""""""""" +- Fix incorrect serialization of ``FixedTimezone`` (#38139) +- Fix excessive permission changing for log task handler (#38164) +- Fix task instances list link (#38096) +- Fix a bug where scheduler heartrate parameter was not used (#37992) +- Add padding to prevent grid horizontal scroll overlapping tasks (#37942) +- Fix hash caching in ``ObjectStoragePath`` (#37769) -No change is needed if only the default trigger rule ``all_success`` is being used. +Miscellaneous +""""""""""""" +- Limit ``importlib_resources`` as it breaks ``pytest_rewrites`` (#38095, #38139) +- Limit ``pandas`` to ``<2.2`` (#37748) +- Bump ``croniter`` to fix an issue with 29 Feb cron expressions (#38198) -If the DAG relies on tasks with other trigger rules (e.g. ``all_done``\ ) being skipped by the ``LatestOnlyOperator``\ , adjustments to the DAG need to be made to accommodate the change in behaviour, i.e. with additional edges from the ``LatestOnlyOperator`` (see the sketch after this section). +Doc Only Changes +"""""""""""""""" +- Tell users what to do if their scanners find issues in the image (#37652) +- Add a section about debugging in Docker Compose with PyCharm (#37940) +- Update deferrable docs to clarify kwargs when trigger resumes operator (#38122) -The goal of this change is to achieve a more consistent and configurable cascading behaviour based on the ``BaseBranchOperator`` (see `AIRFLOW-2923 `_ and `AIRFLOW-1784 `_\ ). -Changes to the core Python API -"""""""""""""""""""""""""""""" +Airflow 2.8.3 (2024-03-11) +-------------------------- -We strive to ensure that there are no changes that may affect the end user, and your Python files, but this -release may contain changes that will require changes to your plugins, DAG files or other integrations. +Significant Changes +^^^^^^^^^^^^^^^^^^^ -Only changes unique to this provider are described here. You should still pay attention to the changes that -have been made to the core (including core operators) as they can affect the integration behavior -of this provider. +The smtp provider is now pre-installed when you install Airflow. (#37713) +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -This section describes the changes that have been made, and what you need to do to update your Python files.
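For the ``LatestOnlyOperator`` change described above, the suggested adjustment can look like this minimal sketch (the DAG id and task ids are made up; ``EmptyOperator`` stands in for real work):

.. code-block:: python

    from airflow import DAG
    from airflow.operators.empty import EmptyOperator
    from airflow.operators.latest_only import LatestOnlyOperator
    from airflow.utils.trigger_rule import TriggerRule

    with DAG("latest_only_example") as dag:
        latest_only = LatestOnlyOperator(task_id="latest_only")
        work = EmptyOperator(task_id="work")
        # An "all_done" task is no longer skipped transitively; give it a direct
        # edge from latest_only if it should still be skipped on non-latest runs.
        join = EmptyOperator(task_id="join", trigger_rule=TriggerRule.ALL_DONE)
        latest_only >> work >> join
        latest_only >> join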
+Bug Fixes +""""""""" +- Add "MENU" permission in auth manager (#37881) +- Fix external_executor_id being overwritten (#37784) +- Make more MappedOperator members modifiable (#37828) +- Set parsing context dag_id in dag test command (#37606) -Removed sub-package imports from ``airflow/__init__.py`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Miscellaneous +""""""""""""" +- Remove useless methods from security manager (#37889) +- Improve code coverage for TriggerRuleDep (#37680) +- The SMTP provider is now preinstalled when installing Airflow (#37713) +- Bump min versions of openapi validators (#37691) +- Properly include ``airflow_pre_installed_providers.txt`` artifact (#37679) -The imports ``LoggingMixin``\ , ``conf``\ , and ``AirflowException`` have been removed from ``airflow/__init__.py``. -All implicit references to these objects are no longer valid. To migrate, all usages of each old path must be -replaced with its corresponding new path. +Doc Only Changes +"""""""""""""""" +- Clarify lack of sync between workers and scheduler (#37913) +- Simplify some docs around airflow_local_settings (#37835) +- Add section about local settings configuration (#37829) +- Fix docs of ``BranchDayOfWeekOperator`` (#37813) +- Write to secrets store is not supported by design (#37814) +- ``ERD`` generating doc improvement (#37808) +- Update incorrect config value (#37706) +- Update security model to clarify Connection Editing user's capabilities (#37688) +- Fix ImportError on examples dags (#37571) -.. list-table:: - :header-rows: 1 - * - Old Path (Implicit Import) - - New Path (Explicit Import) - * - ``airflow.LoggingMixin`` - - ``airflow.utils.log.logging_mixin.LoggingMixin`` - * - ``airflow.conf`` - - ``airflow.configuration.conf`` - * - ``airflow.AirflowException`` - - ``airflow.exceptions.AirflowException`` +Airflow 2.8.2 (2024-02-26) +-------------------------- +Significant Changes +^^^^^^^^^^^^^^^^^^^ -Variables removed from the task instance context -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The ``allowed_deserialization_classes`` flag now follows a glob pattern (#36147). +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -The following variables were removed from the task instance context: +For example if one wants to add the class ``airflow.tests.custom_class`` to the +``allowed_deserialization_classes`` list, it can be done by writing the full class +name (``airflow.tests.custom_class``) or a pattern such as the ones used in glob +search (e.g., ``airflow.*``, ``airflow.tests.*``). +If you currently use a custom regexp path make sure to rewrite it as a glob pattern. -* end_date -* latest_date -* tables +Alternatively, if you still wish to match it as a regexp pattern, add it under the new +list ``allowed_deserialization_classes_regexp`` instead. -``airflow.contrib.utils.Weekday`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The audit_logs permissions have been updated for heightened security (#37501). +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Formerly the core code was maintained by the original creators - Airbnb. The code that was in the contrib -package was supported by the community. The project was passed to the Apache community and currently the -entire code is maintained by the community, so now the division has no justification, and it is only due -to historical reasons.
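The table above maps directly onto import statements; a quick before/after sketch:

.. code-block:: python

    # Before (implicit imports, no longer valid):
    # from airflow import LoggingMixin, conf, AirflowException

    # After (explicit imports):
    from airflow.utils.log.logging_mixin import LoggingMixin
    from airflow.configuration import conf
    from airflow.exceptions import AirflowException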
+This was done under the policy that we do not want users like Viewer, Ops, +and other users apart from Admin to have access to audit_logs. The intention behind +this change is to restrict users with fewer permissions from viewing user details +like First Name, Email etc. from the audit_logs when they are not permitted to. -To clean up, ``Weekday`` enum has been moved from ``airflow.contrib.utils`` into ``airflow.utils`` module. +The impact of this change is that the existing users with non-admin rights won't be able +to view or access the audit_logs, both from the Browse tab and from the DAG run. -``airflow.models.connection.Connection`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``AirflowTimeoutError`` is no longer ``except`` by default through ``Exception`` (#35653). +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -The connection module has new deprecated methods: +The ``AirflowTimeoutError`` now inherits ``BaseException`` instead of +``AirflowException``->``Exception``. +See https://docs.python.org/3/library/exceptions.html#exception-hierarchy +This prevents code catching ``Exception`` from accidentally +catching ``AirflowTimeoutError`` and continuing to run. +``AirflowTimeoutError`` is an explicit intent to cancel the task, and should not +be caught in attempts to handle the error and return some default value. -* ``Connection.parse_from_uri`` -* ``Connection.log_info`` -* ``Connection.debug_info`` +Catching ``AirflowTimeoutError`` is still possible by explicitly ``except``ing +``AirflowTimeoutError`` or ``BaseException``. +This is discouraged, as it may allow the code to continue running even after +such cancellation requests. +Code that previously depended on performing strict cleanup in every situation +after catching ``Exception`` is advised to use ``finally`` blocks or +context managers, to perform only the cleanup and then automatically +re-raise the exception. +See similar considerations about catching ``KeyboardInterrupt`` in +https://docs.python.org/3/library/exceptions.html#KeyboardInterrupt -and one deprecated function: +Bug Fixes +""""""""" +- Sort dag processing stats by last_runtime (#37302) +- Allow pre-population of trigger form values via URL parameters (#37497) +- Base date for fetching dag grid view must include selected run_id (#34887) +- Check permissions for ImportError (#37468) +- Move ``IMPORT_ERROR`` from DAG related permissions to view related permissions (#37292) +- Change ``AirflowTaskTimeout`` to inherit ``BaseException`` (#35653) +- Revert "Fix future DagRun rarely triggered by race conditions when max_active_runs reached its upper limit. (#31414)" (#37596) +- Change margin to padding so first task can be selected (#37527) +- Fix Airflow serialization for ``namedtuple`` (#37168) +- Fix bug with clicking url-unsafe tags (#37395) +- Set deterministic and new getter for ``Treeview`` function (#37162) +- Fix permissions of parent folders for log file handler (#37310) +- Fix permission check on DAGs when ``access_entity`` is specified (#37290) +- Fix the value of ``dateTimeAttrFormat`` constant (#37285) +- Resolve handler close race condition at triggerer shutdown (#37206) +- Fixing status icon alignment for various views (#36804) +- Remove superfluous ``@Sentry.enrich_errors`` (#37002) +- Use execution_date= param as a backup to base date for grid view (#37018) +- Handle SystemExit raised in the task.
(#36986) +- Revoking audit_log permission from all users except admin (#37501) +- Fix broken regex for allowed_deserialization_classes (#36147) +- Fix the bug that affected the DAG end date. (#36144) +- Adjust node width based on task name length (#37254) +- fix: PythonVirtualenvOperator crashes if any python_callable function is defined in the same source as DAG (#37165) +- Fix collapsed grid width, line up selected bar with gantt (#37205) +- Adjust graph node layout (#37207) +- Revert the sequence of initializing configuration defaults (#37155) +- Displaying "actual" try number in TaskInstance view (#34635) +- Bugfix Triggering DAG with parameters is mandatory when show_trigger_form_if_no_params is enabled (#37063) +- Secret masker ignores passwords with special chars (#36692) +- Fix DagRuns with UPSTREAM_FAILED tasks get stuck in the backfill. (#36954) +- Disable ``dryrun`` auto-fetch (#36941) +- Fix copy button on a DAG run's config (#36855) +- Fix bug introduced by replacing spaces by + in run_id (#36877) +- Fix webserver always redirecting to home page if user was not logged in (#36833) +- REST API set description on POST to ``/variables`` endpoint (#36820) +- Sanitize the conn_id to disallow potential script execution (#32867) +- Fix task id copy button copying wrong id (#34904) +- Fix security manager inheritance in fab provider (#36538) +- Avoid ``pendulum.from_timestamp`` usage (#37160) -* ``parse_netloc_to_hostname`` +Miscellaneous +""""""""""""" +- Install latest docker ``CLI`` instead of specific one (#37651) +- Bump ``undici`` from ``5.26.3`` to ``5.28.3`` in ``/airflow/www`` (#37493) +- Add Python ``3.12`` exclusions in ``providers/pyproject.toml`` (#37404) +- Remove ``markdown`` from core dependencies (#37396) +- Remove unused ``pageSize`` method. (#37319) +- Add more-itertools as dependency of common-sql (#37359) +- Replace other ``Python 3.11`` and ``3.12`` deprecations (#37478) +- Include ``airflow_pre_installed_providers.txt`` into ``sdist`` distribution (#37388) +- Turn Pydantic into an optional dependency (#37320) +- Limit ``universal-pathlib to < 0.2.0`` (#37311) +- Allow running airflow against sqlite in-memory DB for tests (#37144) +- Add description to ``queue_when`` (#36997) +- Updated ``config.yml`` for environment variable ``sql_alchemy_connect_args`` (#36526) +- Bump min version of ``Alembic to 1.13.1`` (#36928) +- Limit ``flask-session`` to ``<0.6`` (#36895) -Previously, users could create a connection object in two ways +Doc Only Changes +"""""""""""""""" +- Fix upgrade docs to reflect true ``CLI`` flags available (#37231) +- Fix a bug in fundamentals doc (#37440) +- Add redirect for deprecated page (#37384) +- Fix the ``otel`` config descriptions (#37229) +- Update ``Objectstore`` tutorial with ``prereqs`` section (#36983) +- Add more precise description on avoiding generic ``package/module`` names (#36927) +- Add airflow version substitution into Docker Compose Howto (#37177) +- Add clarification about DAG author capabilities to security model (#37141) +- Move docs for cron basics to Authoring and Scheduling section (#37049) +- Link to release notes in the upgrade docs (#36923) +- Prevent templated field logic checks in ``__init__`` of operators automatically (#33786) -.. code-block:: - conn_1 = Connection(conn_id="conn_a", uri="mysql://AAA/") - # or - conn_2 = Connection(conn_id="conn_a") - conn_2.parse_uri(uri="mysql://AAA/") +Airflow 2.8.1 (2024-01-19) +-------------------------- -Now the second way is not supported. 
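A short sketch of the still-supported way to build a connection object (the connection id and URI are the same made-up examples as above):

.. code-block:: python

    from airflow.models.connection import Connection

    # Supported: pass the URI directly to the constructor.
    conn = Connection(conn_id="conn_a", uri="mysql://AAA/")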
+Significant Changes +^^^^^^^^^^^^^^^^^^^ -``Connection.log_info`` and ``Connection.debug_info`` methods have been deprecated. Read each Connection field individually or use the -default representation (\ ``__repr__``\ ). +Target version for core dependency ``pendulum`` package set to 3 (#36281). +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +Support for pendulum 2.1.2 will be kept for a while, presumably until the next feature version of Airflow. +It is advised to upgrade user code to use pendulum 3 as soon as possible. -The old methods still work but may be removed at any time. The changes are intended to remove methods -that are rarely used. +Pendulum 3 introduced some subtle incompatibilities that your code might rely on - for example +default rendering of dates is missing ``T`` in the rendered date representation, which is not ISO8601 +compliant. If you rely on the default rendering of dates, you might need to adjust your code to use +the ``isoformat()`` method to render dates in ISO8601 format. -``airflow.models.dag.DAG.create_dagrun`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Airflow packaging specification follows modern Python packaging standards (#36537). +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +We standardized Airflow dependency configuration to follow the latest developments in Python packaging by +using ``pyproject.toml``. Airflow is now compliant with those accepted PEPs: -DAG.create_dagrun now accepts ``run_type`` and does not require ``run_id``. -This change is caused by adding a ``run_type`` column to ``DagRun``. +* `PEP-440 Version Identification and Dependency Specification `__ +* `PEP-517 A build-system independent format for source trees `__ +* `PEP-518 Specifying Minimum Build System Requirements for Python Projects `__ +* `PEP-561 Distributing and Packaging Type Information `__ +* `PEP-621 Storing project metadata in pyproject.toml `__ +* `PEP-660 Editable installs for pyproject.toml based builds (wheel based) `__ +* `PEP-685 Comparison of extra names for optional distribution dependencies `__ -Previous signature: +We also implement support for multiple license files, coming from a draft, not yet accepted (but supported by ``hatchling``) PEP: +* `PEP 639 Improving License Clarity with Better Package Metadata `__ -.. code-block:: python +This has almost no noticeable impact on users if they are using modern Python packaging and development tools; generally +speaking, Airflow should behave as it did before when installing it from PyPI and it should be much easier to install +it for development purposes using ``pip install -e ".[devel]"``. - def create_dagrun( - self, - run_id, - state, - execution_date=None, - start_date=None, - external_trigger=False, - conf=None, - session=None, - ): ... +The differences from the user side are: -Current: +* Airflow extras are now normalized to ``-`` (following PEP-685) instead of ``_`` and ``.`` + (as it was before in some extras). When you install airflow with such extras (for example ``dbt.core`` or + ``all_dbs``) you should use ``-`` instead of ``_`` and ``.``. -.. code-block:: python +In most modern tools this will work in a backwards-compatible way, but in some old versions of those tools you might need to +replace ``_`` and ``.`` with ``-``. You can also get warnings that the extra you are installing does not exist - but usually +this warning is harmless and the extra is installed anyway.
It is, however, recommended to change to use ``-`` in extras in your dependency +specifications for all Airflow extras. - def create_dagrun( - self, - state, - execution_date=None, - run_id=None, - start_date=None, - external_trigger=False, - conf=None, - run_type=None, - session=None, - ): ... +* Released airflow package does not contain ``devel``, ``devel-*``, ``doc`` and ``docs-gen`` extras. + Those extras are only available when you install Airflow from sources in ``--editable`` mode. This is + because those extras are only used for development and documentation building purposes and are not needed + when you install Airflow for production use. Those dependencies had unspecified and varying behaviour for + released packages anyway and you were not supposed to use them in released packages. -If the user provides ``run_id``, then the ``run_type`` will be derived from it by checking the prefix; allowed types -: ``manual``\ , ``scheduled``\ , ``backfill`` (defined by ``airflow.utils.types.DagRunType``\ ). +* The ``all`` and ``all-*`` extras were not always working correctly when installing Airflow using constraints + because they were also considered as development-only dependencies. With this change, those dependencies are + now properly handling constraints and they will install properly with constraints, pulling the right set + of providers and dependencies when constraints are used. -If the user provides ``run_type`` and ``execution_date``, then ``run_id`` is constructed as -``{run_type}__{execution_date.isoformat()}``. +Graphviz dependency is now an optional one, not required one (#36647). +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +The ``graphviz`` dependency has been problematic as an Airflow required dependency - especially for +ARM-based installations. Graphviz packages require binary graphviz libraries - which is already a +limitation, but they also require graphviz Python bindings to be built and installed. +This does not work for older Linux installations but - more importantly - when you try to install +Graphviz libraries for Python 3.8, 3.9 for ARM M1 MacBooks, the packages fail to install because +Python bindings compilation for M1 can only work for Python 3.10+. -Airflow should construct dagruns using ``run_type`` and ``execution_date``\ ; creation using -``run_id`` is preserved for user actions. +This is not a breaking change technically - the CLIs to render the DAGs are still there, and if you +already have graphviz installed, it will continue working as it did before. The only case where it +does not work is when you do not have graphviz installed; it will raise an error and inform you that you need it. -``airflow.models.dagrun.DagRun`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Graphviz will remain installed for most users: -Use DagRunType.SCHEDULED.value instead of DagRun.ID_PREFIX +* the Airflow Image will still contain the graphviz library, because + it is added there as an extra +* when a previous version of Airflow has been installed already, the + graphviz library is already installed there and Airflow will + continue working as it did -All the run_id prefixes for different kinds of DagRuns have been grouped into a single -enum in ``airflow.utils.types.DagRunType``. +The only change will be a fresh installation of a new version of Airflow, where graphviz will +need to be specified as an extra or installed separately in order to enable the DAG rendering option.
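A hedged sketch of the new ``create_dagrun`` calling convention described above (it assumes an initialized metadata database; the DAG id and date are made-up examples):

.. code-block:: python

    import pendulum
    from airflow import DAG
    from airflow.utils.state import DagRunState
    from airflow.utils.types import DagRunType

    dag = DAG("create_dagrun_example")
    execution_date = pendulum.datetime(2024, 1, 1, tz="UTC")
    # run_id is derived for you as f"{run_type}__{execution_date.isoformat()}"
    dagrun = dag.create_dagrun(
        state=DagRunState.QUEUED,
        execution_date=execution_date,
        run_type=DagRunType.MANUAL,
    )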
-Previously, they were defined in various places, for example as ``ID_PREFIX`` class variables for -``DagRun``\ , ``BackfillJob``\ , and in the ``_trigger_dag`` function. +Bug Fixes +""""""""" +- Fix airflow-scheduler exiting with code 0 on exceptions (#36800) +- Fix Callback exception when a removed task is the last one in the ``taskinstance`` list (#36693) +- Allow anonymous user edit/show resource when set ``AUTH_ROLE_PUBLIC=admin`` (#36750) +- Better error message when sqlite URL uses relative path (#36774) +- Explicit string cast required to force integer-type run_ids to be passed as strings instead of integers (#36756) +- Add log lookup exception for empty ``op`` subtypes (#35536) +- Remove unused index on task instance (#36737) +- Fix check on subclass for ``typing.Union`` in ``_infer_multiple_outputs`` for Python 3.10+ (#36728) +- Make sure ``multiple_outputs`` is inferred correctly even when using ``TypedDict`` (#36652) +- Add back FAB constant in legacy security manager (#36719) +- Fix AttributeError when using ``Dagrun.update_state`` (#36712) +- Do not let ``EventsTimetable`` schedule past events if ``catchup=False`` (#36134) +- Support encryption for triggers parameters (#36492) +- Fix the type hint for ``tis_query`` in ``_process_executor_events`` (#36655) +- Redirect to index when user does not have permission to access a page (#36623) +- Avoid using dict as default value in ``call_regular_interval`` (#36608) +- Remove option to set a task instance to running state in UI (#36518) +- Fix details tab not showing when using dynamic task mapping (#36522) +- Raise error when ``DagRun`` fails while running ``dag test`` (#36517) +- Refactor ``_manage_executor_state`` by refreshing TIs in batch (#36502) +- Add flask config: ``MAX_CONTENT_LENGTH`` (#36401) +- Fix get_leaves calculation for teardown in nested group (#36456) +- Stop serializing timezone-naive datetime to timezone-aware datetime with UTC tz (#36379) +- Make ``kubernetes`` decorator type annotation consistent with operator (#36405) +- Fix Webserver returning 500 for POST requests to ``api/dag/*/dagrun`` from anonymous user (#36275) +- Fix the required access for get_variable endpoint (#36396) +- Fix datetime reference in ``DAG.is_fixed_time_schedule`` (#36370) +- Fix AirflowSkipException message raised by BashOperator (#36354) +- Allow PythonVirtualenvOperator.skip_on_exit_code to be zero (#36361) +- Increase width of execution_date input in trigger.html (#36278) +- Fix logging for pausing DAG (#36182) +- Stop deserializing pickle when enable_xcom_pickling is False (#36255) +- Check DAG read permission before accessing DAG code (#36257) +- Enable mark task as failed/success always (#36254) +- Create latest log dir symlink as relative link (#36019) +- Fix Python-based decorators templating (#36103) -Was: +Miscellaneous +""""""""""""" +- Rename concurrency label to max active tasks (#36691) +- Restore function scoped ``httpx`` import in file_task_handler for performance (#36753) +- Add support of Pendulum 3 (#36281) +- Standardize airflow build process and switch to ``hatchling`` build backend (#36537) +- Get rid of ``pyarrow-hotfix`` for ``CVE-2023-47248`` (#36697) +- Make ``graphviz`` dependency optional (#36647) +- Announce MSSQL support end in Airflow 2.9.0, add migration script hints (#36509) +- Set min ``pandas`` dependency to 1.2.5 for all providers and airflow (#36698) +- Bump follow-redirects from 1.15.3 to 1.15.4 in ``/airflow/www`` (#36700) +- Provide the logger_name param to base hook in order to override the logger name
(#36674)
+- Fix run type icon alignment with run type text (#36616)
+- Follow BaseHook connection fields method signature in FSHook (#36444)
+- Remove redundant ``docker`` decorator type annotations (#36406)
+- Straighten typing in workday timetable (#36296)
+- Use ``batch_is_authorized_dag`` to check if user has permission to read DAGs (#36279)
+- Replace deprecated get_accessible_dag_ids and use get_readable_dags in get_dag_warnings (#36256)
-.. code-block:: pycon
+Doc Only Changes
+""""""""""""""""
+- Metrics tagging documentation (#36627)
+- In docs use logical_date instead of deprecated execution_date (#36654)
+- Add section about live-upgrading Airflow (#36637)
+- Replace ``numpy`` example with practical exercise demonstrating top-level code (#35097)
+- Improve and add more complete description in the architecture diagrams (#36513)
+- Improve the error message displayed when there is a webserver error (#36570)
+- Update ``dags.rst`` with information on DAG pausing (#36540)
+- Update installation prerequisites after upgrading to Debian Bookworm (#36521)
+- Add description on the ways how users should approach DB monitoring (#36483)
+- Add branching based on mapped task group example to dynamic-task-mapping.rst (#36480)
+- Add further details to replacement documentation (#36485)
+- Use cards when describing priority weighting methods (#36411)
+- Update ``metrics.rst`` for param ``dagrun.schedule_delay`` (#36404)
+- Update admonitions in Python operator doc to reflect sentiment (#36340)
+- Improve audit_logs.rst (#36213)
+- Remove Redshift mention from the list of managed Postgres backends (#36217)
-   >>> from airflow.models.dagrun import DagRun
-   >>> DagRun.ID_PREFIX
-   scheduled__
+Airflow 2.8.0 (2023-12-18)
+--------------------------
-Replaced by:
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-.. code-block:: pycon
+Raw HTML code in DAG docs and DAG params descriptions is disabled by default (#35460)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+To ensure that no malicious JavaScript can be injected via DAG descriptions or trigger UI forms by DAG authors,
+a new parameter ``webserver.allow_raw_html_descriptions`` was added with a default value of ``False``.
+If you trust your DAG authors' code and want to allow using raw HTML in DAG descriptions and params, you can restore the previous
+behavior by setting the configuration value to ``True``.
-   >>> from airflow.utils.types import DagRunType
-   >>> DagRunType.SCHEDULED.value
-   scheduled
+To ensure Airflow is secure by default, the raw HTML support in the trigger UI has been superseded by markdown support via
+the ``description_md`` attribute. If you have been using ``description_html``, please migrate to ``description_md``.
+The ``custom_html_form`` is now deprecated.
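+
+As a minimal sketch of the migration to ``description_md`` (the ``Param`` name and
+values here are illustrative, not part of the change itself):
+
+.. code-block:: python
+
+    from airflow.models.param import Param
+
+    params = {
+        "threshold": Param(
+            42,
+            type="integer",
+            # previously: description_html="<b>Threshold</b> used by the job"
+            description_md="**Threshold** used by the job",
+        ),
+    }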
-``airflow.utils.file.TemporaryDirectory``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+New Features
+""""""""""""
+- AIP-58: Add Airflow ObjectStore (AFS) (`AIP-58 `_)
+- Add XCom tab to Grid (#35719)
+- Add "literal" wrapper to disable field templating (#35017)
+- Add task context logging feature to allow forwarding messages to task logs (#32646, #32693, #35857)
+- Add Listener hooks for Datasets (#34418, #36247)
+- Allow override of navbar text color (#35505)
+- Add lightweight serialization for deltalake tables (#35462)
+- Add support for serialization of iceberg tables (#35456)
+- ``prev_end_date_success`` method access (#34528)
+- Add task parameter to set custom logger name (#34964)
+- Add pyspark decorator (#35247)
+- Add trigger as a valid option for the db clean command (#34908)
+- Add decorators for external and venv python branching operators (#35043)
+- Allow PythonVenvOperator using other index url (#33017)
+- Add Python Virtualenv Operator Caching (#33355)
+- Introduce a generic export for containerized executor logging (#34903)
+- Add ability to clear downstream tis in ``List Task Instances`` view (#34529)
+- Attribute ``clear_number`` to track DAG run being cleared (#34126)
+- Add BranchPythonVirtualenvOperator (#33356)
+- Add CLI notification commands to providers (#33116)
+- Use dropdown instead of buttons when there are more than 10 retries in log tab (#36025)
-We removed ``airflow.utils.file.TemporaryDirectory``.
-Since Airflow dropped support for Python < 3.5, there's no need to have this custom
-implementation of ``TemporaryDirectory`` because the same functionality is provided by
-``tempfile.TemporaryDirectory``.
+Improvements
+""""""""""""
+- Add ``multiselect`` to run state in grid view (#35403)
+- Fix warning message in ``Connection.get_hook`` in case of ImportError (#36005)
+- Add processor_subdir to import_error table to handle multiple dag processors (#35956)
+- Consolidate the call of change_state to fail or success in the core executors (#35901)
+- Relax mandatory requirement for start_date when schedule=None (#35356)
+- Use ExitStack to manage mutation of secrets_backend_list in dag.test (#34620)
+- Improved visibility of tasks in ActionModal for ``taskinstance`` (#35810)
+- Create directories based on ``AIRFLOW_CONFIG`` path (#35818)
+- Implements ``JSON-string`` connection representation generator (#35723)
+- Move ``BaseOperatorLink`` into the separate module (#35032)
+- Set mark_end_on_close after set_context (#35761)
+- Move external logs links to top of react logs page (#35668)
+- Change terminal mode to ``cbreak`` in ``execute_interactive`` and handle ``SIGINT`` (#35602)
+- Make raw HTML descriptions configurable (#35460)
+- Allow email field to be templated (#35546)
+- Hide logical date and run id in trigger UI form (#35284)
+- Improved instructions for adding dependencies in TaskFlow (#35406)
+- Add optional exit code to list import errors (#35378)
+- Limit query result on DB rather than client in ``synchronize_log_template`` function (#35366)
+- Allow description to be passed in when using variables CLI (#34791)
+- Allow optional defaults in required fields with manual triggered dags (#31301)
+- Permitting airflow kerberos to run in different modes (#35146)
+- Refactor commands to unify daemon context handling (#34945)
+- Add extra fields to plugins endpoint (#34913)
+- Add description to pools view (#34862)
+- Move cli's Connection export and Variable export command print logic to a
separate function (#34647)
+- Extract and reuse get_kerberos_principle func from get_kerberos_principle (#34936)
+- Change type annotation for ``BaseOperatorLink.operators`` (#35003)
+- Optimise and migrate to ``SA2-compatible`` syntax for TaskReschedule (#33720)
+- Consolidate the permissions name in SlaMissModelView (#34949)
+- Add debug log saying what's being run to ``EventScheduler`` (#34808)
+- Increase log reader stream loop sleep duration to 1 second (#34789)
+- Resolve pydantic deprecation warnings re ``update_forward_refs`` (#34657)
+- Unify mapped task group lookup logic (#34637)
+- Allow filtering event logs by attributes (#34417)
+- Make connection login and password TEXT (#32815)
+- Ban import ``Dataset`` from ``airflow`` package in codebase (#34610)
+- Use ``airflow.datasets.Dataset`` in examples and tests (#34605)
+- Enhance task status visibility (#34486)
+- Simplify DAG trigger UI (#34567)
+- Ban import AirflowException from airflow (#34512)
+- Add descriptions for airflow resource config parameters (#34438)
+- Simplify trigger name expression (#34356)
+- Move definition of Pod*Exceptions to pod_generator (#34346)
+- Add deferred tasks to the cluster_activity view Pools Slots (#34275)
+- heartbeat failure log message fix (#34160)
+- Rename variables for dag runs (#34049)
+- Clarify new_state in OpenAPI spec (#34056)
+- Remove ``version`` top-level element from docker compose files (#33831)
+- Remove generic trigger cancelled error log (#33874)
+- Use ``NOT EXISTS`` subquery instead of ``tuple_not_in_condition`` (#33527)
+- Allow context key args to not provide a default (#33430)
+- Order triggers by - TI priority_weight when assign unassigned triggers (#32318)
+- Add metric ``triggerer_heartbeat`` (#33320)
+- Allow ``airflow variables export`` to print to stdout (#33279)
+- Workaround failing deadlock when running backfill (#32991)
+- add dag_run_ids and task_ids filter for the batch task instance API endpoint (#32705)
+- Configurable health check threshold for triggerer (#33089)
+- Rework provider manager to treat Airflow core hooks like other provider hooks (#33051)
+- Ensure DAG-level references are filled on unmap (#33083)
+- Affix webserver access_denied warning to be configurable (#33022)
+- Add support for arrays of different data types in the Trigger Form UI (#32734)
+- Add a mechanism to warn if executors override existing CLI commands (#33423)
-Now, instead of ``from airflow.utils.file import TemporaryDirectory``, users should
-do ``from tempfile import TemporaryDirectory``. Both context managers provide the same
-interface, thus no additional changes should be required.
+Bug Fixes
+"""""""""
+- Account for change in UTC offset when calculating next schedule (#35887)
+- Add read access to pools for viewer role (#35352)
+- Fix gantt chart queued duration when queued_dttm is greater than start_date for deferred tasks (#35984)
+- Avoid crashing container when directory is not found on rm (#36050)
+- Update ``reset_user_sessions`` to work from either CLI or web (#36056)
+- Fix UI Grid error when DAG has been removed.
(#36028)
+- Change Trigger UI to use HTTP POST in web ui (#36026)
+- Fix airflow db shell needing an extra key press to exit (#35982)
+- Change dag grid ``overscroll`` behaviour to auto (#35717)
+- Run triggers inline with dag test (#34642)
+- Add ``borderWidthRight`` to grid for Firefox ``scrollbar`` (#35346)
+- Fix for infinite recursion due to secrets_masker (#35048)
+- Fix write ``processor_subdir`` in serialized_dag table (#35661)
+- Reload configuration for standalone dag file processor (#35725)
+- Long custom operator name overflows in graph view (#35382)
+- Add try_number to extra links query (#35317)
+- Prevent assignment of non JSON serializable values to DagRun.conf dict (#35096)
+- Numeric values in DAG details are incorrectly rendered as timestamps (#35538)
+- Fix Scheduler and triggerer crashes in daemon mode when statsd metrics are enabled (#35181)
+- Infinite UI redirection loop after deactivating an active user (#35486)
+- Bug fix fetch_callback of Partial Subset DAG (#35256)
+- Fix DagRun data interval for DeltaDataIntervalTimetable (#35391)
+- Fix query in ``get_dag_by_pickle`` util function (#35339)
+- Fix TriggerDagRunOperator failing to trigger subsequent runs when reset_dag_run=True (#35429)
+- Fix weight_rule property type in ``mappedoperator`` (#35257)
+- Bugfix/prevent concurrency with cached venv (#35258)
+- Fix dag serialization (#34042)
+- Fix py/url-redirection by replacing request.referrer by get_redirect() (#34237)
+- Fix updating variables during variable imports (#33932)
+- Use Literal from airflow.typing_compat in Airflow core (#33821)
+- Always use ``Literal`` from ``typing_extensions`` (#33794)
-``airflow.AirflowMacroPlugin``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Miscellaneous
+"""""""""""""
+- Change default MySQL client to MariaDB (#36243)
+- Mark daskexecutor provider as removed (#35965)
+- Bump FAB to ``4.3.10`` (#35991)
+- Rename ``Connection.to_json_dict`` to ``Connection.to_dict`` (#35894)
+- Upgrade to Pydantic v2 (#35551)
+- Bump ``moto`` version to ``>= 4.2.9`` (#35687)
+- Use ``pyarrow-hotfix`` to mitigate CVE-2023-47248 (#35650)
+- Bump ``axios`` from ``0.26.0 to 1.6.0`` in ``/airflow/www/`` (#35624)
+- Make docker decorator's type annotation consistent with operator (#35568)
+- Add default to ``navbar_text_color`` and ``rm`` condition in style (#35553)
+- Avoid initiating session twice in ``dag_next_execution`` (#35539)
+- Work around typing issue in examples and providers (#35494)
+- Enable ``TCH004`` and ``TCH005`` rules (#35475)
+- Humanize log output about retrieved DAG(s) (#35338)
+- Switch from Black to Ruff formatter (#35287)
+- Upgrade to Flask Application Builder 4.3.9 (#35085)
+- D401 Support (#34932, #34933)
+- Use requires_access to check read permission on dag instead of checking it explicitly (#34940)
+- Deprecate lazy import ``AirflowException`` from airflow (#34541)
+- View util refactoring on mapped stuff use cases (#34638)
+- Bump ``postcss`` from ``8.4.25 to 8.4.31`` in ``/airflow/www`` (#34770)
+- Refactor Sqlalchemy queries to 2.0 style (#34763, #34665, #32883, #35120)
+- Change to lazy loading of io in pandas serializer (#34684)
+- Use ``airflow.models.dag.DAG`` in examples (#34617)
+- Use airflow.exceptions.AirflowException in core (#34510)
+- Check that dag_ids passed in request are consistent (#34366)
+- Refactors to make code better (#34278, #34113, #34110, #33838, #34260, #34409, #34377, #34350)
+- Suspend qubole provider (#33889)
+- Generate Python API docs for
Google ADS (#33814)
+- Improve importing in modules (#33812, #33811, #33810, #33806, #33807, #33805, #33804, #33803,
+  #33801, #33799, #33800, #33797, #33798, #34406, #33808)
+- Upgrade Elasticsearch to 8 (#33135)
-We removed the ``airflow.AirflowMacroPlugin`` class. The class was present in the airflow package but had apparently not been used since 2015.
+Doc Only Changes
+""""""""""""""""
+- Add support for tabs (and other UX components) to docs (#36041)
+- Replace architecture diagram of Airflow with diagrams-generated one (#36035)
+- Add the section describing the security model of DAG Author capabilities (#36022)
+- Enhance docs for zombie tasks (#35825)
+- Reflect drop/add support of DB Backends versions in documentation (#35785)
+- More detail on mandatory task arguments (#35740)
+- Indicate usage of the ``re2`` regex engine in the .airflowignore documentation. (#35663)
+- Update ``best-practices.rst`` (#35692)
+- Update ``dag-run.rst`` to mention Airflow's support for extended cron syntax through croniter (#35342)
+- Update ``webserver.rst`` to include information of supported OAuth2 providers (#35237)
+- Add back dag_run to docs (#35142)
+- Fix ``rst`` code block format (#34708)
+- Add typing to concrete taskflow examples (#33417)
+- Add concrete examples for accessing context variables from TaskFlow tasks (#33296)
+- Fix links in security docs (#33329)
-``airflow.settings.CONTEXT_MANAGER_DAG``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-CONTEXT_MANAGER_DAG was removed from settings. Its role has been taken by ``DagContext`` in
-``airflow.models.dag``. One of the reasons was that settings should be static rather than store
-dynamic context from the DAG, but the main one is that moving the context out of settings allowed us to
-untangle cyclic imports between DAG, BaseOperator, SerializedDAG and SerializedBaseOperator, which was
-part of AIRFLOW-6010.
+Airflow 2.7.3 (2023-11-06)
+--------------------------
-``airflow.utils.log.logging_mixin.redirect_stderr``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-``airflow.utils.log.logging_mixin.redirect_stdout``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+No significant changes.
-The functions ``redirect_stderr`` and ``redirect_stdout`` from the ``airflow.utils.log.logging_mixin`` module have
-been deleted because they can easily be replaced by the standard library.
-The standard library functions are more flexible and cover more use cases.
+Bug Fixes
+"""""""""
+- Fix pre-mature evaluation of tasks in mapped task group (#34337)
+- Add TriggerRule missing value in rest API (#35194)
+- Fix Scheduler crash looping when dagrun creation fails (#35135)
+- Fix test connection with ``codemirror`` and extra (#35122)
+- Fix usage of cron-descriptor since BC in v1.3.0 (#34836)
+- Fix ``get_plugin_info`` for class based listeners.
(#35022) +- Some improvements/fixes for dag_run and task_instance endpoints (#34942) +- Fix the dags count filter in webserver home page (#34944) +- Return only the TIs of the readable dags when ~ is provided as a dag_id (#34939) +- Fix triggerer thread crash in daemon mode (#34931) +- Fix wrong plugin schema (#34858) +- Use DAG timezone in TimeSensorAsync (#33406) +- Mark tasks with ``all_skipped`` trigger rule as ``skipped`` if any task is in ``upstream_failed`` state (#34392) +- Add read only validation to read only fields (#33413) -The code below +Misc/Internal +""""""""""""" +- Improve testing harness to separate DB and non-DB tests (#35160, #35333) +- Add pytest db_test markers to our tests (#35264) +- Add pip caching for faster build (#35026) +- Upper bound ``pendulum`` requirement to ``<3.0`` (#35336) +- Limit ``sentry_sdk`` to ``1.33.0`` (#35298) +- Fix subtle bug in mocking processor_agent in our tests (#35221) +- Bump ``@babel/traverse`` from ``7.16.0 to 7.23.2`` in ``/airflow/www`` (#34988) +- Bump ``undici`` from ``5.19.1 to 5.26.3`` in ``/airflow/www`` (#34971) +- Remove unused set from ``SchedulerJobRunner`` (#34810) +- Remove warning about ``max_tis per query > parallelism`` (#34742) +- Improve modules import in Airflow core by moving some of them into a type-checking block (#33755) +- Fix tests to respond to Python 3.12 handling of utcnow in sentry-sdk (#34946) +- Add ``connexion<3.0`` upper bound (#35218) +- Limit Airflow to ``< 3.12`` (#35123) +- update moto version (#34938) +- Limit WTForms to below ``3.1.0`` (#34943) -.. code-block:: python +Doc Only Changes +"""""""""""""""" +- Fix variables substitution in Airflow Documentation (#34462) +- Added example for defaults in ``conn.extras`` (#35165) +- Update datasets.rst issue with running example code (#35035) +- Remove ``mysql-connector-python`` from recommended MySQL driver (#34287) +- Fix syntax error in task dependency ``set_downstream`` example (#35075) +- Update documentation to enable test connection (#34905) +- Update docs errors.rst - Mention sentry "transport" configuration option (#34912) +- Update dags.rst to put SubDag deprecation note right after the SubDag section heading (#34925) +- Add info on getting variables and config in custom secrets backend (#34834) +- Document BaseExecutor interface in more detail to help users in writing custom executors (#34324) +- Fix broken link to ``airflow_local_settings.py`` template (#34826) +- Fixes python_callable function assignment context kwargs example in params.rst (#34759) +- Add missing multiple_outputs=True param in the TaskFlow example (#34812) +- Remove extraneous ``'>'`` in provider section name (#34813) +- Fix imports in extra link documentation (#34547) - import logging - from airflow.utils.log.logging_mixin import redirect_stderr, redirect_stdout - logger = logging.getLogger("custom-logger") - with redirect_stdout(logger, logging.INFO), redirect_stderr(logger, logging.WARN): - print("I love Airflow") +Airflow 2.7.2 (2023-10-12) +-------------------------- -can be replaced by the following code: +Significant Changes +^^^^^^^^^^^^^^^^^^^ -.. 
code-block:: python +No significant changes - from contextlib import redirect_stdout, redirect_stderr - import logging - from airflow.utils.log.logging_mixin import StreamLogWriter +Bug Fixes +""""""""" +- Check if the lower of provided values are sensitives in config endpoint (#34712) +- Add support for ZoneInfo and generic UTC to fix datetime serialization (#34683, #34804) +- Fix AttributeError: 'Select' object has no attribute 'count' during the airflow db migrate command (#34348) +- Make dry run optional for patch task instance (#34568) +- Fix non deterministic datetime deserialization (#34492) +- Use iterative loop to look for mapped parent (#34622) +- Fix is_parent_mapped value by checking if any of the parent ``taskgroup`` is mapped (#34587) +- Avoid top-level airflow import to avoid circular dependency (#34586) +- Add more exemptions to lengthy metric list (#34531) +- Fix dag warning endpoint permissions (#34355) +- Fix task instance access issue in the batch endpoint (#34315) +- Correcting wrong time showing in grid view (#34179) +- Fix www ``cluster_activity`` view not loading due to ``standaloneDagProcessor`` templating (#34274) +- Set ``loglevel=DEBUG`` in 'Not syncing ``DAG-level`` permissions' (#34268) +- Make param validation consistent for DAG validation and triggering (#34248) +- Ensure details panel is shown when any tab is selected (#34136) +- Fix issues related to ``access_control={}`` (#34114) +- Fix not found ``ab_user`` table in the CLI session (#34120) +- Fix FAB-related logging format interpolation (#34139) +- Fix query bug in ``next_run_datasets_summary`` endpoint (#34143) +- Fix for TaskGroup toggles for duplicated labels (#34072) +- Fix the required permissions to clear a TI from the UI (#34123) +- Reuse ``_run_task_session`` in mapped ``render_template_fields`` (#33309) +- Fix scheduler logic to plan new dag runs by ignoring manual runs (#34027) +- Add missing audit logs for Flask actions add, edit and delete (#34090) +- Hide Irrelevant Dag Processor from Cluster Activity Page (#33611) +- Remove infinite animation for pinwheel, spin for 1.5s (#34020) +- Restore rendering of provider configuration with ``version_added`` (#34011) - logger = logging.getLogger("custom-logger") +Doc Only Changes +"""""""""""""""" +- Clarify audit log permissions (#34815) +- Add explanation for Audit log users (#34814) +- Import ``AUTH_REMOTE_USER`` from FAB in WSGI middleware example (#34721) +- Add information about drop support MsSQL as DB Backend in the future (#34375) +- Document how to use the system's timezone database (#34667) +- Clarify what landing time means in doc (#34608) +- Fix screenshot in dynamic task mapping docs (#34566) +- Fix class reference in Public Interface documentation (#34454) +- Clarify var.value.get and var.json.get usage (#34411) +- Schedule default value description (#34291) +- Docs for triggered_dataset_event (#34410) +- Add DagRun events (#34328) +- Provide tabular overview about trigger form param types (#34285) +- Add link to Amazon Provider Configuration in Core documentation (#34305) +- Add "security infrastructure" paragraph to security model (#34301) +- Change links to SQLAlchemy 1.4 (#34288) +- Add SBOM entry in security documentation (#34261) +- Added more example code for XCom push and pull (#34016) +- Add state utils to Public Airflow Interface (#34059) +- Replace markdown style link with rst style link (#33990) +- Fix broken link to the "UPDATING.md" file (#33583) - with ( - redirect_stdout(StreamLogWriter(logger, logging.INFO)), - 
redirect_stderr(StreamLogWriter(logger, logging.WARN)),
-   ):
-       print("I love Airflow")
+Misc/Internal
+"""""""""""""
+- Update min-sqlalchemy version to account for latest features used (#34293)
+- Fix SesssionExemptMixin spelling (#34696)
+- Restrict ``astroid`` version < 3 (#34658)
+- Fail dag test if defer without triggerer (#34619)
+- Fix connections exported output (#34640)
+- Don't run isort when creating new alembic migrations (#34636)
+- Deprecate numeric type python version in PythonVirtualEnvOperator (#34359)
+- Refactor ``os.path.splitext`` to ``Path.*`` (#34352, #33669)
+- Replace = by is for type comparison (#33983)
+- Refactor integer division (#34180)
+- Refactor: Simplify comparisons (#34181)
+- Refactor: Simplify string generation (#34118)
+- Replace unnecessary dict comprehension with dict() in core (#33858)
+- Change "not all" to "any" for ease of readability (#34259)
+- Replace assert by if...raise in code (#34250, #34249)
+- Move default timezone to except block (#34245)
+- Combine similar if logic in core (#33988)
+- Refactor: Consolidate import and usage of random (#34108)
+- Consolidate importing of os.path.* (#34060)
+- Replace sequence concatenation by unpacking in Airflow core (#33934)
+- Refactor unneeded 'continue' jumps around the repo (#33849, #33845, #33846, #33848, #33839, #33844, #33836, #33842)
+- Remove [project] section from ``pyproject.toml`` (#34014)
+- Move the try outside the loop when this is possible in Airflow core (#33975)
+- Replace loop by any when looking for a positive value in core (#33985)
+- Do not create lists we don't need (#33519)
+- Remove useless string join from core (#33969)
+- Add TCH001 and TCH002 rules to pre-commit to detect and move type checking modules (#33865)
+- Add cancel_trigger_ids to to_cancel dequeue in batch (#33944)
+- Avoid creating unnecessary list when parsing stats datadog tags (#33943)
+- Replace dict.items by dict.values when key is not used in core (#33940)
+- Replace lambdas with comprehensions (#33745)
+- Improve modules import in Airflow core by moving some of them into a type-checking block (#33755)
+- Refactor: remove unused state - SHUTDOWN (#33746, #34063, #33893)
+- Refactor: Use in-place .sort() (#33743)
+- Use literal dict instead of calling dict() in Airflow core (#33762)
+- remove unnecessary map and rewrite it using list in Airflow core (#33764)
+- Replace lambda by a def method in Airflow core (#33758)
+- Replace type func by ``isinstance`` in fab_security manager (#33760)
+- Replace single quotes by double quotes in all Airflow modules (#33766)
+- Merge multiple ``isinstance`` calls for the same object in a single call (#33767)
+- Use a single statement with multiple contexts instead of nested statements in core (#33769)
+- Refactor: Use f-strings (#33734, #33455)
+- Refactor: Use random.choices (#33631)
+- Use ``str.splitlines()`` to split lines (#33592)
+- Refactor: Remove useless str() calls (#33629)
+- Refactor: Improve detection of duplicates and list sorting (#33675)
+- Simplify conditions on ``len()`` (#33454)
-``airflow.models.baseoperator.BaseOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Now, additional arguments passed to BaseOperator cause an exception. Previous versions of Airflow took additional arguments and displayed a message on the console. When the
-message went unnoticed by users, it caused errors that were very difficult to detect.
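-
-For example, a misspelled argument now fails loudly at parse time (a minimal
-sketch; the operator and the misspelled ``retriess`` argument are illustrative):
-
-.. code-block:: python
-
-    from airflow.operators.bash import BashOperator
-
-    # Raises an exception instead of merely printing a warning as before.
-    task = BashOperator(task_id="example", bash_command="echo 1", retriess=3)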
+Airflow 2.7.1 (2023-09-07)
+--------------------------
-In order to restore the previous behavior, you must set the ``allow_illegal_arguments``
-option in the ``[operators]`` section of the ``airflow.cfg`` file to ``True``. This option
-may be removed entirely in the future.
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-``airflow.models.dagbag.DagBag``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+CronTriggerTimetable is now less aggressive when trying to skip a run (#33404)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-Passing the ``store_serialized_dags`` argument to ``DagBag.__init__`` and accessing the ``DagBag.store_serialized_dags`` property
-are deprecated and will be removed in future versions.
+When setting ``catchup=False``, CronTriggerTimetable no longer skips a run if
+the scheduler does not query the timetable immediately after the previous run
+has been triggered.
-**Previous signature**:
+This should not affect scheduling in most cases, but can change the behaviour if
+a DAG is paused-unpaused to manually skip a run. Previously, the timetable (with
+``catchup=False``) would only start a run after a DAG is unpaused, but with this
+change, the scheduler would try to look a little bit back to schedule the
+previous run that covers a part of the period when the DAG was paused. This
+means you will need to keep a DAG paused longer (namely, for the entire cron
+period to pass) to really skip a run.
-.. code-block:: python
+Note that this is also the behaviour exhibited by various other cron-based
+scheduling tools, such as ``anacron``.
-   def __init__(
-       dag_folder=None,
-       include_examples=conf.getboolean("core", "LOAD_EXAMPLES"),
-       safe_mode=conf.getboolean("core", "DAG_DISCOVERY_SAFE_MODE"),
-       store_serialized_dags=False,
-   ): ...
+``conf.set()`` becomes case insensitive to match ``conf.get()`` behavior (#33452)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-**Current**:
+Also, ``conf.get()`` will now break if used with non-string parameters.
-.. code-block:: python
+``conf.set(section, key, value)`` used to be case sensitive, i.e. ``conf.set("SECTION", "KEY", value)``
+and ``conf.set("section", "key", value)`` were stored as two distinct configurations.
+This was inconsistent with the behavior of ``conf.get(section, key)``, which always converted
+the section and key to lower case; see the sketch below.
-   def __init__(
-       dag_folder=None,
-       include_examples=conf.getboolean("core", "LOAD_EXAMPLES"),
-       safe_mode=conf.getboolean("core", "DAG_DISCOVERY_SAFE_MODE"),
-       read_dags_from_db=False,
-   ): ...
-
-If you were using positional arguments, no change is required, but if you were using keyword
-arguments, please change ``store_serialized_dags`` to ``read_dags_from_db``.
-
-Similarly, if you were using the ``DagBag().store_serialized_dags`` property, change it to
-``DagBag().read_dags_from_db``.
-
-Changes in ``google`` provider package
-""""""""""""""""""""""""""""""""""""""""""
-
-We strive to ensure that there are no changes that may affect the end user and your Python files, but this
-release may contain changes that will require you to update your configuration, DAG files or other
-integrations, e.g. custom operators.
-
-Only changes unique to this provider are described here. You should still pay attention to the changes that
-have been made to the core (including core operators) as they can affect the integration behavior
-of this provider.
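+
+Returning to the ``conf.set()`` change above, a minimal sketch of the unified
+behavior (the section and key names are illustrative):
+
+.. code-block:: python
+
+    from airflow.configuration import conf
+
+    # The section and key are now lower-cased on write, matching conf.get().
+    conf.set("WEBSERVER", "WEB_SERVER_PORT", "8793")
+
+    # Both spellings now address the same configuration entry; previously
+    # they would have been stored as two distinct configurations.
+    assert conf.get("webserver", "web_server_port") == "8793"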
-
-This section describes the changes that have been made, and what you need to do to update your code if
-you use operators or hooks which integrate with Google services (including Google Cloud - GCP).
-
-Direct impersonation added to operators communicating with Google services
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-`Directly impersonating a service account `_
-has been made possible for operators communicating with Google services via a new argument called ``impersonation_chain``
-(\ ``google_impersonation_chain`` in case of operators that also communicate with services of other cloud providers).
-As a result, GCSToS3Operator no longer derives from GCSListObjectsOperator.
-
-Normalize gcp_conn_id for Google Cloud
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Previously, not all hooks and operators related to Google Cloud used
-``gcp_conn_id`` as the parameter for the GCP connection. There is now one parameter
-which applies to most services. Parameters like ``datastore_conn_id``\ , ``bigquery_conn_id``\ ,
-``google_cloud_storage_conn_id`` and similar have been deprecated. Operators that require two connections are not changed.
-
-The following components were affected by the normalization:
-
-
-* ``airflow.providers.google.cloud.hooks.datastore.DatastoreHook``
-* ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook``
-* ``airflow.providers.google.cloud.hooks.gcs.GoogleCloudStorageHook``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryCheckOperator``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryValueCheckOperator``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryGetDataOperator``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryOperator``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator``
-* ``airflow.providers.google.cloud.operators.bigquery.BigQueryTableDeleteOperator``
-* ``airflow.providers.google.cloud.operators.gcs.GoogleCloudStorageCreateBucketOperator``
-* ``airflow.providers.google.cloud.operators.gcs.GoogleCloudStorageListOperator``
-* ``airflow.providers.google.cloud.operators.gcs.GoogleCloudStorageDownloadOperator``
-* ``airflow.providers.google.cloud.operators.gcs.GoogleCloudStorageDeleteOperator``
-* ``airflow.providers.google.cloud.operators.gcs.GoogleCloudStorageBucketCreateAclEntryOperator``
-* ``airflow.providers.google.cloud.operators.gcs.GoogleCloudStorageObjectCreateAclEntryOperator``
-* ``airflow.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator``
-* ``airflow.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator``
-* ``airflow.operators.gcs_to_s3.GoogleCloudStorageToS3Operator``
-* ``airflow.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator``
-* ``airflow.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator``
-* ``airflow.operators.local_to_gcs.FileToGoogleCloudStorageOperator``
-* ``airflow.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator``
-* ``airflow.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator``
-
-Changes to import paths and names of GCP operators and hooks
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-According to `AIP-21 `_
-operators related to Google Cloud have been moved from contrib to core.
-The following table shows changes in import paths.
-
-..
list-table:: - :header-rows: 1 - - * - Old path - - New path - * - ``airflow.contrib.hooks.bigquery_hook.BigQueryHook`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook`` - * - ``airflow.contrib.hooks.datastore_hook.DatastoreHook`` - - ``airflow.providers.google.cloud.hooks.datastore.DatastoreHook`` - * - ``airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook`` - - ``airflow.providers.google.cloud.hooks.bigtable.BigtableHook`` - * - ``airflow.contrib.hooks.gcp_cloud_build_hook.CloudBuildHook`` - - ``airflow.providers.google.cloud.hooks.cloud_build.CloudBuildHook`` - * - ``airflow.contrib.hooks.gcp_container_hook.GKEClusterHook`` - - ``airflow.providers.google.cloud.hooks.kubernetes_engine.GKEHook`` - * - ``airflow.contrib.hooks.gcp_compute_hook.GceHook`` - - ``airflow.providers.google.cloud.hooks.compute.ComputeEngineHook`` - * - ``airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook`` - - ``airflow.providers.google.cloud.hooks.dataflow.DataflowHook`` - * - ``airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook`` - - ``airflow.providers.google.cloud.hooks.dataproc.DataprocHook`` - * - ``airflow.contrib.hooks.gcp_dlp_hook.CloudDLPHook`` - - ``airflow.providers.google.cloud.hooks.dlp.CloudDLPHook`` - * - ``airflow.contrib.hooks.gcp_function_hook.GcfHook`` - - ``airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook`` - * - ``airflow.contrib.hooks.gcp_kms_hook.GoogleCloudKMSHook`` - - ``airflow.providers.google.cloud.hooks.kms.CloudKMSHook`` - * - ``airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook`` - - ``airflow.providers.google.cloud.hooks.mlengine.MLEngineHook`` - * - ``airflow.contrib.hooks.gcp_natural_language_hook.CloudNaturalLanguageHook`` - - ``airflow.providers.google.cloud.hooks.natural_language.CloudNaturalLanguageHook`` - * - ``airflow.contrib.hooks.gcp_pubsub_hook.PubSubHook`` - - ``airflow.providers.google.cloud.hooks.pubsub.PubSubHook`` - * - ``airflow.contrib.hooks.gcp_speech_to_text_hook.GCPSpeechToTextHook`` - - ``airflow.providers.google.cloud.hooks.speech_to_text.CloudSpeechToTextHook`` - * - ``airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook`` - - ``airflow.providers.google.cloud.hooks.spanner.SpannerHook`` - * - ``airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook`` - - ``airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook`` - * - ``airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook`` - - ``airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook`` - * - ``airflow.contrib.hooks.gcp_tasks_hook.CloudTasksHook`` - - ``airflow.providers.google.cloud.hooks.tasks.CloudTasksHook`` - * - ``airflow.contrib.hooks.gcp_text_to_speech_hook.GCPTextToSpeechHook`` - - ``airflow.providers.google.cloud.hooks.text_to_speech.CloudTextToSpeechHook`` - * - ``airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook`` - - ``airflow.providers.google.cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook`` - * - ``airflow.contrib.hooks.gcp_translate_hook.CloudTranslateHook`` - - ``airflow.providers.google.cloud.hooks.translate.CloudTranslateHook`` - * - ``airflow.contrib.hooks.gcp_video_intelligence_hook.CloudVideoIntelligenceHook`` - - ``airflow.providers.google.cloud.hooks.video_intelligence.CloudVideoIntelligenceHook`` - * - ``airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook`` - - ``airflow.providers.google.cloud.hooks.vision.CloudVisionHook`` - * - ``airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook`` - - ``airflow.providers.google.cloud.hooks.gcs.GCSHook`` - * - 
``airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator`` - - ``airflow.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator`` - * - ``airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryCheckOperator`` - * - ``airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator`` - * - ``airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryValueCheckOperator`` - * - ``airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryGetDataOperator`` - * - ``airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator`` - * - ``airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator`` - * - ``airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator`` - * - ``airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator`` - * - ``airflow.contrib.operators.bigquery_operator.BigQueryOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator`` - * - ``airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator`` - - ``airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`` - * - ``airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator`` - - ``airflow.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator`` - * - ``airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator`` - - ``airflow.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator`` - * - ``airflow.contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator`` - - ``airflow.operators.bigquery_to_mysql.BigQueryToMySqlOperator`` - * - ``airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator`` - - ``airflow.providers.google.cloud.operators.dataflow.DataFlowJavaOperator`` - * - ``airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator`` - - ``airflow.providers.google.cloud.operators.dataflow.DataFlowPythonOperator`` - * - ``airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dataflow.DataflowTemplateOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataProcHiveOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataProcJobBaseOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocJobBaseOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataProcPigOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator`` - - 
``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataProcSparkOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocDeleteClusterOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocScaleClusterOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataprocOperationBaseOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocOperationBaseOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator`` - * - ``airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator`` - - ``airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator`` - * - ``airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator`` - - ``airflow.providers.google.cloud.operators.datastore.DatastoreExportOperator`` - * - ``airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator`` - - ``airflow.providers.google.cloud.operators.datastore.DatastoreImportOperator`` - * - ``airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator`` - - ``airflow.providers.google.cloud.transfers.local_to_gcs.FileToGoogleCloudStorageOperator`` - * - ``airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator`` - - ``airflow.providers.google.cloud.operators.bigtable.BigtableUpdateClusterOperator`` - * - ``airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator`` - - ``airflow.providers.google.cloud.operators.bigtable.BigtableCreateInstanceOperator`` - * - ``airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator`` - - ``airflow.providers.google.cloud.operators.bigtable.BigtableDeleteInstanceOperator`` - * - ``airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator`` - - ``airflow.providers.google.cloud.operators.bigtable.BigtableCreateTableOperator`` - * - ``airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator`` - - ``airflow.providers.google.cloud.operators.bigtable.BigtableDeleteTableOperator`` - * - ``airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor`` - - ``airflow.providers.google.cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor`` - * - ``airflow.contrib.operators.gcp_cloud_build_operator.CloudBuildCreateBuildOperator`` - - ``airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildOperator`` - * - ``airflow.contrib.operators.gcp_compute_operator.GceBaseOperator`` - - ``airflow.providers.google.cloud.operators.compute.GceBaseOperator`` - * - ``airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator`` - - 
``airflow.providers.google.cloud.operators.compute.GceInstanceGroupManagerUpdateTemplateOperator`` - * - ``airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator`` - - ``airflow.providers.google.cloud.operators.compute.GceInstanceStartOperator`` - * - ``airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator`` - - ``airflow.providers.google.cloud.operators.compute.GceInstanceStopOperator`` - * - ``airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator`` - - ``airflow.providers.google.cloud.operators.compute.GceInstanceTemplateCopyOperator`` - * - ``airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator`` - - ``airflow.providers.google.cloud.operators.compute.GceSetMachineTypeOperator`` - * - ``airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator`` - - ``airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator`` - * - ``airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator`` - - ``airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator`` - * - ``airflow.contrib.operators.gcp_container_operator.GKEPodOperator`` - - ``airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPCancelDLPJobOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPCancelDLPJobOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPCreateDLPJobOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDLPJobOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPCreateDeidentifyTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPCreateInspectTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPCreateInspectTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPCreateJobTriggerOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPCreateJobTriggerOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPCreateStoredInfoTypeOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPCreateStoredInfoTypeOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPDeidentifyContentOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPDeidentifyContentOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPDeleteDeidentifyTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPDeleteDlpJobOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDLPJobOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPDeleteInspectTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteInspectTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPDeleteJobTriggerOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteJobTriggerOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPDeleteStoredInfoTypeOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteStoredInfoTypeOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPGetDeidentifyTemplateOperator`` - - 
``airflow.providers.google.cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPGetDlpJobOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPGetInspectTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPGetInspectTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPGetJobTripperOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPGetJobTriggerOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPGetStoredInfoTypeOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPInspectContentOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPInspectContentOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPListDeidentifyTemplatesOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPListDlpJobsOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPListDLPJobsOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPListInfoTypesOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPListInfoTypesOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPListInspectTemplatesOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPListInspectTemplatesOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPListJobTriggersOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPListJobTriggersOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPListStoredInfoTypesOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPRedactImageOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPRedactImageOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPReidentifyContentOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPReidentifyContentOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPUpdateDeidentifyTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPUpdateInspectTemplateOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPUpdateJobTriggerOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateJobTriggerOperator`` - * - ``airflow.contrib.operators.gcp_dlp_operator.CloudDLPUpdateStoredInfoTypeOperator`` - - ``airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateStoredInfoTypeOperator`` - * - ``airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator`` - - ``airflow.providers.google.cloud.operators.functions.GcfFunctionDeleteOperator`` - * - ``airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator`` - - ``airflow.providers.google.cloud.operators.functions.GcfFunctionDeployOperator`` - * - ``airflow.contrib.operators.gcp_natural_language_operator.CloudNaturalLanguageAnalyzeEntitiesOperator`` - - 
``airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator`` - * - ``airflow.contrib.operators.gcp_natural_language_operator.CloudNaturalLanguageAnalyzeEntitySentimentOperator`` - - ``airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator`` - * - ``airflow.contrib.operators.gcp_natural_language_operator.CloudNaturalLanguageAnalyzeSentimentOperator`` - - ``airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator`` - * - ``airflow.contrib.operators.gcp_natural_language_operator.CloudNaturalLanguageClassifyTextOperator`` - - ``airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator`` - * - ``airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator`` - - ``airflow.providers.google.cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator`` - * - ``airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator`` - - ``airflow.providers.google.cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator`` - * - ``airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator`` - - ``airflow.providers.google.cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator`` - * - ``airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator`` - - ``airflow.providers.google.cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator`` - * - ``airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator`` - - ``airflow.providers.google.cloud.operators.spanner.SpannerDeleteInstanceOperator`` - * - ``airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator`` - - ``airflow.providers.google.cloud.operators.spanner.SpannerDeployInstanceOperator`` - * - ``airflow.contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator`` - - ``airflow.providers.google.cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator`` - * - ``airflow.contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator`` - - ``airflow.providers.google.cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceDeleteJobOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceUpdateJobOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCancelOperationOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGetOperationOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator`` - - 
``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServicePauseOperationOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceResumeOperationOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceListOperationsOperator`` - * - ``airflow.contrib.operators.gcp_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator`` - - ``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator`` - * - ``airflow.contrib.operators.gcp_translate_operator.CloudTranslateTextOperator`` - - ``airflow.providers.google.cloud.operators.translate.CloudTranslateTextOperator`` - * - ``airflow.contrib.operators.gcp_translate_speech_operator.GcpTranslateSpeechOperator`` - - ``airflow.providers.google.cloud.operators.translate_speech.GcpTranslateSpeechOperator`` - * - ``airflow.contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoExplicitContentOperator`` - - ``airflow.providers.google.cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoExplicitContentOperator`` - * - ``airflow.contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoLabelsOperator`` - - ``airflow.providers.google.cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoLabelsOperator`` - * - ``airflow.contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoShotsOperator`` - - ``airflow.providers.google.cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoShotsOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionAddProductToProductSetOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionDetectDocumentTextOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionTextDetectOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionDetectImageLabelsOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageLabelsOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionDetectImageSafeSearchOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageSafeSearchOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionDetectTextOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionDetectTextOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionGetProductOperator`` - * - 
``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductSetOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductSetOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionGetProductSetOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductSetOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionCreateReferenceImageOperator`` - * - ``airflow.contrib.operators.gcp_vision_operator.CloudVisionRemoveProductFromProductSetOperator`` - - ``airflow.providers.google.cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator`` - * - ``airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator`` - - ``airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`` - * - ``airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator`` - - ``airflow.providers.google.cloud.operators.gcs.GCSObjectCreateAclEntryOperator`` - * - ``airflow.contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator`` - - ``airflow.providers.google.cloud.operators.gcs.GCSDeleteObjectsOperator`` - * - ``airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator`` - - ``airflow.providers.google.cloud.operators.gcs.GCSToLocalFilesystemOperator`` - * - ``airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator`` - - ``airflow.providers.google.cloud.operators.gcs.GCSListObjectsOperator`` - * - ``airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator`` - - ``airflow.providers.google.cloud.operators.gcs.GCSCreateBucketOperator`` - * - ``airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator`` - - ``airflow.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator`` - * - ``airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator`` - - ``airflow.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator`` - * - ``airflow.contrib.operators.gcs_to_s3.GoogleCloudStorageToS3Operator`` - - ``airflow.operators.gcs_to_s3.GCSToS3Operator`` - * - ``airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator`` - - ``airflow.providers.google.cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator`` - * - ``airflow.contrib.operators.mlengine_operator.MLEngineModelOperator`` - - ``airflow.providers.google.cloud.operators.mlengine.MLEngineManageModelOperator`` - * - ``airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator`` - - ``airflow.providers.google.cloud.operators.mlengine.MLEngineStartTrainingJobOperator`` - * - ``airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator`` - - ``airflow.providers.google.cloud.operators.mlengine.MLEngineManageVersionOperator`` - * - 
``airflow.contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator`` - - ``airflow.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator`` - * - ``airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator`` - - ``airflow.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator`` - * - ``airflow.contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator`` - - ``airflow.operators.postgres_to_gcs.PostgresToGoogleCloudStorageOperator`` - * - ``airflow.contrib.operators.pubsub_operator.PubSubPublishOperator`` - - ``airflow.providers.google.cloud.operators.pubsub.PubSubPublishMessageOperator`` - * - ``airflow.contrib.operators.pubsub_operator.PubSubSubscriptionCreateOperator`` - - ``airflow.providers.google.cloud.operators.pubsub.PubSubCreateSubscriptionOperator`` - * - ``airflow.contrib.operators.pubsub_operator.PubSubSubscriptionDeleteOperator`` - - ``airflow.providers.google.cloud.operators.pubsub.PubSubDeleteSubscriptionOperator`` - * - ``airflow.contrib.operators.pubsub_operator.PubSubTopicCreateOperator`` - - ``airflow.providers.google.cloud.operators.pubsub.PubSubCreateTopicOperator`` - * - ``airflow.contrib.operators.pubsub_operator.PubSubTopicDeleteOperator`` - - ``airflow.providers.google.cloud.operators.pubsub.PubSubDeleteTopicOperator`` - * - ``airflow.contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator`` - - ``airflow.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator`` - * - ``airflow.contrib.sensors.bigquery_sensor.BigQueryTableSensor`` - - ``airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor`` - * - ``airflow.contrib.sensors.gcp_transfer_sensor.GCPTransferServiceWaitForJobStatusSensor`` - - ``airflow.providers.google.cloud.sensors.cloud_storage_transfer_service.DataTransferServiceJobStatusSensor`` - * - ``airflow.contrib.sensors.gcs_sensor.GoogleCloudStorageObjectSensor`` - - ``airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor`` - * - ``airflow.contrib.sensors.gcs_sensor.GoogleCloudStorageObjectUpdatedSensor`` - - ``airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor`` - * - ``airflow.contrib.sensors.gcs_sensor.GoogleCloudStoragePrefixSensor`` - - ``airflow.providers.google.cloud.sensors.gcs.GCSObjectsWithPrefixExistenceSensor`` - * - ``airflow.contrib.sensors.gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor`` - - ``airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor`` - * - ``airflow.contrib.sensors.pubsub_sensor.PubSubPullSensor`` - - ``airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor`` - - -Unify default conn_id for Google Cloud -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Previously not all hooks and operators related to Google Cloud use -``google_cloud_default`` as a default conn_id. There is currently one default -variant. Values like ``google_cloud_storage_default``\ , ``bigquery_default``\ , -``google_cloud_datastore_default`` have been deprecated. The configuration of -existing relevant connections in the database have been preserved. To use those -deprecated GCP conn_id, you need to explicitly pass their conn_id into -operators/hooks. Otherwise, ``google_cloud_default`` will be used as GCP's conn_id -by default. 
- -``airflow.providers.google.cloud.hooks.dataflow.DataflowHook`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -``airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -``airflow.providers.google.cloud.operators.dataflow.DataflowTemplatedJobStartOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -``airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To use project_id argument consistently across GCP hooks and operators, we did the following changes: - - -* Changed order of arguments in DataflowHook.start_python_dataflow. Uses - with positional arguments may break. -* Changed order of arguments in DataflowHook.is_job_dataflow_running. Uses - with positional arguments may break. -* Changed order of arguments in DataflowHook.cancel_job. Uses - with positional arguments may break. -* Added optional project_id argument to DataflowCreateJavaJobOperator - constructor. -* Added optional project_id argument to DataflowTemplatedJobStartOperator - constructor. -* Added optional project_id argument to DataflowCreatePythonJobOperator - constructor. - -``airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To provide more precise control in handling of changes to objects in -underlying GCS Bucket the constructor of this sensor now has changed. - - -* Old Behavior: This constructor used to optionally take ``previous_num_objects: int``. -* New replacement constructor kwarg: ``previous_objects: Optional[Set[str]]``. - -Most users would not specify this argument because the bucket begins empty -and the user wants to treat any files as new. - -Example of Updating usage of this sensor: -Users who used to call: - -``GCSUploadSessionCompleteSensor(bucket='my_bucket', prefix='my_prefix', previous_num_objects=1)`` - -Will now call: - -``GCSUploadSessionCompleteSensor(bucket='my_bucket', prefix='my_prefix', previous_num_objects={'.keep'})`` - -Where '.keep' is a single file at your prefix that the sensor should not consider new. - -``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To simplify BigQuery operators (no need of ``Cursor``\ ) and standardize usage of hooks within all GCP integration methods from ``BiqQueryBaseCursor`` -were moved to ``BigQueryHook``. Using them by from ``Cursor`` object is still possible due to preserved backward compatibility but they will raise ``DeprecationWarning``. -The following methods were moved: - -.. 
list-table:: - :header-rows: 1 - - * - Old path - - New path - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.cancel_query`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.cancel_query`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_dataset`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_dataset`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_table`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_table`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_external_table`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_external_table`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.delete_dataset`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.delete_dataset`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset_tables`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset_tables_list`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables_list`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_datasets_list`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_datasets_list`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_schema`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_schema`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_tabledata`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_tabledata`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.insert_all`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_all`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.patch_dataset`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_dataset`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.patch_table`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_table`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.poll_job_complete`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.poll_job_complete`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_copy`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_copy`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_extract`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_extract`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_grant_dataset_view_access`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_grant_dataset_view_access`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_load`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_load`` - * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_query`` - - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_query`` - * - 
``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_delete``
-     - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_delete``
-   * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_upsert``
-     - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_upsert``
-   * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_with_configuration``
-     - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_with_configuration``
-   * - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.update_dataset``
-     - ``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.update_dataset``
+As a result, configuration options set with upper case characters in the section or key were unreachable.
+That's why we now convert the section and key to lower case in ``conf.set`` too.
+We also slightly changed the behavior of ``conf.get()``. It used to allow objects that are not strings in the section or key.
+Doing this will now result in an exception. For instance, ``conf.get("section", 123)`` needs to be replaced with ``conf.get("section", "123")``.
-``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Bug Fixes
+"""""""""
+- Ensure that tasks wait for running indirect setup (#33903)
+- Respect "soft_fail" for core async sensors (#33403)
+- Differentiate 0 and unset as a default param values (#33965)
+- Raise 404 from Variable PATCH API if variable is not found (#33885)
+- Fix ``MappedTaskGroup`` tasks not respecting upstream dependency (#33732)
+- Add limit 1 if required first value from query result (#33672)
+- Fix UI DAG counts including deleted DAGs (#33778)
+- Fix cleaning zombie RESTARTING tasks (#33706)
+- ``SECURITY_MANAGER_CLASS`` should be a reference to class, not a string (#33690)
+- Add back ``get_url_for_login`` in security manager (#33660)
+- Fix ``2.7.0 db`` migration job errors (#33652)
+- Set context inside templates (#33645)
+- Treat dag-defined access_control as authoritative if defined (#33632)
+- Bind engine before attempting to drop archive tables (#33622)
+- Add a fallback in case no first name and last name are set (#33617)
+- Sort data before ``groupby`` in TIS duration calculation (#33535)
+- Stop adding values to rendered templates UI when there is no dagrun (#33516)
+- Set strict to True when parsing dates in webserver views (#33512)
+- Use ``dialect.name`` in custom SA types (#33503)
+- Do not return ongoing dagrun when a ``end_date`` is less than ``utcnow`` (#33488)
+- Fix a bug in ``formatDuration`` method (#33486)
+- Make ``conf.set`` case insensitive (#33452)
+- Allow timetable to slightly miss catchup cutoff (#33404)
+- Respect ``soft_fail`` argument when ``poke`` is called (#33401)
+- Create a new method used to resume the task in order to implement specific logic for operators (#33424)
+- Fix DagFileProcessor interfering with dags outside its ``processor_subdir`` (#33357)
+- Remove the unnecessary ``<br>`` text in Provider's view (#33326)
+- Respect ``soft_fail`` argument when ExternalTaskSensor runs in deferrable mode (#33196)
+- Fix handling of default value and serialization of Param class (#33141)
+- Check if the dynamically-added index is in the table schema before adding (#32731)
+- Fix rendering the mapped parameters when using ``expand_kwargs`` method (#32272)
+- Fix dependencies for celery and opentelemetry for Python 3.8 (#33579)
-Since BigQuery is the part of the GCP it was possible to simplify the code by handling the exceptions
-by usage of the ``airflow.providers.google.common.hooks.base.GoogleBaseHook.catch_http_exception`` decorator however it changes
-exceptions raised by the following methods:
+Misc/Internal
+"""""""""""""
+- Bring back ``Pydantic`` 1 compatibility (#34081, #33998)
+- Use a trimmed version of README.md for PyPI (#33637)
+- Upgrade to ``Pydantic`` 2 (#33956)
+- Reorganize ``devel_only`` extra in Airflow's setup.py (#33907)
+- Bumping ``FAB`` to ``4.3.4`` in order to fix issues with filters (#33931)
+- Add minimum requirement for ``sqlalchemy`` to ``1.4.24`` (#33892)
+- Update version_added field for configs in config file (#33509)
+- Replace ``OrderedDict`` with plain dict (#33508)
+- Consolidate import and usage of itertools (#33479)
+- Static check fixes (#33462)
+- Import utc from datetime and normalize its import (#33450)
+- D401 Support (#33352, #33339, #33337, #33336, #33335, #33333, #33338)
+- Fix some missing type hints (#33334)
+- D205 Support - Stragglers (#33301, #33298, #33297)
+- Refactor: Simplify code (#33160, #33270, #33268, #33267, #33266, #33264, #33292, #33453, #33476, #33567,
+  #33568, #33480, #33753, #33520, #33623)
+- Fix ``Pydantic`` warning about ``orm_mode`` rename (#33220)
+- Add MySQL 8.1 to supported versions. (#33576)
+- Remove ``Pydantic`` limitation for version < 2 (#33507)
+Doc only changes
+"""""""""""""""""
+- Add documentation explaining template_ext (and how to override it) (#33735)
+- Explain how users can check if python code is top-level (#34006)
+- Clarify that DAG authors can also run code in DAG File Processor (#33920)
+- Fix broken link in Modules Management page (#33499)
+- Fix secrets backend docs (#33471)
+- Fix config description for base_log_folder (#33388)
-* ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.run_table_delete`` raises ``AirflowException`` instead of ``Exception``.
-* ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.create_empty_dataset`` raises ``AirflowException`` instead of ``ValueError``.
-* ``airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor.get_dataset`` raises ``AirflowException`` instead of ``ValueError``.
-``airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Airflow 2.7.0 (2023-08-18)
+--------------------------
-``airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-Idempotency was added to ``BigQueryCreateEmptyTableOperator`` and ``BigQueryCreateEmptyDatasetOperator``.
-But to achieve that try / except clause was removed from ``create_empty_dataset`` and ``create_empty_table``
-methods of ``BigQueryHook``.
+Remove Python 3.7 support (#30963)
+""""""""""""""""""""""""""""""""""
+As of now, Python 3.7 is no longer supported by the Python community.
+Therefore, to use Airflow 2.7.0, you must ensure your Python version is
+either 3.8, 3.9, 3.10, or 3.11.
-``airflow.providers.google.cloud.hooks.dataflow.DataflowHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Old Graph View is removed (#32958)
+""""""""""""""""""""""""""""""""""
+The old Graph View is removed. The new Graph View is the default view now.
-``airflow.providers.google.cloud.hooks.mlengine.MLEngineHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The trigger UI form is skipped in web UI if no parameters are defined in a DAG (#33351)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-``airflow.providers.google.cloud.hooks.pubsub.PubSubHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+If you are using the ``dag_run.conf`` dictionary and the web UI JSON entry to run your DAG, you should either:
+* `Add params to your DAG `_
+* Enable the new configuration ``show_trigger_form_if_no_params`` to bring back the old behaviour
+The "db init", "db upgrade" commands and "[database] load_default_connections" configuration options are deprecated (#33136).
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+Instead, you should use the "airflow db migrate" command to create or upgrade the database. This command will not create default connections.
+In order to create default connections you need to run "airflow connections create-default-connections" explicitly,
+after running "airflow db migrate".
-``airflow.providers.google.cloud.hooks.pubsub.PubSubHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+In case of SMTP SSL connection, the context now uses the "default" context (#33070)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+The "default" context is Python's ``default_ssl_context`` instead of the previously used "none". The
+``default_ssl_context`` provides a balance between security and compatibility but in some cases,
+when certificates are old, self-signed or misconfigured, it might not work. This can be configured
+by setting "ssl_context" in the "email" configuration of Airflow.
-``airflow.providers.google.cloud.operators.pubsub.PubSubTopicCreateOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Setting it to "none" brings back the "none" setting that was used in Airflow 2.6 and before,
+but it is not recommended for security reasons, as this setting disables validation of certificates and allows MITM attacks.
-``airflow.providers.google.cloud.operators.pubsub.PubSubSubscriptionCreateOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Disable default allowing the testing of connections in UI, API and CLI (#32052)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+For security reasons, the test connection functionality is disabled by default across Airflow UI,
+API and CLI. The availability of the functionality can be controlled by the
+``test_connection`` flag in the ``core`` section of the Airflow
+configuration (``airflow.cfg``). It can also be controlled by the
+environment variable ``AIRFLOW__CORE__TEST_CONNECTION``.
-``airflow.providers.google.cloud.operators.pubsub.PubSubTopicDeleteOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-``airflow.providers.google.cloud.operators.pubsub.PubSubSubscriptionDeleteOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The following values are accepted for this config param:
+1. ``Disabled``: Disables the test connection functionality and
+disables the Test Connection button in the UI.
-``airflow.providers.google.cloud.operators.pubsub.PubSubPublishOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+This is also the default value set in the Airflow configuration.
+2. ``Enabled``: Enables the test connection functionality and
+activates the Test Connection button in the UI.
-``airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+3. ``Hidden``: Disables the test connection functionality and
+hides the Test Connection button in the UI.
-In the ``PubSubPublishOperator`` and ``PubSubHook.publish`` method the data field in a message should be bytestring (utf-8 encoded) rather than base64 encoded string.
+For more information on the capabilities of users, see the documentation:
+https://airflow.apache.org/docs/apache-airflow/stable/security/security_model.html#capabilities-of-authenticated-ui-users
+It is strongly advised to **not** enable the feature until you make sure that only
+highly trusted UI/API users have "edit connection" permissions.
-Due to the normalization of the parameters within GCP operators and hooks a parameters like ``project`` or ``topic_project``
-are deprecated and will be substituted by parameter ``project_id``.
-In ``PubSubHook.create_subscription`` hook method in the parameter ``subscription_project`` is replaced by ``subscription_project_id``.
-Template fields are updated accordingly and old ones may not work.
+The ``xcomEntries`` API disables support for the ``deserialize`` flag by default (#32176)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+For security reasons, the ``/dags/*/dagRuns/*/taskInstances/*/xcomEntries/*``
+API endpoint now disables the ``deserialize`` option to deserialize arbitrary
+XCom values in the webserver. For backward compatibility, server admins may set
+the ``[api] enable_xcom_deserialize_support`` config to *True* to enable the
+flag and restore backward compatibility.
-It is required now to pass key-word only arguments to ``PubSub`` hook.
+However, it is strongly advised to **not** enable the feature, and to perform
+deserialization on the client side instead.
-These changes are not backward compatible.
+Change of the default Celery application name (#32526)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""
+The default name of the Celery application changed from ``airflow.executors.celery_executor`` to ``airflow.providers.celery.executors.celery_executor``.
-``airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+You should change both your configuration and health check command to use the new name:
+ * in configuration (``celery_app_name`` configuration in ``celery`` section) use ``airflow.providers.celery.executors.celery_executor``
+ * in your health check command use ``airflow.providers.celery.executors.celery_executor.app``
-The gcp_conn_id parameter in GKEPodOperator is required. In previous versions, it was possible to pass
-the ``None`` value to the ``gcp_conn_id`` in the GKEStartPodOperator
-operator, which resulted in credentials being determined according to the
-`Application Default Credentials `_ strategy.
-Now this parameter requires a value. To restore the previous behavior, configure the connection without
-specifying the service account.
+The default value for ``scheduler.max_tis_per_query`` is changed from 512 to 16 (#32572)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+This change is expected to make the Scheduler more responsive.
-Detailed information about connection management is available:
-`Google Cloud Connection `_.
+``scheduler.max_tis_per_query`` needs to be lower than ``core.parallelism``.
+If both were previously left at their default values, the effective default value of ``scheduler.max_tis_per_query`` was 32
+(because it was capped at ``core.parallelism``).
-``airflow.providers.google.cloud.hooks.gcs.GCSHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+To keep the behavior as close as possible to the old config, one can set ``scheduler.max_tis_per_query = 0``,
+in which case it'll always use the value of ``core.parallelism`` (a configuration sketch follows below).
+Some executors have been moved to corresponding providers (#32767)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+In order to use the executors, you need to install the providers:
-* - The following parameters have been replaced in all the methods in GCSHook:
+* for Celery executors you need to install the ``apache-airflow-providers-celery`` package >= 3.3.0
+* for Kubernetes executors you need to install the ``apache-airflow-providers-cncf-kubernetes`` package >= 7.4.0
+* for Dask executors you need to install the ``apache-airflow-providers-daskexecutor`` package in any version
+You can also achieve this by installing Airflow with the ``[celery]``, ``[cncf.kubernetes]``, ``[daskexecutor]`` extras respectively.
- * ``bucket`` is changed to ``bucket_name``
- * ``object`` is changed to ``object_name``
+Users who base their images on the ``apache/airflow`` reference image (not slim) should be unaffected - the base
+reference image comes with all three providers installed.
-* - The ``maxResults`` parameter in ``GoogleCloudStorageHook.list`` has been renamed to ``max_results`` for consistency.
+Improvement Changes
+^^^^^^^^^^^^^^^^^^^
-``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+PostgreSQL only improvement: Added index on taskinstance table (#30762)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+This index has shown a great positive effect in setups with tens of millions of such rows.
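+As an illustrative sketch only (the option names come from the notes above; the values shown are
+examples, not recommendations), the 2.7.0 configuration changes discussed above map onto
+``airflow.cfg`` entries like this:
+
+.. code-block:: ini
+
+   [celery]
+   # New fully-qualified Celery application name
+   # (previously airflow.executors.celery_executor)
+   celery_app_name = airflow.providers.celery.executors.celery_executor
+
+   [scheduler]
+   # 0 means "always follow core.parallelism"; the new default is 16
+   max_tis_per_query = 0
+
+   [core]
+   # Disabled (default), Enabled and Hidden are the accepted values
+   test_connection = Disabled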
-``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +New Features +"""""""""""" +- Add OpenTelemetry to Airflow (`AIP-49 `_) +- Trigger Button - Implement Part 2 of AIP-50 (#31583) +- Removing Executor Coupling from Core Airflow (`AIP-51 `_) +- Automatic setup and teardown tasks (`AIP-52 `_) +- OpenLineage in Airflow (`AIP-53 `_) +- Experimental: Add a cache to Variable and Connection when called at dag parsing time (#30259) +- Enable pools to consider deferred tasks (#32709) +- Allows to choose SSL context for SMTP connection (#33070) +- New gantt tab (#31806) +- Load plugins from providers (#32692) +- Add ``BranchExternalPythonOperator`` (#32787, #33360) +- Add option for storing configuration description in providers (#32629) +- Introduce Heartbeat Parameter to Allow ``Per-LocalTaskJob`` Configuration (#32313) +- Add Executors discovery and documentation (#32532) +- Add JobState for job state constants (#32549) +- Add config to disable the 'deserialize' XCom API flag (#32176) +- Show task instance in web UI by custom operator name (#31852) +- Add default_deferrable config (#31712) +- Introducing ``AirflowClusterPolicySkipDag`` exception (#32013) +- Use ``reactflow`` for datasets graph (#31775) +- Add an option to load the dags from db for command tasks run (#32038) +- Add version of ``chain`` which doesn't require matched lists (#31927) +- Use operator_name instead of task_type in UI (#31662) +- Add ``--retry`` and ``--retry-delay`` to ``airflow db check`` (#31836) +- Allow skipped task state task_instance_schema.py (#31421) +- Add a new config for celery result_backend engine options (#30426) +- UI Add Cluster Activity Page (#31123, #32446) +- Adding keyboard shortcuts to common actions (#30950) +- Adding more information to kubernetes executor logs (#29929) +- Add support for configuring custom alembic file (#31415) +- Add running and failed status tab for DAGs on the UI (#30429) +- Add multi-select, proposals and labels for trigger form (#31441) +- Making webserver config customizable (#29926) +- Render DAGCode in the Grid View as a tab (#31113) +- Add rest endpoint to get option of configuration (#31056) +- Add ``section`` query param in get config rest API (#30936) +- Create metrics to track ``Scheduled->Queued->Running`` task state transition times (#30612) +- Mark Task Groups as Success/Failure (#30478) +- Add CLI command to list the provider trigger info (#30822) +- Add Fail Fast feature for DAGs (#29406) -``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Improvements +"""""""""""" +- Improve graph nesting logic (#33421) +- Configurable health check threshold for triggerer (#33089, #33084) +- add dag_run_ids and task_ids filter for the batch task instance API endpoint (#32705) +- Ensure DAG-level references are filled on unmap (#33083) +- Add support for arrays of different data types in the Trigger Form UI (#32734) +- Always show gantt and code tabs (#33029) +- Move listener success hook to after SQLAlchemy commit (#32988) +- Rename ``db upgrade`` to ``db migrate`` and add ``connections create-default-connections`` (#32810, #33136) +- Remove old gantt chart and redirect to grid views gantt tab (#32908) +- Adjust graph zoom based on selected task (#32792) +- Call listener on_task_instance_running after rendering templates 
(#32716) +- Display execution_date in graph view task instance tooltip. (#32527) +- Allow configuration to be contributed by providers (#32604, #32755, #32812) +- Reduce default for max TIs per query, enforce ``<=`` parallelism (#32572) +- Store config description in Airflow configuration object (#32669) +- Use ``isdisjoint`` instead of ``not intersection`` (#32616) +- Speed up calculation of leaves and roots for task groups (#32592) +- Kubernetes Executor Load Time Optimizations (#30727) +- Save DAG parsing time if dag is not schedulable (#30911) +- Updates health check endpoint to include ``dag_processor`` status. (#32382) +- Disable default allowing the testing of connections in UI, API and CLI (#32052, #33342) +- Fix config var types under the scheduler section (#32132) +- Allow to sort Grid View alphabetically (#32179) +- Add hostname to triggerer metric ``[triggers.running]`` (#32050) +- Improve DAG ORM cleanup code (#30614) +- ``TriggerDagRunOperator``: Add ``wait_for_completion`` to ``template_fields`` (#31122) +- Open links in new tab that take us away from Airflow UI (#32088) +- Only show code tab when a task is not selected (#31744) +- Add descriptions for celery and dask cert configs (#31822) +- ``PythonVirtualenvOperator`` termination log in alert (#31747) +- Migration of all DAG details to existing grid view dag details panel (#31690) +- Add a diagram to help visualize timer metrics (#30650) +- Celery Executor load time optimizations (#31001) +- Update code style for ``airflow db`` commands to SQLAlchemy 2.0 style (#31486) +- Mark uses of md5 as "not-used-for-security" in FIPS environments (#31171) +- Add pydantic support to serde (#31565) +- Enable search in note column in DagRun and TaskInstance (#31455) +- Save scheduler execution time by adding new Index idea for dag_run (#30827) +- Save scheduler execution time by caching dags (#30704) +- Support for sorting DAGs by Last Run Date in the web UI (#31234) +- Better typing for Job and JobRunners (#31240) +- Add sorting logic by created_date for fetching triggers (#31151) +- Remove DAGs.can_create on access control doc, adjust test fixture (#30862) +- Split Celery logs into stdout/stderr (#30485) +- Decouple metrics clients and ``validators`` into their own modules (#30802) +- Description added for pagination in ``get_log`` api (#30729) +- Optimize performance of scheduling mapped tasks (#30372) +- Add sentry transport configuration option (#30419) +- Better message on deserialization error (#30588) -``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Bug Fixes +""""""""" +- Remove user sessions when resetting password (#33347) +- ``Gantt chart:`` Use earliest/oldest ti dates if different than dag run start/end (#33215) +- Fix ``virtualenv`` detection for Python ``virtualenv`` operator (#33223) +- Correctly log when there are problems trying to ``chmod`` ``airflow.cfg`` (#33118) +- Pass app context to webserver_config.py (#32759) +- Skip served logs for non-running task try (#32561) +- Fix reload gunicorn workers (#32102) +- Fix future DagRun rarely triggered by race conditions when ``max_active_runs`` reached its upper limit. 
(#31414) +- Fix BaseOperator ``get_task_instances`` query (#33054) +- Fix issue with using the various state enum value in logs (#33065) +- Use string concatenation to prepend base URL for log_url (#33063) +- Update graph nodes with operator style attributes (#32822) +- Affix webserver access_denied warning to be configurable (#33022) +- Only load task action modal if user can edit (#32992) +- OpenAPI Spec fix nullable alongside ``$ref`` (#32887) +- Make the decorators of ``PythonOperator`` sub-classes extend its decorator (#32845) +- Fix check if ``virtualenv`` is installed in ``PythonVirtualenvOperator`` (#32939) +- Unwrap Proxy before checking ``__iter__`` in is_container() (#32850) +- Override base log folder by using task handler's base_log_folder (#32781) +- Catch arbitrary exception from run_job to prevent zombie scheduler (#32707) +- Fix depends_on_past work for dynamic tasks (#32397) +- Sort extra_links for predictable order in UI. (#32762) +- Fix prefix group false graph (#32764) +- Fix bad delete logic for dagruns (#32684) +- Fix bug in prune_dict where empty dict and list would be removed even in strict mode (#32573) +- Add explicit browsers list and correct rel for blank target links (#32633) +- Handle returned None when multiple_outputs is True (#32625) +- Fix returned value when ShortCircuitOperator condition is falsy and there is not downstream tasks (#32623) +- Fix returned value when ShortCircuitOperator condition is falsy (#32569) +- Fix rendering of ``dagRunTimeout`` (#32565) +- Fix permissions on ``/blocked`` endpoint (#32571) +- Bugfix, prevent force of unpause on trigger DAG (#32456) +- Fix data interval in ``cli.dags.trigger`` command output (#32548) +- Strip ``whitespaces`` from airflow connections form (#32292) +- Add timedelta support for applicable arguments of sensors (#32515) +- Fix incorrect default on ``readonly`` property in our API (#32510) +- Add xcom map_index as a filter to xcom endpoint (#32453) +- Fix CLI commands when custom timetable is used (#32118) +- Use WebEncoder to encode DagRun.conf in DagRun's list view (#32385) +- Fix logic of the skip_all_except method (#31153) +- Ensure dynamic tasks inside dynamic task group only marks the (#32354) +- Handle the cases that webserver.expose_config is set to non-sensitive-only instead of boolean value (#32261) +- Add retry functionality for handling process termination caused by database network issues (#31998) +- Adapt Notifier for sla_miss_callback (#31887) +- Fix XCOM view (#31807) +- Fix for "Filter dags by tag" flickering on initial load of dags.html (#31578) +- Fix where expanding ``resizer`` would not expanse grid view (#31581) +- Fix MappedOperator-BaseOperator attr sync check (#31520) +- Always pass named ``type_`` arg to drop_constraint (#31306) +- Fix bad ``drop_constraint`` call in migrations (#31302) +- Resolving problems with redesigned grid view (#31232) +- Support ``requirepass`` redis sentinel (#30352) +- Fix webserver crash when calling get ``/config`` (#31057) -``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Misc/Internal +""""""""""""" +- Modify pathspec version restriction (#33349) +- Refactor: Simplify code in ``dag_processing`` (#33161) +- For now limit ``Pydantic`` to ``< 2.0.0`` (#33235) +- Refactor: Simplify code in models (#33181) +- Add elasticsearch group to pre-2.7 defaults (#33166) +- Refactor: Simplify dict manipulation in airflow/cli (#33159) +- 
Remove redundant dict.keys() call (#33158) +- Upgrade ruff to latest 0.0.282 version in pre-commits (#33152) +- Move openlineage configuration to provider (#33124) +- Replace State by TaskInstanceState in Airflow executors (#32627) +- Get rid of Python 2 numeric relics (#33050) +- Remove legacy dag code (#33058) +- Remove legacy task instance modal (#33060) +- Remove old graph view (#32958) +- Move CeleryExecutor to the celery provider (#32526, #32628) +- Move all k8S classes to ``cncf.kubernetes`` provider (#32767, #32891) +- Refactor existence-checking SQL to helper (#32790) +- Extract Dask executor to new daskexecutor provider (#32772) +- Remove atlas configuration definition (#32776) +- Add Redis task handler (#31855) +- Move writing configuration for webserver to main (webserver limited) (#32766) +- Improve getting the query count in Airflow API endpoints (#32630) +- Remove click upper bound (#32634) +- Add D400 ``pydocstyle`` check - core Airflow only (#31297) +- D205 Support (#31742, #32575, #32213, #32212, #32591, #32449, #32450) +- Bump word-wrap from ``1.2.3 to 1.2.4`` in ``/airflow/www`` (#32680) +- Strong-type all single-state enum values (#32537) +- More strong typed state conversion (#32521) +- SQL query improvements in utils/db.py (#32518) +- Bump semver from ``6.3.0 to 6.3.1`` in ``/airflow/www`` (#32506) +- Bump jsonschema version to ``4.18.0`` (#32445) +- Bump ``stylelint`` from ``13.13.1 to 15.10.1`` in ``/airflow/www`` (#32435) +- Bump tough-cookie from ``4.0.0 to 4.1.3`` in ``/airflow/www`` (#32443) +- upgrade flask-appbuilder (#32054) +- Support ``Pydantic`` 2 (#32366) +- Limit click until we fix mypy issues (#32413) +- A couple of minor cleanups (#31890) +- Replace State usages with strong-typed ``enums`` (#31735) +- Upgrade ruff to ``0.272`` (#31966) +- Better error message when serializing callable without name (#31778) +- Improve the views module a bit (#31661) +- Remove ``asynctest`` (#31664) +- Refactor sqlalchemy queries to ``2.0`` style (#31569, #31772, #32350, #32339, #32474, #32645) +- Remove Python ``3.7`` support (#30963) +- Bring back min-airflow-version for preinstalled providers (#31469) +- Docstring improvements (#31375) +- Improve typing in SchedulerJobRunner (#31285) +- Upgrade ruff to ``0.0.262`` (#30809) +- Upgrade to MyPy ``1.2.0`` (#30687) -``airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Docs only changes +""""""""""""""""" +- Clarify UI user types in security model (#33021) +- Add links to ``DAGRun / DAG / Task`` in templates-ref.rst (#33013) +- Add docs of how to test for DAG Import Errors (#32811) +- Clean-up of our new security page (#32951) +- Cleans up Extras reference page (#32954) +- Update Dag trigger API and command docs (#32696) +- Add deprecation info to the Airflow modules and classes docstring (#32635) +- Formatting installation doc to improve readability (#32502) +- Fix triggerer HA doc (#32454) +- Add type annotation to code examples (#32422) +- Document cron and delta timetables (#32392) +- Update index.rst doc to correct grammar (#32315) +- Fixing small typo in python.py (#31474) +- Separate out and clarify policies for providers (#30657) +- Fix docs: add an "apache" prefix to pip install (#30681) -The 'properties' and 'jars' properties for the Dataproc related operators (\ ``DataprocXXXOperator``\ ) have been renamed from -``dataproc_xxxx_properties`` and ``dataproc_xxx_jars`` to 
``dataproc_properties`` -and ``dataproc_jars``\ respectively. -Arguments for dataproc_properties dataproc_jars -``airflow.providers.google.cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Airflow 2.6.3 (2023-07-10) +-------------------------- -To obtain pylint compatibility the ``filter`` argument in ``CloudDataTransferServiceCreateJobOperator`` -has been renamed to ``request_filter``. +Significant Changes +^^^^^^^^^^^^^^^^^^^ -``airflow.providers.google.cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Default allowed pattern of a run_id has been changed to ``^[A-Za-z0-9_.~:+-]+$`` (#32293). +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" +Previously, there was no validation on the run_id string. There is now a validation regex that +can be set by configuring ``allowed_run_id_pattern`` in ``scheduler`` section. - To obtain pylint compatibility the ``filter`` argument in ``CloudDataTransferServiceHook.list_transfer_job`` and - ``CloudDataTransferServiceHook.list_transfer_operations`` has been renamed to ``request_filter``. +Bug Fixes +""""""""" +- Use linear time regular expressions (#32303) +- Fix triggerers alive check and add a new conf for triggerer heartbeat rate (#32123) +- Catch the exception that triggerer initialization failed (#31999) +- Hide sensitive values from extra in connection edit form (#32309) +- Sanitize ``DagRun.run_id`` and allow flexibility (#32293) +- Add triggerer canceled log (#31757) +- Fix try number shown in the task view (#32361) +- Retry transactions on occasional deadlocks for rendered fields (#32341) +- Fix behaviour of LazyDictWithCache when import fails (#32248) +- Remove ``executor_class`` from Job - fixing backfill for custom executors (#32219) +- Fix bugged singleton implementation (#32218) +- Use ``mapIndex`` to display extra links per mapped task. (#32154) +- Ensure that main triggerer thread exits if the async thread fails (#32092) +- Use ``re2`` for matching untrusted regex (#32060) +- Render list items in rendered fields view (#32042) +- Fix hashing of ``dag_dependencies`` in serialized dag (#32037) +- Return ``None`` if an XComArg fails to resolve in a multiple_outputs Task (#32027) +- Check for DAG ID in query param from url as well as kwargs (#32014) +- Flash an error message instead of failure in ``rendered-templates`` when map index is not found (#32011) +- Fix ``ExternalTaskSensor`` when there is no task group TIs for the current execution date (#32009) +- Fix number param html type in trigger template (#31980, #31946) +- Fix masking nested variable fields (#31964) +- Fix ``operator_extra_links`` property serialization in mapped tasks (#31904) +- Decode old-style nested Xcom value (#31866) +- Add a check for trailing slash in webserver base_url (#31833) +- Fix connection uri parsing when the host includes a scheme (#31465) +- Fix database session closing with ``xcom_pull`` and ``inlets`` (#31128) +- Fix DAG's ``on_failure_callback`` is not invoked when task failed during testing dag. 
(#30965) +- Fix airflow module version check when using ``ExternalPythonOperator`` and debug logging level (#30367) -``airflow.providers.google.cloud.hooks.bigquery.BigQueryHook`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Misc/Internal +""""""""""""" +- Fix ``task.sensor`` annotation in type stub (#31954) +- Limit ``Pydantic`` to ``< 2.0.0`` until we solve ``2.0.0`` incompatibilities (#32312) +- Fix ``Pydantic`` 2 pickiness about model definition (#32307) -In general all hook methods are decorated with ``@GoogleBaseHook.fallback_to_default_project_id`` thus -parameters to hook can only be passed via keyword arguments. +Doc only changes +"""""""""""""""" +- Add explanation about tag creation and cleanup (#32406) +- Minor updates to docs (#32369, #32315, #32310, #31794) +- Clarify Listener API behavior (#32269) +- Add information for users who ask for requirements (#32262) +- Add links to DAGRun / DAG / Task in Templates Reference (#32245) +- Add comment to warn off a potential wrong fix (#32230) +- Add a note that we'll need to restart triggerer to reflect any trigger change (#32140) +- Adding missing hyperlink to the tutorial documentation (#32105) +- Added difference between Deferrable and Non-Deferrable Operators (#31840) +- Add comments explaining need for special "trigger end" log message (#31812) +- Documentation update on Plugin updates. (#31781) +- Fix SemVer link in security documentation (#32320) +- Update security model of Airflow (#32098) +- Update references to restructured documentation from Airflow core (#32282) +- Separate out advanced logging configuration (#32131) +- Add ``™`` to Airflow in prominent places (#31977) -* ``create_empty_table`` method accepts now ``table_resource`` parameter. If provided all - other parameters are ignored. -* ``create_empty_dataset`` will now use values from ``dataset_reference`` instead of raising error - if parameters were passed in ``dataset_reference`` and as arguments to method. Additionally validation - of ``dataset_reference`` is done using ``Dataset.from_api_repr``. Exception and log messages has been - changed. -* ``update_dataset`` requires now new ``fields`` argument (breaking change) -* ``delete_dataset`` has new signature (dataset_id, project_id, ...) - previous one was (project_id, dataset_id, ...) (breaking change) -* ``get_tabledata`` returns list of rows instead of API response in dict format. This method is deprecated in - favor of ``list_rows``. (breaking change) +Airflow 2.6.2 (2023-06-17) +-------------------------- -``airflow.providers.google.cloud.hooks.cloud_build.CloudBuildHook`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Significant Changes +^^^^^^^^^^^^^^^^^^^ -``airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +No significant changes. -The ``api_version`` has been removed and will not be used since we migrate ``CloudBuildHook`` from using - Discovery API to native google-cloud-build python library. 
+Bug Fixes +^^^^^^^^^ +- Cascade update of TaskInstance to TaskMap table (#31445) +- Fix Kubernetes executors detection of deleted pods (#31274) +- Use keyword parameters for migration methods for mssql (#31309) +- Control permissibility of driver config in extra from airflow.cfg (#31754) +- Fixing broken links in openapi/v1.yaml (#31619) +- Hide old alert box when testing connection with different value (#31606) +- Add TriggererStatus to OpenAPI spec (#31579) +- Resolving issue where Grid won't un-collapse when Details is collapsed (#31561) +- Fix sorting of tags (#31553) +- Add the missing ``map_index`` to the xcom key when skipping downstream tasks (#31541) +- Fix airflow users delete CLI command (#31539) +- Include triggerer health status in Airflow ``/health`` endpoint (#31529) +- Remove dependency already registered for this task warning (#31502) +- Use kube_client over default CoreV1Api for deleting pods (#31477) +- Ensure min backoff in base sensor is at least 1 (#31412) +- Fix ``max_active_tis_per_dagrun`` for Dynamic Task Mapping (#31406) +- Fix error handling when pre-importing modules in DAGs (#31401) +- Fix dropdown default and adjust tutorial to use 42 as default for proof (#31400) +- Fix crash when clearing run with task from normal to mapped (#31352) +- Make BaseJobRunner a generic on the job class (#31287) +- Fix ``url_for_asset`` fallback and 404 on DAG Audit Log (#31233) +- Don't present an undefined execution date (#31196) +- Added spinner activity while the logs load (#31165) +- Include rediss to the list of supported URL schemes (#31028) +- Optimize scheduler by skipping "non-schedulable" DAGs (#30706) +- Save scheduler execution time during search for queued dag_runs (#30699) +- Fix ExternalTaskSensor to work correctly with task groups (#30742) +- Fix DAG.access_control can't sync when clean access_control (#30340) +- Fix failing get_safe_url tests for latest Python 3.8 and 3.9 (#31766) +- Fix typing for POST user endpoint (#31767) +- Fix wrong update for nested group default args (#31776) +- Fix overriding ``default_args`` in nested task groups (#31608) +- Mark ``[secrets] backend_kwargs`` as a sensitive config (#31788) +- Executor events are not always "exited" here (#30859) +- Validate connection IDs (#31140) -The ``body`` parameter in ``CloudBuildCreateBuildOperator`` has been deprecated. - Instead, you should pass body using the ``build`` parameter. +Misc/Internal +""""""""""""" +- Add Python 3.11 support (#27264) +- Replace unicodecsv with standard csv library (#31693) +- Bring back unicodecsv as dependency of Airflow (#31814) +- Remove found_descendents param from get_flat_relative_ids (#31559) +- Fix typing in external task triggers (#31490) +- Wording the next and last run DAG columns better (#31467) +- Skip auto-document things with :meta private: (#31380) +- Add an example for sql_alchemy_connect_args conf (#31332) +- Convert dask upper-binding into exclusion (#31329) +- Upgrade FAB to 4.3.1 (#31203) +- Added metavar and choices to --state flag in airflow dags list-jobs CLI for suggesting valid state arguments. 
(#31308)
+- Use only one line for tmp dir log (#31170)
+- Rephrase comment in setup.py (#31312)
+- Add fullname to owner on logging (#30185)
+- Make connection id validation consistent across interface (#31282)
+- Use single source of truth for sensitive config items (#31820)
-``airflow.providers.google.cloud.hooks.dataflow.DataflowHook.start_python_dataflow``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Doc only changes
+^^^^^^^^^^^^^^^^
+- Add docstring and signature for _read_remote_logs (#31623)
+- Remove note about triggerer being 3.7+ only (#31483)
+- Fix version support information (#31468)
+- Add missing BashOperator import to documentation example (#31436)
+- Fix task.branch error caused by incorrect initial parameter (#31265)
+- Update callbacks documentation (errors and context) (#31116)
+- Add an example for dynamic task mapping with non-TaskFlow operator (#29762)
+- Few doc fixes - links, grammar and wording (#31719)
+- Add description in a few more places about adding airflow to pip install (#31448)
+- Fix table formatting in docker build documentation (#31472)
+- Update documentation for constraints installation (#31882)
-``airflow.providers.google.cloud.hooks.dataflow.DataflowHook.start_python_dataflow``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Airflow 2.6.1 (2023-05-16)
+--------------------------
-``airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Significant Changes
+^^^^^^^^^^^^^^^^^^^
-Change python3 as Dataflow Hooks/Operators default interpreter
+Clarifications of the external Health Check mechanism and using ``Job`` classes (#31277).
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-Now the ``py_interpreter`` argument for DataFlow Hooks/Operators has been changed from python2 to python3.
+In the past, SchedulerJob and other ``*Job`` classes were known to have been used to perform
+external health checks for Airflow components. Those are, however, Airflow DB ORM related classes.
+The DB models and database structure of Airflow are considered an internal implementation detail, following the
+`public interface `_.
+Therefore, they should not be used for external health checks. Instead, you should use the
+``airflow jobs check`` CLI command (introduced in Airflow 2.1) for that purpose.
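+For example, a minimal external health check (a sketch only; adjust the job type and hostname to
+the component being probed) could run:
+
+.. code-block:: bash
+
+    # Probe scheduler liveness via the CLI instead of querying Job ORM rows directly
+    airflow jobs check --job-type SchedulerJob --hostname "$(hostname)"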
-``airflow.providers.google.common.hooks.base_google.GoogleBaseHook`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Bug Fixes +^^^^^^^^^ +- Fix calculation of health check threshold for SchedulerJob (#31277) +- Fix timestamp parse failure for k8s executor pod tailing (#31175) +- Make sure that DAG processor job row has filled value in ``job_type`` column (#31182) +- Fix section name reference for ``api_client_retry_configuration`` (#31174) +- Ensure the KPO runs pod mutation hooks correctly (#31173) +- Remove worrying log message about redaction from the OpenLineage plugin (#31149) +- Move ``interleave_timestamp_parser`` config to the logging section (#31102) +- Ensure that we check worker for served logs if no local or remote logs found (#31101) +- Fix ``MappedTaskGroup`` import in taskinstance file (#31100) +- Format DagBag.dagbag_report() Output (#31095) +- Mask task attribute on task detail view (#31125) +- Fix template error when iterating None value and fix params documentation (#31078) +- Fix ``apache-hive`` extra so it installs the correct package (#31068) +- Fix issue with zip files in DAGs folder when pre-importing Airflow modules (#31061) +- Move TaskInstanceKey to a separate file to fix circular import (#31033, #31204) +- Fix deleting DagRuns and TaskInstances that have a note (#30987) +- Fix ``airflow providers get`` command output (#30978) +- Fix Pool schema in the OpenAPI spec (#30973) +- Add support for dynamic tasks with template fields that contain ``pandas.DataFrame`` (#30943) +- Use the Task Group explicitly passed to 'partial' if any (#30933) +- Fix ``order_by`` request in list DAG rest api (#30926) +- Include node height/width in center-on-task logic (#30924) +- Remove print from dag trigger command (#30921) +- Improve task group UI in new graph (#30918) +- Fix mapped states in grid view (#30916) +- Fix problem with displaying graph (#30765) +- Fix backfill KeyError when try_number out of sync (#30653) +- Re-enable clear and setting state in the TaskInstance UI (#30415) +- Prevent DagRun's ``state`` and ``start_date`` from being reset when clearing a task in a running DagRun (#30125) -To simplify the code, the decorator provide_gcp_credential_file has been moved from the inner-class. +Misc/Internal +""""""""""""" +- Upper bind dask until they solve a side effect in their test suite (#31259) +- Show task instances affected by clearing in a table (#30633) +- Fix missing models in API documentation (#31021) -Instead of ``@GoogleBaseHook._Decorators.provide_gcp_credential_file``\ , -you should write ``@GoogleBaseHook.provide_gcp_credential_file`` +Doc only changes +"""""""""""""""" +- Improve description of the ``dag_processing.processes`` metric (#30891) +- Improve Quick Start instructions (#30820) +- Add section about missing task logs to the FAQ (#30717) +- Mount the ``config`` directory in docker compose (#30662) +- Update ``version_added`` config field for ``might_contain_dag`` and ``metrics_allow_list`` (#30969) -``airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -It is highly recommended to have 1TB+ disk size for Dataproc to have sufficient throughput: -https://cloud.google.com/compute/docs/disks/performance +Airflow 2.6.0 (2023-04-30) +-------------------------- -Hence, the default value for ``master_disk_size`` in ``DataprocCreateClusterOperator`` has been changed from 500GB to 1TB. 
+Significant Changes
+^^^^^^^^^^^^^^^^^^^

-Generating Cluster Config
-"""""""""""""""""""""""""
+Default permissions of file task handler log directories and files have been changed to "owner + group" writeable (#29506).
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+The default setting handles the case where impersonation is needed and both users (airflow and the impersonated user)
+have the same group set as their main group. Previously, the default was also other-writeable; users may still
+choose the other-writeable setting if they wish by configuring ``file_task_handler_new_folder_permissions``
+and ``file_task_handler_new_file_permissions`` in the ``logging`` section.

-If you are upgrading from Airflow 1.10.x and are not using **CLUSTER_CONFIG**\ ,
-you can easily generate config using **make()** of ``airflow.providers.google.cloud.operators.dataproc.ClusterGenerator``
+SLA callbacks no longer add files to the dag processor manager's queue (#30076)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+This stops SLA callbacks from keeping the dag processor manager permanently busy. It means reduced CPU usage,
+and fixes issues where SLAs stop the system from seeing changes to existing dag files. Additional metrics have been added to help track queue state.

-This has proved especially useful if you are using the **metadata** argument from the older API; refer to `AIRFLOW-16911 `_ for details.
+The ``cleanup()`` method in BaseTrigger is now defined as asynchronous (following the async/await pattern) (#30152).
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+This is potentially a breaking change for any custom trigger implementations that override the ``cleanup()``
+method and use synchronous code. However, using synchronous operations in cleanup was technically wrong,
+because the method was executed in the main loop of the Triggerer and it introduced unnecessary delays
+impacting other triggers. The change is unlikely to affect any existing trigger implementations.

-e.g. your cluster creation may have looked like this in **v1.10.x**
+The gauge ``scheduler.tasks.running`` no longer exists (#30374)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+The gauge never worked and its value was always 0. Having an accurate
+value for this metric is complex, so it has been decided that removing this gauge makes
+more sense than fixing it with no certainty of the correctness of its value.

-.. code-block:: python
+Consolidate handling of tasks stuck in queued under new ``task_queued_timeout`` config (#30375)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+Logic for handling tasks stuck in the queued state has been consolidated, and all the configurations
+responsible for timing out stuck queued tasks have been deprecated and merged into
+``[scheduler] task_queued_timeout``. The configurations that have been deprecated are
+``[kubernetes] worker_pods_pending_timeout``, ``[celery] stalled_task_timeout``, and
+``[celery] task_adoption_timeout``. If any of these configurations are set, the longest timeout will be
+respected. For example, if ``[celery] stalled_task_timeout`` is 1200, and ``[scheduler] task_queued_timeout``
+is 600, Airflow will set ``[scheduler] task_queued_timeout`` to 1200.
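A configuration sketch of the consolidated setting (the value is illustrative, not a recommendation):

.. code-block:: ini

    [scheduler]
    # replaces [kubernetes] worker_pods_pending_timeout,
    # [celery] stalled_task_timeout and [celery] task_adoption_timeout
    task_queued_timeout = 600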
-    path = "gs://goog-dataproc-initialization-actions-us-central1/python/pip-install.sh"

+Improvement Changes
+^^^^^^^^^^^^^^^^^^^

-    create_cluster = DataprocClusterCreateOperator(
-        task_id="create_dataproc_cluster",
-        cluster_name="test",
-        project_id="test",
-        zone="us-central1-a",
-        region="us-central1",
-        master_machine_type="n1-standard-4",
-        worker_machine_type="n1-standard-4",
-        num_workers=2,
-        storage_bucket="test_bucket",
-        init_actions_uris=[path],
-        metadata={"PIP_PACKAGES": "pyyaml requests pandas openpyxl"},
-    )

+Display only the running configuration in configurations view (#28892)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+The configurations view now only displays the running configuration. Previously, the default configuration
+was displayed at the top but it was not obvious whether this default configuration was overridden or not.
+Subsequently, the non-documented endpoint ``/configuration?raw=true`` is deprecated and will be removed in
+Airflow 3.0. The HTTP response now returns an additional ``Deprecation`` header. The ``/config`` endpoint on
+the REST API is the standard way to fetch Airflow configuration programmatically.

-After upgrading to **v2.x.x** and using **CLUSTER_CONFIG**\ , it will look like the following:

+Explicit skipped states list for ExternalTaskSensor (#29933)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+ExternalTaskSensor now has an explicit ``skipped_states`` list.

-.. code-block:: python

+Miscellaneous Changes
+^^^^^^^^^^^^^^^^^^^^^

-    path = "gs://goog-dataproc-initialization-actions-us-central1/python/pip-install.sh"

+Handle OverflowError on exponential backoff in next_run_calculation (#28172)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+The maximum task retry delay is set to 24h (86400s) by default. You can change it globally via the
+``core.max_task_retry_delay`` parameter.

-    CLUSTER_CONFIG = ClusterGenerator(
-        project_id="test",
-        zone="us-central1-a",
-        master_machine_type="n1-standard-4",
-        worker_machine_type="n1-standard-4",
-        num_workers=2,
-        storage_bucket="test",
-        init_actions_uris=[path],
-        metadata={"PIP_PACKAGES": "pyyaml requests pandas openpyxl"},
-    ).make()

+Move Hive macros to the provider (#28538)
+"""""""""""""""""""""""""""""""""""""""""
+The Hive macros (``hive.max_partition``, ``hive.closest_ds_partition``) are available only when the Hive
+provider is installed. Please install Hive provider > 5.1.0 when using those macros.

-    create_cluster_operator = DataprocClusterCreateOperator(
-        task_id="create_dataproc_cluster",
-        cluster_name="test",
-        project_id="test",
-        region="us-central1",
-        cluster_config=CLUSTER_CONFIG,
-    )

+Updated app to support configuring the caching hash method for FIPS v2 (#30675)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+Various updates for FIPS-compliance when running Airflow in Python 3.9+. This includes a new webserver option,
+``caching_hash_method``, for changing the default Flask caching method.
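A configuration sketch of the new option (``sha256`` is an assumed FIPS-friendly value; consult the
configuration reference for your Airflow version for the actually supported values):

.. code-block:: ini

    [webserver]
    # the default is md5, which is not allowed under FIPS
    caching_hash_method = sha256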
-``airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +New Features +^^^^^^^^^^^^ +- AIP-50 Trigger DAG UI Extension with Flexible User Form Concept (#27063,#29376) +- Skip PythonVirtualenvOperator task when it returns a provided exit code (#30690) +- rename skip_exit_code to skip_on_exit_code and allow providing multiple codes (#30692) +- Add skip_on_exit_code also to ExternalPythonOperator (#30738) +- Add ``max_active_tis_per_dagrun`` for Dynamic Task Mapping (#29094) +- Add serializer for pandas dataframe (#30390) +- Deferrable ``TriggerDagRunOperator`` (#30292) +- Add command to get DAG Details via CLI (#30432) +- Adding ContinuousTimetable and support for @continuous schedule_interval (#29909) +- Allow customized rules to check if a file has dag (#30104) +- Add a new Airflow conf to specify a SSL ca cert for Kubernetes client (#30048) +- Bash sensor has an explicit retry code (#30080) +- Add filter task upstream/downstream to grid view (#29885) +- Add testing a connection via Airflow CLI (#29892) +- Support deleting the local log files when using remote logging (#29772) +- ``Blocklist`` to disable specific metric tags or metric names (#29881) +- Add a new graph inside of the grid view (#29413) +- Add database ``check_migrations`` config (#29714) +- add output format arg for ``cli.dags.trigger`` (#29224) +- Make json and yaml available in templates (#28930) +- Enable tagged metric names for existing Statsd metric publishing events | influxdb-statsd support (#29093) +- Add arg --yes to ``db export-archived`` command. (#29485) +- Make the policy functions pluggable (#28558) +- Add ``airflow db drop-archived`` command (#29309) +- Enable individual trigger logging (#27758) +- Implement new filtering options in graph view (#29226) +- Add triggers for ExternalTask (#29313) +- Add command to export purged records to CSV files (#29058) +- Add ``FileTrigger`` (#29265) +- Emit DataDog statsd metrics with metadata tags (#28961) +- Add some statsd metrics for dataset (#28907) +- Add --overwrite option to ``connections import`` CLI command (#28738) +- Add general-purpose "notifier" concept to DAGs (#28569) +- Add a new conf to wait past_deps before skipping a task (#27710) +- Add Flink on K8s Operator (#28512) +- Allow Users to disable SwaggerUI via configuration (#28354) +- Show mapped task groups in graph (#28392) +- Log FileTaskHandler to work with KubernetesExecutor's multi_namespace_mode (#28436) +- Add a new config for adapting masked secrets to make it easier to prevent secret leakage in logs (#28239) +- List specific config section and its values using the cli (#28334) +- KubernetesExecutor multi_namespace_mode can use namespace list to avoid requiring cluster role (#28047) +- Automatically save and allow restore of recent DAG run configs (#27805) +- Added exclude_microseconds to cli (#27640) -We changed signature of ``BigQueryGetDatasetTablesOperator``. 
+Improvements +"""""""""""" +- Rename most pod_id usage to pod_name in KubernetesExecutor (#29147) +- Update the error message for invalid use of poke-only sensors (#30821) +- Update log level in scheduler critical section edge case (#30694) +- AIP-51 Removing Executor Coupling from Core Airflow (`AIP-51 `__) +- Add multiple exit code handling in skip logic for BashOperator (#30739) +- Updated app to support configuring the caching hash method for FIPS v2 (#30675) +- Preload airflow imports before dag parsing to save time (#30495) +- Improve task & run actions ``UX`` in grid view (#30373) +- Speed up TaskGroups with caching property of group_id (#30284) +- Use the engine provided in the session (#29804) +- Type related import optimization for Executors (#30361) +- Add more type hints to the code base (#30503) +- Always use self.appbuilder.get_session in security managers (#30233) +- Update SQLAlchemy ``select()`` to new style (#30515) +- Refactor out xcom constants from models (#30180) +- Add exception class name to DAG-parsing error message (#30105) +- Rename statsd_allow_list and statsd_block_list to ``metrics_*_list`` (#30174) +- Improve serialization of tuples and sets (#29019) +- Make cleanup method in trigger an async one (#30152) +- Lazy load serialization modules (#30094) +- SLA callbacks no longer add files to the dag_processing manager queue (#30076) +- Add task.trigger rule to grid_data (#30130) +- Speed up log template sync by avoiding ORM (#30119) +- Separate cli_parser.py into two modules (#29962) +- Explicit skipped states list for ExternalTaskSensor (#29933) +- Add task state hover highlighting to new graph (#30100) +- Store grid tabs in url params (#29904) +- Use custom Connexion resolver to load lazily (#29992) +- Delay Kubernetes import in secret masker (#29993) +- Delay ConnectionModelView init until it's accessed (#29946) +- Scheduler, make stale DAG deactivation threshold configurable instead of using dag processing timeout (#29446) +- Improve grid view height calculations (#29563) +- Avoid importing executor during conf validation (#29569) +- Make permissions for FileTaskHandler group-writeable and configurable (#29506) +- Add colors in help outputs of Airflow CLI commands #28789 (#29116) +- Add a param for get_dags endpoint to list only unpaused dags (#28713) +- Expose updated_at filter for dag run and task instance endpoints (#28636) +- Increase length of user identifier columns (#29061) +- Update gantt chart UI to display queued state of tasks (#28686) +- Add index on log.dttm (#28944) +- Display only the running configuration in configurations view (#28892) +- Cap dropdown menu size dynamically (#28736) +- Added JSON linter to connection edit / add UI for field extra. On connection edit screen, existing extra data will be displayed indented (#28583) +- Use labels instead of pod name for pod log read in k8s exec (#28546) +- Use time not tries for queued & running re-checks. (#28586) +- CustomTTYColoredFormatter should inherit TimezoneAware formatter (#28439) +- Improve past depends handling in Airflow CLI tasks.run command (#28113) +- Support using a list of callbacks in ``on_*_callback/sla_miss_callbacks`` (#28469) +- Better table name validation for db clean (#28246) +- Use object instead of array in config.yml for config template (#28417) +- Add markdown rendering for task notes. 
(#28245) +- Show mapped task groups in grid view (#28208) +- Add ``renamed`` and ``previous_name`` in config sections (#28324) +- Speed up most Users/Role CLI commands (#28259) +- Speed up Airflow role list command (#28244) +- Refactor serialization (#28067, #30819, #30823) +- Allow longer pod names for k8s executor / KPO (#27736) +- Updates health check endpoint to include ``triggerer`` status (#27755) -Before: -.. code-block:: python +Bug Fixes +""""""""" +- Fix static_folder for cli app (#30952) +- Initialize plugins for cli appbuilder (#30934) +- Fix dag file processor heartbeat to run only if necessary (#30899) +- Fix KubernetesExecutor sending state to scheduler (#30872) +- Count mapped upstream only if all are finished (#30641) +- ExternalTaskSensor: add external_task_group_id to template_fields (#30401) +- Improve url detection for task instance details (#30779) +- Use material icons for dag import error banner (#30771) +- Fix misc grid/graph view UI bugs (#30752) +- Add a collapse grid button (#30711) +- Fix d3 dependencies (#30702) +- Simplify logic to resolve tasks stuck in queued despite stalled_task_timeout (#30375) +- When clearing task instances try to get associated DAGs from database (#29065) +- Fix mapped tasks partial arguments when DAG default args are provided (#29913) +- Deactivate DAGs deleted from within zip files (#30608) +- Recover from ``too old resource version exception`` by retrieving the latest ``resource_version`` (#30425) +- Fix possible race condition when refreshing DAGs (#30392) +- Use custom validator for OpenAPI request body (#30596) +- Fix ``TriggerDagRunOperator`` with deferrable parameter (#30406) +- Speed up dag runs deletion (#30330) +- Do not use template literals to construct html elements (#30447) +- Fix deprecation warning in ``example_sensor_decorator`` DAG (#30513) +- Avoid logging sensitive information in triggerer job log (#30110) +- Add a new parameter for base sensor to catch the exceptions in poke method (#30293) +- Fix dag run conf encoding with non-JSON serializable values (#28777) +- Added fixes for Airflow to be usable on Windows Dask-Workers (#30249) +- Force DAG last modified time to UTC (#30243) +- Fix EmptySkipOperator in example dag (#30269) +- Make the webserver startup respect update_fab_perms (#30246) +- Ignore error when changing log folder permissions (#30123) +- Disable ordering DagRuns by note (#30043) +- Fix reading logs from finished KubernetesExecutor worker pod (#28817) +- Mask out non-access bits when comparing file modes (#29886) +- Remove Run task action from UI (#29706) +- Fix log tailing issues with legacy log view (#29496) +- Fixes to how DebugExecutor handles sensors (#28528) +- Ensure that pod_mutation_hook is called before logging the pod name (#28534) +- Handle OverflowError on exponential backoff in next_run_calculation (#28172) - def __init__( - dataset_id: str, - dataset_resource: dict, - # ... - ): ... 
+Misc/Internal
+"""""""""""""
+- Make eager upgrade additional dependencies optional (#30811)
+- Upgrade to pip 23.1.1 (#30808)
+- Remove protobuf limitation from eager upgrade (#30182)
+- Deprecate ``skip_exit_code`` in ``BashOperator`` (#30734)
+- Remove gauge ``scheduler.tasks.running`` (#30374)
+- Bump json5 to 1.0.2 and eslint-plugin-import to 2.27.5 in ``/airflow/www`` (#30568)
+- Add tests to PythonOperator (#30362)
+- Add asgiref as a core dependency (#30527)
+- Discovery safe mode toggle comment clarification (#30459)
+- Upgrade moment-timezone package to fix Tehran tz (#30455)
+- Bump loader-utils from 2.0.0 to 2.0.4 in ``/airflow/www`` (#30319)
+- Bump babel-loader from 8.1.0 to 9.1.0 in ``/airflow/www`` (#30316)
+- DagBag: Use ``dag.fileloc`` instead of ``dag.full_filepath`` in exception message (#30610)
+- Change log level of serialization information (#30239)
+- Minor DagRun helper method cleanup (#30092)
+- Improve type hinting in stats.py (#30024)
+- Limit ``importlib-metadata`` backport to < 5.0.0 (#29924)
+- Align cncf provider file names with AIP-21 (#29905)
+- Upgrade FAB to 4.3.0 (#29766)
+- Clear ExecutorLoader cache in tests (#29849)
+- Lazy load Task Instance logs in UI (#29827)
+- added warning log for max page limit exceeding api calls (#29788)
+- Aggressively cache entry points in process (#29625)
+- Don't use ``importlib.metadata`` to get Version for speed (#29723)
+- Upgrade Mypy to 1.0 (#29468)
+- Rename ``db export-cleaned`` to ``db export-archived`` (#29450)
+- listener: simplify API by replacing SQLAlchemy event-listening by direct calls (#29289)
+- No multi-line log entry for bash env vars (#28881)
+- Switch to ruff for faster static checks (#28893)
+- Remove horizontal lines in TI logs (#28876)
+- Make allowed_deserialization_classes more intuitive (#28829)
+- Propagate logs to stdout when in k8s executor pod (#28440, #30860)
+- Fix code readability, add docstrings to json_client (#28619)
+- AIP-51 - Misc. Compatibility Checks (#28375)
+- Fix is_local for LocalKubernetesExecutor (#28288)
+- Move Hive macros to the provider (#28538)
+- Rerun flaky PinotDB integration test (#28562)
+- Add pre-commit hook to check session default value (#28007)
+- Refactor get_mapped_group_summaries for web UI (#28374)
+- Add support for k8s 1.26 (#28320)
+- Replace ``freezegun`` with time-machine (#28193)
+- Completed D400 for ``airflow/kubernetes/*`` (#28212)
+- Completed D400 for multiple folders (#27969)
+- Drop k8s 1.21 and 1.22 support (#28168)
+- Remove unused task_queue attr from k8s scheduler class (#28049)
+- Completed D400 for multiple folders (#27767, #27768)

-After:
-.. code-block:: python

+Doc only changes
+""""""""""""""""
+- Add instructions on how to avoid accidental airflow upgrade/downgrade (#30813)
+- Add explicit information about how to write task logs (#30732)
+- Better explanation on how to log from tasks (#30746)
+- Use correct import path for Dataset (#30617)
+- Create ``audit_logs.rst`` (#30405)
+- Adding taskflow API example for sensors (#30344)
+- Add clarification about timezone aware dags (#30467)
+- Clarity params documentation (#30345)
+- Fix unit for task duration metric (#30273)
+- Update dag-run.rst for dead links of cli commands (#30254)
+- Add Write efficient Python code section to Reducing DAG complexity (#30158)
+- Allow to specify which connection, variable or config are being looked up in the backend using ``*_lookup_pattern`` parameters (#29580)
+- Add Documentation for notification feature extension (#29191)
+- Clarify that executor interface is public but instances are not (#29200)
+- Add Public Interface description to Airflow documentation (#28300)
+- Add documentation for task group mapping (#28001)
+- Some fixes to metrics doc (#30290)

-    def __init__(
-        dataset_resource: dict,
-        dataset_id: Optional[str] = None,
-        # ...
-    ): ...

-Changes in ``amazon`` provider package
-""""""""""""""""""""""""""""""""""""""""""

-We strive to ensure that there are no changes that may affect the end user, and your Python files, but this
-release may contain changes that will require changes to your configuration, DAG files or other integrations,
-e.g. custom operators.

-Only changes unique to this provider are described here. You should still pay attention to the changes that
-have been made to the core (including core operators) as they can affect the integration behavior
-of this provider.

-This section describes the changes that have been made, and what you need to do to update your code if
-you use operators or hooks which integrate with Amazon services (including Amazon Web Service - AWS).

-Migration of AWS components
-~~~~~~~~~~~~~~~~~~~~~~~~~~~

-All AWS components (hooks, operators, sensors, example DAGs) will be grouped together as decided in
-`AIP-21 `_. Migrated
-components remain backwards compatible but raise a ``DeprecationWarning`` when imported from the old module.
-The migrated components are:
-.. list-table::
-   :header-rows: 1
-
-   * - Old path
-     - New path
-   * - ``airflow.hooks.S3_hook.S3Hook``
-     - ``airflow.providers.amazon.aws.hooks.s3.S3Hook``
-   * - ``airflow.contrib.hooks.aws_athena_hook.AWSAthenaHook``
-     - ``airflow.providers.amazon.aws.hooks.athena.AWSAthenaHook``
-   * - ``airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook``
-     - ``airflow.providers.amazon.aws.hooks.lambda_function.AwsLambdaHook``
-   * - ``airflow.contrib.hooks.aws_sqs_hook.SQSHook``
-     - ``airflow.providers.amazon.aws.hooks.sqs.SQSHook``
-   * - ``airflow.contrib.hooks.aws_sns_hook.AwsSnsHook``
-     - ``airflow.providers.amazon.aws.hooks.sns.AwsSnsHook``
-   * - ``airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator``
-     - ``airflow.providers.amazon.aws.operators.athena.AWSAthenaOperator``
-   * - ``airflow.contrib.operators.awsbatch.AWSBatchOperator``
-     - ``airflow.providers.amazon.aws.operators.batch.AwsBatchOperator``
-   * - ``airflow.contrib.operators.awsbatch.BatchProtocol``
-     - ``airflow.providers.amazon.aws.hooks.batch_client.AwsBatchProtocol``
-   * - private attrs and methods on ``AWSBatchOperator``
-     - ``airflow.providers.amazon.aws.hooks.batch_client.AwsBatchClient``
-   * - n/a
-     - ``airflow.providers.amazon.aws.hooks.batch_waiters.AwsBatchWaiters``
-   * - ``airflow.contrib.operators.aws_sqs_publish_operator.SQSPublishOperator``
-     - ``airflow.providers.amazon.aws.operators.sqs.SQSPublishOperator``
-   * - ``airflow.contrib.operators.aws_sns_publish_operator.SnsPublishOperator``
-     - ``airflow.providers.amazon.aws.operators.sns.SnsPublishOperator``
-   * - ``airflow.contrib.sensors.aws_athena_sensor.AthenaSensor``
-     - ``airflow.providers.amazon.aws.sensors.athena.AthenaSensor``
-   * - ``airflow.contrib.sensors.aws_sqs_sensor.SQSSensor``
-     - ``airflow.providers.amazon.aws.sensors.sqs.SQSSensor``

-``airflow.providers.amazon.aws.hooks.emr.EmrHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-``airflow.providers.amazon.aws.operators.emr_add_steps.EmrAddStepsOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-``airflow.providers.amazon.aws.operators.emr_create_job_flow.EmrCreateJobFlowOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-``airflow.providers.amazon.aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-The default value for the `aws_conn_id `_ was accidentally set to 's3_default' instead of 'aws_default' in some of the EMR operators in previous
-versions. This led to ``EmrStepSensor`` not being able to find its corresponding EMR cluster. With the new
-changes in ``EmrAddStepsOperator``, ``EmrTerminateJobFlowOperator`` and ``EmrCreateJobFlowOperator`` this issue has been
-solved.

-``airflow.providers.amazon.aws.operators.batch.AwsBatchOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-The ``AwsBatchOperator`` was refactored to extract an ``AwsBatchClient`` (and inherit from it). The
-changes are mostly backwards compatible and clarify the public API for these classes; some
-private methods on ``AwsBatchOperator`` for polling a job status were relocated and renamed
-to surface new public methods on ``AwsBatchClient`` (and via inheritance on ``AwsBatchOperator``\ ). A
-couple of job attributes are renamed on an instance of ``AwsBatchOperator``\ ; these were mostly
-used like private attributes but they were surfaced in the public API, so any use of them needs
-to be updated as follows:

-* ``AwsBatchOperator().jobId`` -> ``AwsBatchOperator().job_id``
-* ``AwsBatchOperator().jobName`` -> ``AwsBatchOperator().job_name``

-The ``AwsBatchOperator`` gets a new option to define a custom model for waiting on job status changes.
-The ``AwsBatchOperator`` can use a new ``waiters`` parameter, an instance of ``AwsBatchWaiters``\ , to
-specify that custom job waiters will be used to monitor a batch job. See the latest API
-documentation for details.

-``airflow.providers.amazon.aws.sensors.athena.AthenaSensor``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-Replace parameter ``max_retires`` with ``max_retries`` to fix typo.

-``airflow.providers.amazon.aws.hooks.s3.S3Hook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-Note: The order of arguments has changed for ``check_for_prefix``.
-The ``bucket_name`` is now optional. It falls back to the ``connection schema`` attribute.
-The ``delete_objects`` method now returns ``None`` instead of a response, since the method now makes multiple API requests when the keys list length is > 1000.

-Changes in other provider packages
-""""""""""""""""""""""""""""""""""

-We strive to ensure that there are no changes that may affect the end user and your Python files, but this
-release may contain changes that will require changes to your configuration, DAG files or other integrations,
-e.g. custom operators.

+Airflow 2.5.3 (2023-04-01)
+--------------------------

-Only changes unique to providers are described here. You should still pay attention to the changes that
-have been made to the core (including core operators) as they can affect the integration behavior
-of this provider.

+Significant Changes
+^^^^^^^^^^^^^^^^^^^

-This section describes the changes that have been made, and what you need to do to update your code if
-you use any code located in the ``airflow.providers`` package.

+No significant changes.

-Changed return type of ``list_prefixes`` and ``list_keys`` methods in ``S3Hook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+Bug Fixes
+^^^^^^^^^
+- Fix DagProcessorJob integration for standalone dag-processor (#30278)
+- Fix proper termination of gunicorn when it hangs (#30188)
+- Fix XCom.get_one exactly one exception text (#30183)
+- Correct the VARCHAR size to 250. (#30178)
+- Revert fix for on_failure_callback when task receives a SIGTERM (#30165)
+- Move read only property to DagState to fix generated docs (#30149)
+- Ensure that ``dag.partial_subset`` doesn't mutate task group properties (#30129)
+- Fix inconsistent returned value of ``airflow dags next-execution`` cli command (#30117)
+- Fix www/utils.dag_run_link redirection (#30098)
+- Fix ``TriggerRuleDep`` when the mapped tasks count is 0 (#30084)
+- Dag processor manager, add retry_db_transaction to _fetch_callbacks (#30079)
+- Fix db clean command for mysql db (#29999)
+- Avoid considering EmptyOperator in mini scheduler (#29979)
+- Fix some long known Graph View UI problems (#29971, #30355, #30360)
+- Fix dag docs toggle icon initial angle (#29970)
+- Fix tags selection in DAGs UI (#29944)
+- Including airflow/example_dags/sql/sample.sql in MANIFEST.in (#29883)
+- Fixing broken filter in /taskinstance/list view (#29850)
+- Allow generic param dicts (#29782)
+- Fix update_mask in patch variable route (#29711)
+- Strip markup from app_name if instance_name_has_markup = True (#28894)

-Previously, the ``list_prefixes`` and ``list_keys`` methods returned ``None`` when there were no
-results. The behavior has been changed to return an empty list instead of ``None`` in this
-case.

+Misc/Internal
+^^^^^^^^^^^^^
+- Revert "Also limit importlib on Python 3.9 (#30069)" (#30209)
+- Add custom_operator_name to @task.sensor tasks (#30131)
+- Bump webpack from 5.73.0 to 5.76.0 in /airflow/www (#30112)
+- Formatted config (#30103)
+- Remove upper bound limit of astroid (#30033)
+- Remove accidentally merged vendor daemon patch code (#29895)
+- Fix warning in airflow tasks test command regarding absence of data_interval (#27106)

-Removed HipChat integration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~

+Doc only changes
+^^^^^^^^^^^^^^^^
+- Adding more information regarding top level code (#30040)
+- Update workday example (#30026)
+- Fix some typos in the DAGs docs (#30015)
+- Update set-up-database.rst (#29991)
+- Fix some typos on the kubernetes documentation (#29936)
+- Fix some punctuation and grammar (#29342)

-HipChat has reached end of life and is no longer available.
-For more information please see
-https://community.atlassian.com/t5/Stride-articles/Stride-and-Hipchat-Cloud-have-reached-End-of-Life-updated/ba-p/940248

+Airflow 2.5.2 (2023-03-15)
+--------------------------

-``airflow.providers.salesforce.hooks.salesforce.SalesforceHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+Significant Changes
+^^^^^^^^^^^^^^^^^^^

-Replace parameter ``sandbox`` with ``domain``, according to a change in the simple-salesforce package.

+The date-time fields passed as API parameters or Params should be RFC3339-compliant (#29395)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""

-Rename ``sign_in`` function to ``get_conn``.

+In case of API calls, it was possible that a "+" passed as part of a date-time field was not URL-encoded, and
+such date-time fields could still pass validation. Such date-time parameters should now be URL-encoded (as ``%2B``).

-``airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook.create_segment``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+In case of parameters, we still allow ISO8601-compliant date-times (so for example it is possible that
+' ' was used instead of ``T`` separating date from time and no timezone was specified) but we raise a
+deprecation warning.
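As an illustrative sketch, clients can URL-encode such date-time values with the Python standard
library before calling the API (the date-time value below is a placeholder):

.. code-block:: python

    from urllib.parse import quote

    logical_date = "2023-03-15T10:00:00+00:00"
    # the "+" is encoded as %2B so it survives URL parsing
    encoded = quote(logical_date, safe="")
    print(encoded)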
-Rename parameter name from ``format`` to ``segment_format`` in the PinotAdminHook function ``create_segment`` for pylint compatibility.

+Default for ``[webserver] expose_hostname`` changed to ``False`` (#29547)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""

-``airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_partitions``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+The default for ``[webserver] expose_hostname`` has been set to ``False``, instead of ``True``. This means administrators must opt in to expose webserver hostnames to end users.

-Rename parameter name from ``filter`` to ``partition_filter`` in the HiveMetastoreHook function ``get_partitions`` for pylint compatibility.

+Bug Fixes
+^^^^^^^^^
+- Fix validation of date-time field in API and Parameter schemas (#29395)
+- Fix grid logs for large logs (#29390)
+- Fix on_failure_callback when task receives a SIGTERM (#29743)
+- Update min version of python-daemon to fix containerd file limits (#29916)
+- POST ``/dagRuns`` API should 404 if dag not active (#29860)
+- DAG list sorting lost when switching page (#29756)
+- Fix Scheduler crash when clear a previous run of a normal task that is now a mapped task (#29645)
+- Convert moment with timezone to UTC instead of raising an exception (#29606)
+- Fix clear dag run ``openapi`` spec responses by adding additional return type (#29600)
+- Don't display empty rendered attrs in Task Instance Details page (#29545)
+- Remove section check from get-value command (#29541)
+- Do not show version/node in UI traceback for unauthenticated user (#29501)
+- Make ``prev_logical_date`` variable offset-aware (#29454)
+- Fix nested fields rendering in mapped operators (#29451)
+- Datasets, next_run_datasets, remove unnecessary timestamp filter (#29441)
+- ``Edgemodifier`` refactoring w/ labels in TaskGroup edge case (#29410)
+- Fix Rest API update user output (#29409)
+- Ensure Serialized DAG is deleted (#29407)
+- Persist DAG and task doc values in TaskFlow API if explicitly set (#29399)
+- Redirect to the origin page with all the params (#29212)
+- Fixing Task Duration view in case of manual DAG runs only (#22015) (#29195)
+- Remove poke method to fall back to parent implementation (#29146)
+- PR: Introduced fix to run tasks on Windows systems (#29107)
+- Fix warning in migrations about old config. (#29092)
+- Emit dagrun failed duration when timeout (#29076)
+- Handling error on cluster policy itself (#29056)
+- Fix kerberos authentication for the REST API. (#29054)
+- Fix leak sensitive field via V1EnvVar on exception (#29016)
+- Sanitize url_for arguments before they are passed (#29039)
+- Fix dag run trigger with a note. (#29228)
+- Write action log to DB when DAG run is triggered via API (#28998)
+- Resolve all variables in pickled XCom iterator (#28982)
+- Allow URI without authority and host blocks in ``airflow connections add`` (#28922)
+- Be more selective when adopting pods with KubernetesExecutor (#28899)
+- KubenetesExecutor sends state even when successful (#28871)
+- Annotate KubernetesExecutor pods that we don't delete (#28844)
+- Throttle streaming log reads (#28818)
+- Introduce dag processor job (#28799)
+- Fix #28391 manual task trigger from UI fails for k8s executor (#28394)
+- Logging poke info when external dag is not none and task_id and task_ids are none (#28097)
+- Fix inconsistencies in checking edit permissions for a DAG (#20346)

-``airflow.providers.ftp.hooks.ftp.FTPHook.list_directory``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+Misc/Internal
+^^^^^^^^^^^^^
+- Add a check for not templateable fields (#29821)
+- Removed continue for not in (#29791)
+- Move extra links position in grid view (#29703)
+- Bump ``undici`` from ``5.9.1`` to ``5.19.1`` (#29583)
+- Change expose_hostname default to false (#29547)
+- Change permissions of config/password files created by airflow (#29495)
+- Use newer setuptools ``v67.2.0`` (#29465)
+- Increase max height for grid view elements (#29367)
+- Clarify description of worker control config (#29247)
+- Bump ``ua-parser-js`` from ``0.7.31`` to ``0.7.33`` in ``/airflow/www`` (#29172)
+- Remove upper bound limitation for ``pytest`` (#29086)
+- Check for ``run_id`` url param when linking to ``graph/gantt`` views (#29066)
+- Clarify graph view dynamic task labels (#29042)
+- Fixing import error for dataset (#29007)
+- Update how PythonSensor returns values from ``python_callable`` (#28932)
+- Add dep context description for better log message (#28875)
+- Bump ``swagger-ui-dist`` from ``3.52.0`` to ``4.1.3`` in ``/airflow/www`` (#28824)
+- Limit ``importlib-metadata`` backport to ``< 5.0.0`` (#29924, #30069)

-Remove unnecessary parameter ``nlst`` in the FTPHook function ``list_directory`` for pylint compatibility.

+Doc only changes
+^^^^^^^^^^^^^^^^
+- Update pipeline.rst - Fix query in ``merge_data()`` task (#29158)
+- Correct argument name of Workday timetable in timetable.rst (#29896)
+- Update ref anchor for env var link in Connection how-to doc (#29816)
+- Better description for limit in api (#29773)
+- Description of dag_processing.last_duration (#29740)
+- Update docs re: template_fields typing and subclasses (#29725)
+- Fix formatting of Dataset inlet/outlet note in TaskFlow concepts (#29678)
+- Specific use-case: adding packages via requirements.txt in compose (#29598)
+- Detect is 'docker-compose' existing (#29544)
+- Add Landing Times entry to UI docs (#29511)
+- Improve health checks in example docker-compose and clarify usage (#29408)
+- Remove ``notes`` param from TriggerDagRunOperator docstring (#29298)
+- Use ``schedule`` param rather than ``timetable`` in Timetables docs (#29255)
+- Add trigger process to Airflow Docker docs (#29203)
+- Update set-up-database.rst (#29104)
+- Several improvements to the Params doc (#29062)
+- Email Config docs more explicit env var examples (#28845)
+- Listener plugin example added (#27905)

-``airflow.providers.postgres.hooks.postgres.PostgresHook.copy_expert``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Remove unnecessary parameter ``open`` in the PostgresHook function ``copy_expert`` for pylint compatibility.

+Airflow 2.5.1 (2023-01-20)
+--------------------------
-``airflow.providers.opsgenie.operators.opsgenie_alert.OpsgenieAlertOperator``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+Significant Changes
+^^^^^^^^^^^^^^^^^^^

-Change parameter name from ``visibleTo`` to ``visible_to`` in OpsgenieAlertOperator for pylint compatibility.

+Trigger gevent ``monkeypatching`` via environment variable (#28283)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""

-``airflow.providers.imap.hooks.imap.ImapHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+If you are using gevent for your webserver deployment and used local settings to ``monkeypatch`` gevent,
+you might want to replace local settings patching with an ``_AIRFLOW_PATCH_GEVENT`` environment variable
+set to 1 in your webserver. This ensures gevent patching is done as early as possible.

-``airflow.providers.imap.sensors.imap_attachment.ImapAttachmentSensor``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+Bug Fixes
+^^^^^^^^^
+- Fix masking of non-sensitive environment variables (#28802)
+- Remove swagger-ui extra from connexion and install ``swagger-ui-dist`` via npm package (#28788)
+- Fix ``UIAlert`` should_show when ``AUTH_ROLE_PUBLIC`` set (#28781)
+- Only patch single label when adopting pod (#28776)
+- Update CSRF token to expire with session (#28730)
+- Fix "airflow tasks render" cli command for mapped task instances (#28698)
+- Allow XComArgs for ``external_task_ids`` of ExternalTaskSensor (#28692)
+- Row-lock TIs to be removed during mapped task expansion (#28689)
+- Handle ConnectionReset exception in Executor cleanup (#28685)
+- Fix description of output redirection for access_log for gunicorn (#28672)
+- Add back join to zombie query that was dropped in #28198 (#28544)
+- Fix calendar view for CronTriggerTimeTable dags (#28411)
+- After running the DAG the employees table is empty.
(#28353) +- Fix ``DetachedInstanceError`` when finding zombies in Dag Parsing process (#28198) +- Nest header blocks in ``divs`` to fix ``dagid`` copy nit on dag.html (#28643) +- Fix UI caret direction (#28624) +- Guard not-yet-expanded ti in trigger rule dep (#28592) +- Move TI ``setNote`` endpoints under TaskInstance in OpenAPI (#28566) +- Consider previous run in ``CronTriggerTimetable`` (#28532) +- Ensure correct log dir in file task handler (#28477) +- Fix bad pods pickled in executor_config (#28454) +- Add ``ensure_ascii=False`` in trigger dag run API (#28451) +- Add setters to MappedOperator on_*_callbacks (#28313) +- Fix ``ti._try_number`` for deferred and up_for_reschedule tasks (#26993) +- separate ``callModal`` from dag.js (#28410) +- A manual run can't look like a scheduled one (#28397) +- Dont show task/run durations when there is no start_date (#28395) +- Maintain manual scroll position in task logs (#28386) +- Correctly select a mapped task's "previous" task (#28379) +- Trigger gevent ``monkeypatching`` via environment variable (#28283) +- Fix db clean warnings (#28243) +- Make arguments 'offset' and 'length' not required (#28234) +- Make live logs reading work for "other" k8s executors (#28213) +- Add custom pickling hooks to ``LazyXComAccess`` (#28191) +- fix next run datasets error (#28165) +- Ensure that warnings from ``@dag`` decorator are reported in dag file (#28153) +- Do not warn when airflow dags tests command is used (#28138) +- Ensure the ``dagbag_size`` metric decreases when files are deleted (#28135) +- Improve run/task grid view actions (#28130) +- Make BaseJob.most_recent_job favor "running" jobs (#28119) +- Don't emit FutureWarning when code not calling old key (#28109) +- Add ``airflow.api.auth.backend.session`` to backend sessions in compose (#28094) +- Resolve false warning about calling conf.get on moved item (#28075) +- Return list of tasks that will be changed (#28066) +- Handle bad zip files nicely when parsing DAGs. 
(#28011) +- Prevent double loading of providers from local paths (#27988) +- Fix deadlock when chaining multiple empty mapped tasks (#27964) +- fix: current_state method on TaskInstance doesn't filter by map_index (#27898) +- Don't log CLI actions if db not initialized (#27851) +- Make sure we can get out of a faulty scheduler state (#27834) +- dagrun, ``next_dagruns_to_examine``, add MySQL index hint (#27821) +- Handle DAG disappearing mid-flight when dag verification happens (#27720) +- fix: continue checking sla (#26968) +- Allow generation of connection URI to work when no conn type (#26765) -ImapHook: +Misc/Internal +^^^^^^^^^^^^^ +- Remove limit for ``dnspython`` after eventlet got fixed (#29004) +- Limit ``dnspython`` to < ``2.3.0`` until eventlet incompatibility is solved (#28962) +- Add automated version replacement in example dag indexes (#28090) +- Cleanup and do housekeeping with plugin examples (#28537) +- Limit ``SQLAlchemy`` to below ``2.0`` (#28725) +- Bump ``json5`` from ``1.0.1`` to ``1.0.2`` in ``/airflow/www`` (#28715) +- Fix some docs on using sensors with taskflow (#28708) +- Change Architecture and OperatingSystem classes into ``Enums`` (#28627) +- Add doc-strings and small improvement to email util (#28634) +- Fix ``Connection.get_extra`` type (#28594) +- navbar, cap dropdown size, and add scroll bar (#28561) +- Emit warnings for ``conf.get*`` from the right source location (#28543) +- Move MyPY plugins of ours to dev folder (#28498) +- Add retry to ``purge_inactive_dag_warnings`` (#28481) +- Re-enable Plyvel on ARM as it now builds cleanly (#28443) +- Add SIGUSR2 handler for LocalTaskJob and workers to aid debugging (#28309) +- Convert ``test_task_command`` to Pytest and ``unquarantine`` tests in it (#28247) +- Make invalid characters exception more readable (#28181) +- Bump decode-uri-component from ``0.2.0`` to ``0.2.2`` in ``/airflow/www`` (#28080) +- Use asserts instead of exceptions for executor not started (#28019) +- Simplify dataset ``subgraph`` logic (#27987) +- Order TIs by ``map_index`` (#27904) +- Additional info about Segmentation Fault in ``LocalTaskJob`` (#27381) +Doc only changes +^^^^^^^^^^^^^^^^ +- Mention mapped operator in cluster policy doc (#28885) +- Slightly improve description of Dynamic DAG generation preamble (#28650) +- Restructure Docs (#27235) +- Update scheduler docs about low priority tasks (#28831) +- Clarify that versioned constraints are fixed at release time (#28762) +- Clarify about docker compose (#28729) +- Adding an example dag for dynamic task mapping (#28325) +- Use docker compose v2 command (#28605) +- Add AIRFLOW_PROJ_DIR to docker-compose example (#28517) +- Remove outdated Optional Provider Feature outdated documentation (#28506) +- Add documentation for [core] mp_start_method config (#27993) +- Documentation for the LocalTaskJob return code counter (#27972) +- Note which versions of Python are supported (#27798) -* The order of arguments has changed for ``has_mail_attachment``\ , - ``retrieve_mail_attachments`` and ``download_mail_attachments``. -* A new ``mail_filter`` argument has been added to each of those. -``airflow.providers.http.hooks.http.HttpHook`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Airflow 2.5.0 (2022-12-02) +-------------------------- -The HTTPHook is now secured by default: ``verify=True`` (before: ``verify=False``\ ) -This can be overwritten by using the extra_options param as ``{'verify': False}``. 
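A brief sketch of keeping the old insecure behavior where genuinely needed (the connection id and
endpoint below are placeholders):

.. code-block:: python

    from airflow.providers.http.hooks.http import HttpHook

    hook = HttpHook(method="GET", http_conn_id="http_default")
    # verify=True is now the default; opt out per call only when necessary
    response = hook.run(endpoint="health", extra_options={"verify": False})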
+Significant Changes
+^^^^^^^^^^^^^^^^^^^

-``airflow.providers.cloudant.hooks.cloudant.CloudantHook``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

+``airflow dags test`` no longer performs a backfill job (#26400)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+In order to make ``airflow dags test`` more useful as a testing and debugging tool, we no
+longer run a backfill job and instead run a "local task runner". Users can still backfill
+their DAGs using the ``airflow dags backfill`` command.

-* upgraded cloudant version from ``>=0.5.9,<2.0`` to ``>=2.0``
-* removed the use of the ``schema`` attribute in the connection
-* removed ``db`` function since the database object can also be retrieved by calling ``cloudant_session['database_name']``

+Airflow config section ``kubernetes`` renamed to ``kubernetes_executor`` (#26873)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""

-For example:

+KubernetesPodOperator no longer considers any core kubernetes config params, so this section now only applies to the kubernetes executor. Renaming it reduces potential for confusion.

-.. code-block:: python

+``AirflowException`` is now thrown as soon as any dependent task of ExternalTaskSensor fails (#27190)
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""

-    from airflow.providers.cloudant.hooks.cloudant import CloudantHook

+``ExternalTaskSensor`` no longer hangs indefinitely when ``failed_states`` is set, an ``execution_date_fn`` is used, and some but not all of the dependent tasks fail.
+Instead, an ``AirflowException`` is thrown as soon as any of the dependent tasks fail.
+Any code handling this failure in addition to timeouts should move to catching the ``AirflowException`` base class and not only the ``AirflowSensorTimeout`` subclass.

-    with CloudantHook().get_conn() as cloudant_session:
-        database = cloudant_session["database_name"]

+The Airflow config option ``scheduler.deactivate_stale_dags_interval`` has been renamed to ``scheduler.parsing_cleanup_interval`` (#27828).
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""

-See the `docs `_ for more information on how to use the new cloudant version.

+The old option will continue to work but will issue deprecation warnings, and will be removed entirely in Airflow 3.
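A configuration sketch of the rename (the interval value is illustrative only):

.. code-block:: ini

    [scheduler]
    # previously: deactivate_stale_dags_interval = 60 (still accepted, with a deprecation warning)
    parsing_cleanup_interval = 60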
-``airflow.providers.snowflake`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +New Features +^^^^^^^^^^^^ +- ``TaskRunner``: notify of component start and finish (#27855) +- Add DagRun state change to the Listener plugin system(#27113) +- Metric for raw task return codes (#27155) +- Add logic for XComArg to pull specific map indexes (#27771) +- Clear TaskGroup (#26658, #28003) +- Add critical section query duration metric (#27700) +- Add: #23880 :: Audit log for ``AirflowModelViews(Variables/Connection)`` (#24079, #27994, #27923) +- Add postgres 15 support (#27444) +- Expand tasks in mapped group at run time (#27491) +- reset commits, clean submodules (#27560) +- scheduler_job, add metric for scheduler loop timer (#27605) +- Allow datasets to be used in taskflow (#27540) +- Add expanded_ti_count to ti context (#27680) +- Add user comment to task instance and dag run (#26457, #27849, #27867) +- Enable copying DagRun JSON to clipboard (#27639) +- Implement extra controls for SLAs (#27557) +- add dag parsed time in DAG view (#27573) +- Add max_wait for exponential_backoff in BaseSensor (#27597) +- Expand tasks in mapped group at parse time (#27158) +- Add disable retry flag on backfill (#23829) +- Adding sensor decorator (#22562) +- Api endpoint update ti (#26165) +- Filtering datasets by recent update events (#26942) +- Support ``Is /not`` Null filter for value is None on ``webui`` (#26584) +- Add search to datasets list (#26893) +- Split out and handle 'params' in mapped operator (#26100) +- Add authoring API for TaskGroup mapping (#26844) +- Add ``one_done`` trigger rule (#26146) +- Create a more efficient airflow dag test command that also has better local logging (#26400) +- Support add/remove permissions to roles commands (#26338) +- Auto tail file logs in Web UI (#26169) +- Add triggerer info to task instance in API (#26249) +- Flag to deserialize value on custom XCom backend (#26343) -When initializing a Snowflake hook or operator, the value used for ``snowflake_conn_id`` was always ``snowflake_conn_id``\ , regardless of whether or not you specified a value for it. The default ``snowflake_conn_id`` value is now switched to ``snowflake_default`` for consistency and will be properly overridden when specified. 
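A hypothetical sketch of relying on the corrected behavior (the connection id and query are
placeholders):

.. code-block:: python

    from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator

    # "snowflake_default" is now the default; an explicitly passed id is honored as expected
    run_query = SnowflakeOperator(
        task_id="run_query",
        snowflake_conn_id="my_snowflake_conn",
        sql="SELECT CURRENT_TIMESTAMP()",
    )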
+Improvements +^^^^^^^^^^^^ +- Allow depth-first execution (#27827) +- UI: Update offset height if data changes (#27865) +- Improve TriggerRuleDep typing and readability (#27810) +- Make views requiring session, keyword only args (#27790) +- Optimize ``TI.xcom_pull()`` with explicit task_ids and map_indexes (#27699) +- Allow hyphens in pod id used by k8s executor (#27737) +- optimise task instances filtering (#27102) +- Use context managers to simplify log serve management (#27756) +- Fix formatting leftovers (#27750) +- Improve task deadlock messaging (#27734) +- Improve "sensor timeout" messaging (#27733) +- Replace urlparse with ``urlsplit`` (#27389) +- Align TaskGroup semantics to AbstractOperator (#27723) +- Add new files to parsing queue on every loop of dag processing (#27060) +- Make Kubernetes Executor & Scheduler resilient to error during PMH execution (#27611) +- Separate dataset deps into individual graphs (#27356) +- Use log.exception where more economical than log.error (#27517) +- Move validation ``branch_task_ids`` into ``SkipMixin`` (#27434) +- Coerce LazyXComAccess to list when pushed to XCom (#27251) +- Update cluster-policies.rst docs (#27362) +- Add warning if connection type already registered within the provider (#27520) +- Activate debug logging in commands with --verbose option (#27447) +- Add classic examples for Python Operators (#27403) +- change ``.first()`` to ``.scalar()`` (#27323) +- Improve reset_dag_run description (#26755) +- Add examples and ``howtos`` about sensors (#27333) +- Make grid view widths adjustable (#27273) +- Sorting plugins custom menu links by category before name (#27152) +- Simplify DagRun.verify_integrity (#26894) +- Add mapped task group info to serialization (#27027) +- Correct the JSON style used for Run config in Grid View (#27119) +- No ``extra__conn_type__`` prefix required for UI behaviors (#26995) +- Improve dataset update blurb (#26878) +- Rename kubernetes config section to kubernetes_executor (#26873) +- decode params for dataset searches (#26941) +- Get rid of the DAGRun details page & rely completely on Grid (#26837) +- Fix scheduler ``crashloopbackoff`` when using ``hostname_callable`` (#24999) +- Reduce log verbosity in KubernetesExecutor. 
(#26582) +- Don't iterate tis list twice for no reason (#26740) +- Clearer code for PodGenerator.deserialize_model_file (#26641) +- Don't import kubernetes unless you have a V1Pod (#26496) +- Add updated_at column to DagRun and Ti tables (#26252) +- Move the deserialization of custom XCom Backend to 2.4.0 (#26392) +- Avoid calculating all elements when one item is needed (#26377) +- Add ``__future__``.annotations automatically by isort (#26383) +- Handle list when serializing expand_kwargs (#26369) +- Apply PEP-563 (Postponed Evaluation of Annotations) to core airflow (#26290) +- Add more weekday operator and sensor examples #26071 (#26098) +- Align TaskGroup semantics to AbstractOperator (#27723) -Other changes -""""""""""""" +Bug Fixes +^^^^^^^^^ +- Gracefully handle whole config sections being renamed (#28008) +- Add allow list for imports during deserialization (#27887) +- Soft delete datasets that are no longer referenced in DAG schedules or task outlets (#27828) +- Redirect to home view when there are no valid tags in the URL (#25715) +- Refresh next run datasets info in dags view (#27839) +- Make MappedTaskGroup depend on its expand inputs (#27876) +- Make DagRun state updates for paused DAGs faster (#27725) +- Don't explicitly set include_examples to False on task run command (#27813) +- Fix menu border color (#27789) +- Fix backfill queued task getting reset to scheduled state. (#23720) +- Fix clearing child dag mapped tasks from parent dag (#27501) +- Handle json encoding of ``V1Pod`` in task callback (#27609) +- Fix ExternalTaskSensor can't check zipped dag (#27056) +- Avoid re-fetching DAG run in TriggerDagRunOperator (#27635) +- Continue on exception when retrieving metadata (#27665) +- External task sensor fail fix (#27190) +- Add the default None when pop actions (#27537) +- Display parameter values from serialized dag in trigger dag view. (#27482, #27944) +- Move TriggerDagRun conf check to execute (#27035) +- Resolve trigger assignment race condition (#27072) +- Update google_analytics.html (#27226) +- Fix some bug in web ui dags list page (auto-refresh & jump search null state) (#27141) +- Fixed broken URL for docker-compose.yaml (#26721) +- Fix xcom arg.py .zip bug (#26636) +- Fix 404 ``taskInstance`` errors and split into two tables (#26575) +- Fix browser warning of improper thread usage (#26551) +- template rendering issue fix (#26390) +- Clear ``autoregistered`` DAGs if there are any import errors (#26398) +- Fix ``from airflow import version`` lazy import (#26239) +- allow scroll in triggered dag runs modal (#27965) -This release also includes changes that fall outside any of the sections above. - -Standardized "extra" requirements -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -We standardized the Extras names and synchronized providers package names with the main airflow extras. - -We deprecated a number of extras in 2.0. - -.. 
list-table::
-   :header-rows: 1
-
-   * - Deprecated extras
-     - New extras
-   * - atlas
-     - apache.atlas
-   * - aws
-     - amazon
-   * - azure
-     - microsoft.azure
-   * - azure_blob_storage
-     - microsoft.azure
-   * - azure_data_lake
-     - microsoft.azure
-   * - azure_cosmos
-     - microsoft.azure
-   * - azure_container_instances
-     - microsoft.azure
-   * - cassandra
-     - apache.cassandra
-   * - druid
-     - apache.druid
-   * - gcp
-     - google
-   * - gcp_api
-     - google
-   * - hdfs
-     - apache.hdfs
-   * - hive
-     - apache.hive
-   * - kubernetes
-     - cncf.kubernetes
-   * - mssql
-     - microsoft.mssql
-   * - pinot
-     - apache.pinot
-   * - webhdfs
-     - apache.webhdfs
-   * - winrm
-     - apache.winrm

-For example:

-If you want to install integration for Apache Atlas, then instead of ``pip install apache-airflow[atlas]``
-you should use ``pip install apache-airflow[apache.atlas]``.

-NOTE!

-If you want to install integration for Microsoft Azure, then instead of

-.. code-block::

-    pip install 'apache-airflow[azure_blob_storage,azure_data_lake,azure_cosmos,azure_container_instances]'

-you should run ``pip install 'apache-airflow[microsoft.azure]'``

-If you want to install integration for Amazon Web Services, then instead of
-``pip install 'apache-airflow[s3,emr]'``\ , you should execute ``pip install 'apache-airflow[amazon]'``

-The deprecated extras will be removed in 3.0.

-Simplify the response payload of endpoints /dag_stats and /task_stats
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-The response of endpoints ``/dag_stats`` and ``/task_stats`` helps the UI fetch brief statistics about DAGs and Tasks. The format was:

-.. code-block:: json

-    {
-        "example_http_operator": [
-            {
-                "state": "success",
-                "count": 0,
-                "dag_id": "example_http_operator",
-                "color": "green"
-            },
-            {
-                "state": "running",
-                "count": 0,
-                "dag_id": "example_http_operator",
-                "color": "lime"
-            }
-        ]
-    }

-The ``dag_id`` was repeated in the payload, which made the response payload unnecessarily big.

-Now the ``dag_id`` will not appear repeated in the payload, and the response format is:

-.. 
code-block:: json - - { - "example_http_operator": [ - { - "state": "success", - "count": 0, - "color": "green" - }, - { - "state": "running", - "count": 0, - "color": "lime" - } - ] - } +Misc/Internal +^^^^^^^^^^^^^ +- Remove ``is_mapped`` attribute (#27881) +- Simplify FAB table resetting (#27869) +- Fix old-style typing in Base Sensor (#27871) +- Switch (back) to late imports (#27730) +- Completed D400 for multiple folders (#27748) +- simplify notes accordion test (#27757) +- completed D400 for ``airflow/callbacks/* airflow/cli/*`` (#27721) +- Completed D400 for ``airflow/api_connexion/* directory`` (#27718) +- Completed D400 for ``airflow/listener/* directory`` (#27731) +- Completed D400 for ``airflow/lineage/* directory`` (#27732) +- Update API & Python Client versions (#27642) +- Completed D400 & D401 for ``airflow/api/*`` directory (#27716) +- Completed D400 for multiple folders (#27722) +- Bump ``minimatch`` from ``3.0.4 to 3.0.8`` in ``/airflow/www`` (#27688) +- Bump loader-utils from ``1.4.1 to 1.4.2 ``in ``/airflow/www`` (#27697) +- Disable nested task mapping for now (#27681) +- bump alembic minimum version (#27629) +- remove unused code.html (#27585) +- Enable python string normalization everywhere (#27588) +- Upgrade dependencies in order to avoid backtracking (#27531) +- Strengthen a bit and clarify importance of triaging issues (#27262) +- Deduplicate type hints (#27508) +- Add stub 'yield' to ``BaseTrigger.run`` (#27416) +- Remove upper-bound limit to dask (#27415) +- Limit Dask to under ``2022.10.1`` (#27383) +- Update old style typing (#26872) +- Enable string normalization for docs (#27269) +- Slightly faster up/downgrade tests (#26939) +- Deprecate use of core get_kube_client in PodManager (#26848) +- Add ``memray`` files to ``gitignore / dockerignore`` (#27001) +- Bump sphinx and ``sphinx-autoapi`` (#26743) +- Simplify ``RTIF.delete_old_records()`` (#26667) +- migrate last react files to typescript (#26112) +- Work around ``pyupgrade`` edge cases (#26384) +Doc only changes +^^^^^^^^^^^^^^^^ +- Document dag_file_processor_timeouts metric as deprecated (#27067) +- Drop support for PostgreSQL 10 (#27594) +- Update index.rst (#27529) +- Add note about pushing the lazy XCom proxy to XCom (#27250) +- Fix BaseOperator link (#27441) +- [docs] best-practices add use variable with template example. (#27316) +- docs for custom view using plugin (#27244) +- Update graph view and grid view on overview page (#26909) +- Documentation fixes (#26819) +- make consistency on markup title string level (#26696) +- Add documentation to dag test function (#26713) +- Fix broken URL for ``docker-compose.yaml`` (#26726) +- Add a note against use of top level code in timetable (#26649) +- Fix example_datasets dag names (#26495) +- Update docs: zip-like effect is now possible in task mapping (#26435) +- changing to task decorator in docs from classic operator use (#25711) .. spelling:: diff --git a/airflow-core/docs/administration-and-deployment/cluster-policies.rst b/airflow-core/docs/administration-and-deployment/cluster-policies.rst index e0b71eef7a353..b67234c61b58f 100644 --- a/airflow-core/docs/administration-and-deployment/cluster-policies.rst +++ b/airflow-core/docs/administration-and-deployment/cluster-policies.rst @@ -104,7 +104,7 @@ configure local settings. [project.entry-points.'airflow.policy'] _ = 'my_airflow_plugin.policies' - The entrypoint group must be ``airflow.policy``, and the name is ignored. 
The value should be your module (or class) decorated with the ``@hookimpl`` marker. + The entrypoint group must be ``airflow.policy``, and the name must be unique per entry, otherwise duplicate entries will be ignored by pluggy. The value should be your module (or class) decorated with the ``@hookimpl`` marker. Once you have done that, and you have installed your distribution into your Airflow env, the policy functions will get called by the various Airflow components. (The exact call order is undefined, so don't rely on any particular calling order if you have multiple plugins). @@ -152,7 +152,7 @@ Here's an example of enforcing a maximum timeout policy on every task: :start-after: [START example_task_cluster_policy] :end-before: [END example_task_cluster_policy] -You could also implement to protect against common errors, rather than as technical security controls. For example, don't run tasks without airflow owners: +You could also implement to protect against common errors, rather than as technical security controls. For example, don't run tasks without Airflow owners: .. literalinclude:: /../tests/unit/cluster_policies/__init__.py :language: python diff --git a/airflow-core/docs/administration-and-deployment/dag-bundles.rst b/airflow-core/docs/administration-and-deployment/dag-bundles.rst index 7dc03ebab1451..53bd5e16afbdf 100644 --- a/airflow-core/docs/administration-and-deployment/dag-bundles.rst +++ b/airflow-core/docs/administration-and-deployment/dag-bundles.rst @@ -50,6 +50,9 @@ Airflow supports multiple types of dag Bundles, each catering to specific use ca **airflow.providers.git.bundles.git.GitDagBundle** These bundles integrate with Git repositories, allowing Airflow to fetch dags directly from a repository. +**airflow.providers.amazon.aws.bundles.s3.S3DagBundle** + These bundles reference an S3 bucket containing DAG files. They do not support versioning of the bundle, meaning tasks always run using the latest code. + Configuring dag bundles ----------------------- @@ -65,7 +68,7 @@ For example, adding multiple dag bundles to your ``airflow.cfg`` file: dag_bundle_config_list = [ { "name": "my_git_repo", - "classpath": "airflow.dag_processing.bundles.git.GitDagBundle", + "classpath": "airflow.providers.git.bundles.git.GitDagBundle", "kwargs": {"tracking_ref": "main", "git_conn_id": "my_git_conn"} }, { @@ -80,9 +83,42 @@ For example, adding multiple dag bundles to your ``airflow.cfg`` file: The whitespace, particularly on the last line, is important so a multi-line value works properly. More details can be found in the the `configparser docs `_. +If you want a view url different from the default provided by the dag bundle, you can change the url in the kwargs of the dag bundle configuration. +For example, if you want to use a custom URL for the git dag bundle: + +.. code-block:: ini + + [dag_processor] + dag_bundle_config_list = [ + { + "name": "my_git_repo", + "classpath": "airflow.dag_processing.bundles.git.GitDagBundle", + "kwargs": { + "tracking_ref": "main", + "git_conn_id": "my_git_conn", + "view_url_template": "https://my.custom.git.repo/view/{subdir}", + } + } + ] + +Above, the ``view_url_template`` is set to a custom URL that will be used to view the Dags in the ``my_git_repo`` bundle. The ``{subdir}`` placeholder will be replaced +with the ``subdir`` attribute of the bundle. The placeholders are attributes of the bundle. You cannot use any placeholder outside of the bundle's attributes. 
+When you specify a custom URL, it overrides the default URL provided by the dag bundle.
+
+The URL is verified for safety; if it is not safe, the view URL for the bundle is set to ``None``. This prevents potential security issues with unsafe URLs.
+
 You can also override the :ref:`config:dag_processor__refresh_interval` per dag bundle by passing it in kwargs. This
 controls how often the dag processor refreshes, or looks for new files, in the dag bundles.
 
+Starting with Airflow 3.0.2, Git is pre-installed in the base image. However, if you are using a version prior to 3.0.2, you need to install Git in your Docker image.
+
+.. code-block:: Dockerfile
+
+    RUN apt-get update && apt-get install -y git
+    ENV GIT_PYTHON_GIT_EXECUTABLE=/usr/bin/git
+    ENV GIT_PYTHON_REFRESH=quiet
+
+
 Writing custom dag bundles
 --------------------------
diff --git a/airflow-core/docs/administration-and-deployment/dagfile-processing.rst b/airflow-core/docs/administration-and-deployment/dagfile-processing.rst
index 353f12a72a246..d2050e1df6af3 100644
--- a/airflow-core/docs/administration-and-deployment/dagfile-processing.rst
+++ b/airflow-core/docs/administration-and-deployment/dagfile-processing.rst
@@ -16,13 +16,13 @@ specific language governing permissions and limitations
 under the License.
 
-DAG File Processing
+Dag File Processing
 -------------------
 
-DAG File Processing refers to the process of reading the python files that define your dags and storing them such that the scheduler can schedule them.
+Dag File Processing refers to the process of reading the python files that define your Dags and storing them such that the scheduler can schedule them.
 
-There are two primary components involved in DAG file processing. The ``DagFileProcessorManager`` is a process executing an infinite loop that determines which files need
-to be processed, and the ``DagFileProcessorProcess`` is a separate process that is started to convert an individual file into one or more DAG objects.
+There are two primary components involved in Dag file processing. The ``DagFileProcessorManager`` is a process executing an infinite loop that determines which files need
+to be processed, and the ``DagFileProcessorProcess`` is a separate process that is started to convert an individual file into one or more Dag objects.
 
 The ``DagFileProcessorManager`` runs user codes. As a result, it runs as a standalone process by running the ``airflow dag-processor`` CLI command.
 
@@ -30,61 +30,61 @@ The ``DagFileProcessorManager`` runs user codes. As a result, it runs as a stand
 ``DagFileProcessorManager`` has the following steps:
 
-1. Check for new files: If the elapsed time since the DAG was last refreshed is > :ref:`config:scheduler__dag_dir_list_interval` then update the file paths list
+1. Check for new files: If the elapsed time since the Dag was last refreshed is > :ref:`config:dag_processor__refresh_interval` then update the file paths list
 2. Exclude recently processed files: Exclude files that have been processed more recently than :ref:`min_file_process_interval` and have not been modified
 3. Queue file paths: Add files discovered to the file path queue
 4. Process files: Start a new ``DagFileProcessorProcess`` for each file, up to a maximum of :ref:`config:dag_processor__parsing_processes`
-5. Collect results: Collect the result from any finished DAG processors
+5. Collect results: Collect the result from any finished Dag processors
 6. Log statistics: Print statistics and emit ``dag_processing.total_parse_time``
 
 ``DagFileProcessorProcess`` has the following steps:
 
 1. Process file: The entire process must complete within :ref:`dag_file_processor_timeout`
-2. The DAG files are loaded as Python module: Must complete within :ref:`dagbag_import_timeout`
-3. Process modules: Find DAG objects within Python module
-4. Return DagBag: Provide the ``DagFileProcessorManager`` a list of the discovered DAG objects
+2. The Dag files are loaded as Python module: Must complete within :ref:`dagbag_import_timeout`
+3. Process modules: Find Dag objects within Python module
+4. Return DagBag: Provide the ``DagFileProcessorManager`` a list of the discovered Dag objects
 
-Fine-tuning your DAG processor performance
+Fine-tuning your Dag processor performance
 ------------------------------------------
 
-What impacts DAG processor's performance
+What impacts Dag processor's performance
 """"""""""""""""""""""""""""""""""""""""
 
-The DAG processor is responsible for continuously parsing DAG files and synchronizing with the DAG in the database
-In order to fine-tune your DAG processor, you need to include a number of factors:
+The Dag processor is responsible for continuously parsing Dag files and synchronizing with the Dags in the database.
+In order to fine-tune your Dag processor, you need to consider a number of factors:
 
 * The kind of deployment you have
-  * what kind of filesystem you have to share the dags (impacts performance of continuously reading dags)
+  * what kind of filesystem you have to share the Dags (impacts performance of continuously reading Dags)
   * how fast the filesystem is (in many cases of distributed cloud filesystem you can pay extra to get more throughput/faster filesystem)
   * how much memory you have for your processing
   * how much CPU you have available
   * how much networking throughput you have available
 
-* The logic and definition of your DAG structure:
-  * how many DAG files you have
-  * how many dags you have in your files
-  * how large the DAG files are (remember DAG parser needs to read and parse the file every n seconds)
+* The logic and definition of your Dag structure:
+  * how many Dag files you have
+  * how many Dags you have in your files
+  * how large the Dag files are (remember Dag parser needs to read and parse the file every n seconds)
   * how complex they are (i.e. how fast they can be parsed, how many tasks and dependencies they have)
-  * whether parsing your DAG file involves importing a lot of libraries or heavy processing at the top level
+  * whether parsing your Dag file involves importing a lot of libraries or heavy processing at the top level
    (Hint! It should not.
See :ref:`best_practices/top_level_code`) -* The DAG processor configuration - * How many DAG processors you have - * How many parsing processes you have in your DAG processor - * How much time DAG processor waits between re-parsing of the same DAG (it happens continuously) - * How many callbacks you run per DAG processor loop +* The Dag processor configuration + * How many Dag processors you have + * How many parsing processes you have in your Dag processor + * How much time Dag processor waits between re-parsing of the same Dag (it happens continuously) + * How many callbacks you run per Dag processor loop -How to approach DAG processor's fine-tuning +How to approach Dag processor's fine-tuning """"""""""""""""""""""""""""""""""""""""""" Airflow gives you a lot of "knobs" to turn to fine tune the performance but it's a separate task, -depending on your particular deployment, your DAG structure, hardware availability and expectations, +depending on your particular deployment, your Dag structure, hardware availability and expectations, to decide which knobs to turn to get best effect for you. Part of the job when managing the deployment is to decide what you are going to optimize for. Some users are ok with -30 seconds delays of new DAG parsing, at the expense of lower CPU usage, whereas some other users -expect the dags to be parsed almost instantly when they appear in the dags folder at the +30 seconds delays of new Dag parsing, at the expense of lower CPU usage, whereas some other users +expect the Dags to be parsed almost instantly when they appear in the Dags folder at the expense of higher CPU usage for example. Airflow gives you the flexibility to decide, but you should find out what aspect of performance is @@ -103,30 +103,30 @@ to observe and monitor your systems): * based on your expectations and observations - decide what is your next improvement and go back to the observation of your performance, bottlenecks. Performance improvement is an iterative process. -What resources might limit DAG processors's performance +What resources might limit Dag processors's performance """"""""""""""""""""""""""""""""""""""""""""""""""""""" There are several areas of resource usage that you should pay attention to: -* FileSystem performance. The Airflow DAG processor relies heavily on parsing (sometimes a lot) of Python - files, which are often located on a shared filesystem. The DAG processor continuously reads and +* FileSystem performance. The Airflow Dag processor relies heavily on parsing (sometimes a lot) of Python + files, which are often located on a shared filesystem. The Dag processor continuously reads and re-parses those files. The same files have to be made available to workers, so often they are stored in a distributed filesystem. You can use various filesystems for that purpose (NFS, CIFS, EFS, GCS fuse, Azure File System are good examples). There are various parameters you can control for those filesystems and fine-tune their performance, but this is beyond the scope of this document. You should observe statistics and usage of your filesystem to determine if problems come from the filesystem performance. For example there are anecdotal evidences that increasing IOPS (and paying more) for the - EFS performance, dramatically improves stability and speed of parsing Airflow dags when EFS is used. + EFS performance, dramatically improves stability and speed of parsing Airflow Dags when EFS is used. 
 * Another solution to FileSystem performance, if it becomes your bottleneck, is to turn to alternative
-  mechanisms of distributing your dags. Embedding dags in your image and GitSync distribution have both
-  the property that the files are available locally for the DAG processor and it does not have to use a
-  distributed filesystem to read the files, the files are available locally for the the DAG processor and it is
+  mechanisms of distributing your Dags. Embedding Dags in your image and GitSync distribution have both
+  the property that the files are available locally for the Dag processor, so it does not have to use a
+  distributed filesystem to read the files. Reading local files is
   usually as fast as it can be, especially if your machines use fast SSD disks for local storage. Those
   distribution mechanisms have other characteristics that might make them not the best choice for you, but
   if your problems with performance come from distributed filesystem performance, they might be the best
   approach to follow.
 * Database connections and Database usage might become a problem as you want to increase performance and
-  process more things in parallel. Airflow is known for being "database-connection hungry" - the more dags
+  process more things in parallel. Airflow is known for being "database-connection hungry" - the more Dags
   you have and the more you want to process in parallel, the more database connections will be opened.
   This is generally not a problem for MySQL as its model of handling connections is thread-based, but this
   might be a problem for Postgres, where connection handling is process-based. It is a general consensus
@@ -134,12 +134,12 @@ There are several areas of resource usage that you should pay attention to:
   `PGBouncer `_ as a proxy to your database. The :doc:`helm-chart:index` supports PGBouncer out-of-the-box.
 * CPU usage is most important for FileProcessors - those are the processes that parse and execute
-  Python DAG files. Since DAG processors typically triggers such parsing continuously, when you have a lot of dags,
+  Python Dag files. Since Dag processors typically trigger such parsing continuously, when you have a lot of Dags,
   the processing might take a lot of CPU. You can mitigate it by increasing the
   :ref:`config:dag_processor__min_file_process_interval`, but this is one of the mentioned trade-offs,
   result of this is that changes to such files will be picked up slower and you will see delays between
   submitting the files and getting them available in Airflow UI and executed by Scheduler. Optimizing
-  the way how your dags are built, avoiding external data sources is your best approach to improve CPU
+  the way your Dags are built, avoiding external data sources, is your best approach to improve CPU
   usage. If you have more CPUs available, you can increase number of processing threads
   :ref:`config:dag_processor__parsing_processes`.
 * Airflow might use quite a significant amount of memory when you try to get more performance out of it.
@@ -152,14 +152,14 @@ There are several areas of resource usage that you should pay attention to:
   kind of memory you are observing. Usually you should look at ``working memory`` (names might vary
   depending on your deployment) rather than ``total memory used``.
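+To make these trade-offs concrete, here is a minimal sketch of the main knobs in ``airflow.cfg`` (the values are illustrative placeholders, not tuning recommendations):
+
+.. code-block:: ini
+
+    [dag_processor]
+    # How often to look for new Dag files in the bundles (lower = new Dags show up faster, more I/O)
+    refresh_interval = 300
+    # How long to wait before re-parsing an unchanged Dag file (lower = fresher Dags, more CPU)
+    min_file_process_interval = 60
+    # How many files to parse in parallel (higher = more CPU and more database connections)
+    parsing_processes = 4
+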
-What can you do, to improve DAG processor's performance +What can you do, to improve Dag processor's performance """"""""""""""""""""""""""""""""""""""""""""""""""""""" When you know what your resource usage is, the improvements that you can consider might be: -* improve the logic, efficiency of parsing and reduce complexity of your top-level DAG Python code. It is +* improve the logic, efficiency of parsing and reduce complexity of your top-level Dag Python code. It is parsed continuously so optimizing that code might bring tremendous improvements, especially if you try - to reach out to some external databases etc. while parsing dags (this should be avoided at all cost). + to reach out to some external databases etc. while parsing Dags (this should be avoided at all cost). The :ref:`best_practices/top_level_code` explains what are the best practices for writing your top-level Python code. The :ref:`best_practices/reducing_dag_complexity` document provides some areas that you might look at when you want to reduce complexity of your code. @@ -168,31 +168,31 @@ When you know what your resource usage is, the improvements that you can conside actions like increasing number of parsing processes might bring improvements in performance at the expense of higher utilization of those. * increase hardware capacity (for example if you see that CPU is limiting you or that I/O you use for - DAG filesystem is at its limits). Often the problem with DAG processor performance is + Dag filesystem is at its limits). Often the problem with Dag processor performance is simply because your system is not "capable" enough and this might be the only way, unless a shared database or filesystem is a bottleneck. -* experiment with different values for the "DAG processor tunables". Often you might get better effects by +* experiment with different values for the "Dag processor tunables". Often you might get better effects by simply exchanging one performance aspect for another. For example if you want to decrease the - CPU usage, you might increase file processing interval (but the result will be that new dags will + CPU usage, you might increase file processing interval (but the result will be that new Dags will appear with bigger delay). Usually performance tuning is the art of balancing different aspects. -* sometimes you change DAG processor behavior slightly (for example change parsing sort order) +* sometimes you change Dag processor behavior slightly (for example change parsing sort order) in order to get better fine-tuned results for your particular deployment. -DAG processor Configuration options +Dag processor Configuration options """"""""""""""""""""""""""""""""""" -The following config settings can be used to control aspects of the Scheduler. -However, you can also look at other non-performance-related scheduler configuration parameters available at -:doc:`../configurations-ref` in the ``[scheduler]`` section. +The following config settings can be used to control aspects of the Dag processor. +However, you can also look at other non-performance-related Dag processor configuration parameters available at +:doc:`../configurations-ref` in the ``[dag_processor]`` section. - :ref:`config:dag_processor__file_parsing_sort_mode` - The scheduler will list and sort the DAG files to decide the parsing order. + The Dag processor will list and sort the Dag files to decide the parsing order. - :ref:`config:dag_processor__min_file_process_interval` - Number of seconds after which a DAG file is re-parsed. 
The DAG file is parsed every - min_file_process_interval number of seconds. Updates to dags are reflected after + Number of seconds after which a Dag file is re-parsed. The Dag file is parsed every + ``min_file_process_interval`` number of seconds. Updates to Dags are reflected after this interval. Keeping this number low will increase CPU usage. - :ref:`config:dag_processor__parsing_processes` - The scheduler can run multiple processes in parallel to parse DAG files. This defines + The Dag processor can run multiple processes in parallel to parse Dag files. This defines how many processes will run. diff --git a/airflow-core/docs/administration-and-deployment/index.rst b/airflow-core/docs/administration-and-deployment/index.rst index ec39a526a7717..720f5f695eb2c 100644 --- a/airflow-core/docs/administration-and-deployment/index.rst +++ b/airflow-core/docs/administration-and-deployment/index.rst @@ -18,7 +18,7 @@ Administration and Deployment ===================================== -This section contains information about deploying dags into production and the administration of airflow deployments. +This section contains information about deploying dags into production and the administration of Airflow deployments. .. toctree:: :maxdepth: 2 diff --git a/airflow-core/docs/administration-and-deployment/listeners.rst b/airflow-core/docs/administration-and-deployment/listeners.rst index 2ae7899e26199..b1ccf181d0c91 100644 --- a/airflow-core/docs/administration-and-deployment/listeners.rst +++ b/airflow-core/docs/administration-and-deployment/listeners.rst @@ -101,7 +101,7 @@ Asset Events -------------- - ``on_asset_created`` -- ``on_dataset_alias_created`` +- ``on_asset_alias_created`` - ``on_asset_changed`` Asset events occur when Asset management operations are run. @@ -131,7 +131,7 @@ Airflow defines the specification as `hookspec `. -Listener API is meant to be called across all dags and all operators. You can't listen to events generated by specific dags. For that behavior, try methods like ``on_success_callback`` and ``pre_execute``. These provide callbacks for particular DAG authors or operator creators. The logs and ``print()`` calls will be handled as part of the listeners. +Listener API is meant to be called across all dags and all operators. You can't listen to events generated by specific dags. For that behavior, try methods like ``on_success_callback`` and ``pre_execute``. These provide callbacks for particular Dag authors or operator creators. The logs and ``print()`` calls will be handled as part of the listeners. Compatibility note diff --git a/airflow-core/docs/administration-and-deployment/logging-monitoring/advanced-logging-configuration.rst b/airflow-core/docs/administration-and-deployment/logging-monitoring/advanced-logging-configuration.rst index 2cfc44e725515..fa34e74f3140b 100644 --- a/airflow-core/docs/administration-and-deployment/logging-monitoring/advanced-logging-configuration.rst +++ b/airflow-core/docs/administration-and-deployment/logging-monitoring/advanced-logging-configuration.rst @@ -25,7 +25,7 @@ Not all configuration options are available from the ``airflow.cfg`` file. The c how to configure logging for tasks, because the logs generated by tasks are not only logged in separate files by default but has to be also accessible via the webserver. 
-By default standard airflow component logs are written to the ``$AIRFLOW_HOME/logs`` directory, but you +By default standard Airflow component logs are written to the ``$AIRFLOW_HOME/logs`` directory, but you can also customize it and configure it as you want by overriding Python logger configuration that can be configured by providing custom logging configuration object. You can also create and use logging configuration for specific operators and tasks. @@ -34,7 +34,7 @@ Some configuration options require that the logging config class be overwritten. configuration of Airflow and modifying it to suit your needs. The default configuration can be seen in the -`airflow_local_settings.py template `_ +`airflow_local_settings.py template `_ and you can see the loggers and handlers used there. See :ref:`Configuring local settings ` for details on how to diff --git a/airflow-core/docs/administration-and-deployment/logging-monitoring/callbacks.rst b/airflow-core/docs/administration-and-deployment/logging-monitoring/callbacks.rst index 377d06579cf54..c2201921cd135 100644 --- a/airflow-core/docs/administration-and-deployment/logging-monitoring/callbacks.rst +++ b/airflow-core/docs/administration-and-deployment/logging-monitoring/callbacks.rst @@ -20,13 +20,19 @@ Callbacks ========= -A valuable component of logging and monitoring is the use of task callbacks to act upon changes in state of a given task, or across all tasks in a given DAG. -For example, you may wish to alert when certain tasks have failed, or have the last task in your DAG invoke a callback when it succeeds. +A valuable component of logging and monitoring is the use of task callbacks to act upon changes in state of a given DAG or task, or across all tasks in a given DAG. +For example, you may wish to alert when certain tasks have failed, or invoke a callback when your DAG succeeds. + +There are three different places where callbacks can be defined. + +- Callbacks set in the DAG definition will be applied at the DAG level. +- Using ``default_args``, callbacks can be set for each task in a DAG. +- Individual callbacks can be set for a task by setting that callback within the task definition itself. .. note:: - Callback functions are only invoked when the task state changes due to execution by a worker. - As such, task changes set by the command line interface (:doc:`CLI <../../howto/usage-cli>`) or user interface (:doc:`UI <../../ui>`) do not + Callback functions are only invoked when the DAG or task state changes due to execution by a worker. + As such, DAG and task changes set by the command line interface (:doc:`CLI <../../howto/usage-cli>`) or user interface (:doc:`UI <../../ui>`) do not execute callback functions. .. warning:: @@ -39,26 +45,32 @@ For example, you may wish to alert when certain tasks have failed, or have the l Callback Types -------------- -There are five types of task events that can trigger a callback: +There are six types of events that can trigger a callback: =========================================== ================================================================ Name Description =========================================== ================================================================ -``on_success_callback`` Invoked when the task :ref:`succeeds ` -``on_failure_callback`` Invoked when the task :ref:`fails ` -``on_retry_callback`` Invoked when the task is :ref:`up for retry ` +``on_success_callback`` Invoked when the :ref:`DAG succeeds ` or :ref:`task succeeds `. + Available at the DAG or task level. 
+``on_failure_callback`` Invoked when the task :ref:`fails `. + Available at the DAG or task level. +``on_retry_callback`` Invoked when the task is :ref:`up for retry `. + Available only at the task level. ``on_execute_callback`` Invoked right before the task begins executing. + Available only at the task level. ``on_skipped_callback`` Invoked when the task is :ref:`running ` and AirflowSkipException raised. Explicitly it is NOT called if a task is not started to be executed because of a preceding branching decision in the DAG or a trigger rule which causes execution to skip so that the task execution is never scheduled. + Available only at the task level. =========================================== ================================================================ Example ------- -In the following example, failures in any task call the ``task_failure_alert`` function, and success in the last task calls the ``dag_success_alert`` function: +In the following example, failures in ``task1`` call the ``task_failure_alert`` function, and success at DAG level calls the ``dag_success_alert`` function. +Before each task begins to execute, the ``task_execute_callback`` function will be called: .. code-block:: python @@ -69,6 +81,10 @@ In the following example, failures in any task call the ``task_failure_alert`` f from airflow.providers.standard.operators.empty import EmptyOperator + def task_execute_callback(context): + print(f"Task has begun execution, task_instance_key_str: {context['task_instance_key_str']}") + + def task_failure_alert(context): print(f"Task has failed, task_instance_key_str: {context['task_instance_key_str']}") @@ -83,13 +99,13 @@ In the following example, failures in any task call the ``task_failure_alert`` f start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), dagrun_timeout=datetime.timedelta(minutes=60), catchup=False, - on_success_callback=None, - on_failure_callback=task_failure_alert, + on_success_callback=dag_success_alert, + default_args={"on_execute_callback": task_execute_callback}, tags=["example"], ): - task1 = EmptyOperator(task_id="task1") + task1 = EmptyOperator(task_id="task1", on_failure_callback=[task_failure_alert]) task2 = EmptyOperator(task_id="task2") - task3 = EmptyOperator(task_id="task3", on_success_callback=[dag_success_alert]) + task3 = EmptyOperator(task_id="task3") task1 >> task2 >> task3 .. note:: diff --git a/airflow-core/docs/administration-and-deployment/logging-monitoring/check-health.rst b/airflow-core/docs/administration-and-deployment/logging-monitoring/check-health.rst index daca7dc8738c9..c803791762ef0 100644 --- a/airflow-core/docs/administration-and-deployment/logging-monitoring/check-health.rst +++ b/airflow-core/docs/administration-and-deployment/logging-monitoring/check-health.rst @@ -36,7 +36,7 @@ Webserver Health Check Endpoint ------------------------------- To check the health status of your Airflow instance, you can simply access the endpoint -``/health``. It will return a JSON object in which a high-level glance is provided. +``/api/v2/monitor/health``. It will return a JSON object in which a high-level glance is provided. .. code-block:: JSON @@ -79,7 +79,7 @@ To check the health status of your Airflow instance, you can simply access the e Note that the ``status`` and ``latest_dag_processor_heartbeat`` fields in the health check response will be null for deployments that do not include a ``dag_processor`` component. 
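+For a quick manual check (assuming the API server is reachable at ``localhost:8080``):
+
+.. code-block:: bash
+
+    curl -s http://localhost:8080/api/v2/monitor/health
+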
-Please keep in mind that the HTTP response code of ``/health`` endpoint **should not** be used to determine the health +Please keep in mind that the HTTP response code of ``/api/v2/monitor/health`` endpoint **should not** be used to determine the health status of the application. The return code is only indicative of the state of the rest call (200 for success). Served by the web server, this health check endpoint is independent of the newer :ref:`Scheduler Health Check Server `, which optionally runs on each scheduler. @@ -96,7 +96,7 @@ Scheduler Health Check Server ----------------------------- In order to check scheduler health independent of the web server, Airflow optionally starts a small HTTP server -in each scheduler to serve a scheduler ``\health`` endpoint. It returns status code ``200`` when the scheduler +in each scheduler to serve a scheduler ``/health`` endpoint. It returns status code ``200`` when the scheduler is healthy and status code ``503`` when the scheduler is unhealthy. To run this server in each scheduler, set ``[scheduler]enable_health_check`` to ``True``. By default, it is ``False``. The server is running on the port specified by the ``[scheduler]scheduler_health_check_server_port`` option. By default, it is ``8974``. We are diff --git a/airflow-core/docs/administration-and-deployment/logging-monitoring/logging-tasks.rst b/airflow-core/docs/administration-and-deployment/logging-monitoring/logging-tasks.rst index 81def9b50cdd9..5ab451f29b66d 100644 --- a/airflow-core/docs/administration-and-deployment/logging-monitoring/logging-tasks.rst +++ b/airflow-core/docs/administration-and-deployment/logging-monitoring/logging-tasks.rst @@ -57,7 +57,7 @@ In addition, you can supply a remote location to store current logs and backups. Writing to task logs from your code ----------------------------------- -Airflow uses standard the Python `logging `_ framework to +Airflow uses the standard Python `logging `_ framework to write logs, and for the duration of a task, the root logger is configured to write to the task's log. Most operators will write logs to the task log automatically. This is because they @@ -89,7 +89,7 @@ Grouping of log lines Like CI pipelines also Airflow logs can be quite large and become hard to read. Sometimes therefore it is useful to group sections of log areas and provide folding of text areas to hide non relevant content. Airflow therefore implements a compatible log message grouping like -`Github `_ and +`GitHub `_ and `Azure DevOps `_ such that areas of text can be folded. The implemented scheme is compatible such that tools making output in CI can leverage the same experience in Airflow directly. @@ -113,7 +113,7 @@ When displaying the logs in web UI, the display of logs will be condensed: [2024-03-08, 23:30:18 CET] {logging_mixin.py:188} ⯈ Non important details [2024-03-08, 23:30:18 CET] {logging_mixin.py:188} INFO - Here is again some standard text. -If you click on the log text label, the detailed log lies will be displayed. +If you click on the log text label, the detailed log lines will be displayed. .. code-block:: text @@ -178,7 +178,7 @@ Most task handlers send logs upon completion of a task. In order to view logs in In triggerer, logs are served unless the service is started with option ``--skip-serve-logs``. The server is running on the port specified by ``worker_log_server_port`` option in ``[logging]`` section, and option ``triggerer_log_server_port`` for triggerer. Defaults are 8793 and 8794, respectively. 
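+For example, to pin these ports explicitly, both options live in the ``[logging]`` section (shown here with their default values):
+
+.. code-block:: ini
+
+    [logging]
+    worker_log_server_port = 8793
+    triggerer_log_server_port = 8794
+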
-Communication between the webserver and the worker is signed with the key specified by ``secret_key`` option in ``[webserver]`` section. You must ensure that the key matches so that communication can take place without problems. +Communication between the webserver and the worker is signed with the key specified by ``secret_key`` option in ``[api]`` section. You must ensure that the key matches so that communication can take place without problems. We are using `Gunicorn `__ as a WSGI server. Its configuration options can be overridden with the ``GUNICORN_CMD_ARGS`` env variable. For details, see `Gunicorn settings `__. diff --git a/airflow-core/docs/administration-and-deployment/logging-monitoring/metrics.rst b/airflow-core/docs/administration-and-deployment/logging-monitoring/metrics.rst index 34296c2ed7b0d..56170c4c41fee 100644 --- a/airflow-core/docs/administration-and-deployment/logging-monitoring/metrics.rst +++ b/airflow-core/docs/administration-and-deployment/logging-monitoring/metrics.rst @@ -243,8 +243,6 @@ Name Description ``pool.scheduled_slots`` Number of scheduled slots in the pool. Metric with pool_name tagging. ``pool.starving_tasks.`` Number of starving tasks in the pool ``pool.starving_tasks`` Number of starving tasks in the pool. Metric with pool_name tagging. -``task.cpu_usage..`` Percentage of CPU used by a task -``task.mem_usage..`` Percentage of memory used by a task ``triggers.running.`` Number of triggers currently running for a triggerer (described by hostname) ``triggers.running`` Number of triggers currently running for a triggerer (described by hostname). Metric with hostname tagging. diff --git a/airflow-core/docs/administration-and-deployment/modules_management.rst b/airflow-core/docs/administration-and-deployment/modules_management.rst index 865dab06867cb..e6b8b7d14fc7b 100644 --- a/airflow-core/docs/administration-and-deployment/modules_management.rst +++ b/airflow-core/docs/administration-and-deployment/modules_management.rst @@ -318,7 +318,7 @@ try to import the package now: >>> We can also use :envvar:`PYTHONPATH` variable with the airflow commands. -For example, if we run the following airflow command: +For example, if we run the following Airflow command: .. code-block:: bash diff --git a/airflow-core/docs/administration-and-deployment/plugins.rst b/airflow-core/docs/administration-and-deployment/plugins.rst index 47c0f2fecdf84..2a74ca54d2020 100644 --- a/airflow-core/docs/administration-and-deployment/plugins.rst +++ b/airflow-core/docs/administration-and-deployment/plugins.rst @@ -104,18 +104,16 @@ looks like: name = None # A list of references to inject into the macros namespace macros = [] - # A list of Blueprint object created from flask.Blueprint. For use with the flask_appbuilder based GUI - flask_blueprints = [] # A list of dictionaries containing FastAPI app objects and some metadata. See the example below. fastapi_apps = [] # A list of dictionaries containing FastAPI middleware factory objects and some metadata. See the example below. fastapi_root_middlewares = [] - # A list of dictionaries containing FlaskAppBuilder BaseView object and some metadata. See example below - appbuilder_views = [] - # A list of dictionaries containing kwargs for FlaskAppBuilder add_link. See example below - appbuilder_menu_items = [] + # A list of dictionaries containing external views and some metadata. See the example below. + external_views = [] + # A list of dictionaries containing react apps and some metadata. See the example below. 
+ react_apps = [] - # A callback to perform actions when airflow starts and the plugin is loaded. + # A callback to perform actions when Airflow starts and the plugin is loaded. # NOTE: Ensure your plugin has *args, and **kwargs in the method definition # to protect against extra parameters injected into the on_load(...) # function in future changes @@ -164,35 +162,21 @@ definitions in Airflow. # This is the class you derive to create a plugin from airflow.plugins_manager import AirflowPlugin - from airflow.security import permissions - from airflow.providers.fab.www.auth import has_access from fastapi import FastAPI from fastapi.middleware.trustedhost import TrustedHostMiddleware - from flask import Blueprint - from flask_appbuilder import expose, BaseView as AppBuilderBaseView # Importing base classes that we need to derive from airflow.hooks.base import BaseHook from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator - # Will show up under airflow.macros.test_plugin.plugin_macro - # and in templates through {{ macros.test_plugin.plugin_macro }} + # Will show up in templates through {{ macros.test_plugin.plugin_macro }} def plugin_macro(): pass - # Creating a flask blueprint to integrate the templates and static folder - bp = Blueprint( - "test_plugin", - __name__, - template_folder="templates", # registers airflow/plugins/templates as a Jinja template folder - static_folder="static", - static_url_path="/static/test_plugin", - ) - - # Creating a FastAPI application to integrate in airflow Rest API. + # Creating a FastAPI application to integrate in Airflow Rest API. app = FastAPI() @@ -212,54 +196,49 @@ definitions in Airflow. "name": "Name of the Middleware", } - - # Creating a flask appbuilder BaseView - class TestAppBuilderBaseView(AppBuilderBaseView): - default_view = "test" - - @expose("/") - @has_access( - [ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - ] - ) - def test(self): - return self.render_template("test_plugin/test.html", content="Hello galaxy!") - - - # Creating a flask appbuilder BaseView - class TestAppBuilderBaseNoMenuView(AppBuilderBaseView): - default_view = "test" - - @expose("/") - @has_access( - [ - (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), - ] - ) - def test(self): - return self.render_template("test_plugin/test.html", content="Hello galaxy!") - - - v_appbuilder_view = TestAppBuilderBaseView() - v_appbuilder_package = { - "name": "Test View", - "category": "Test Plugin", - "view": v_appbuilder_view, + # Creating an external view that will be rendered in the Airflow UI. + external_view_with_metadata = { + # Name of the external view, this will be displayed in the UI. + "name": "Name of the External View", + # Source URL of the external view. This URL can be templated using context variables, depending on the location where the external view is rendered + # the context variables available will be different, i.e a subset of (DAG_ID, RUN_ID, TASK_ID, MAP_INDEX). + "href": "https://example.com/{DAG_ID}/{RUN_ID}/{TASK_ID}/{MAP_INDEX}", + # Destination of the external view. This is used to determine where the view will be loaded in the UI. + # Supported locations are Literal["nav", "dag", "dag_run", "task", "task_instance"], default to "nav". + "destination": "dag_run", + # Optional icon, url to an svg file. + "icon": "https://example.com/icon.svg", + # Optional dark icon for the dark theme, url to an svg file. If not provided, "icon" will be used for both light and dark themes. 
+ "icon_dark_mode": "https://example.com/dark_icon.svg", + # Optional parameters, relative URL location for the External View rendering. If not provided, external view will be rendeded as an external link. If provided + # will be rendered inside an Iframe in the UI. Should not contain a leading slash. + "url_route": "my_external_view", + # Optional category, only relevant for destination "nav". This is used to group the external links in the navigation bar. We will match the existing + # menus of ["browse", "docs", "admin", "user"] and if there's no match then create a new menu. + "category": "browse", } - v_appbuilder_nomenu_view = TestAppBuilderBaseNoMenuView() - v_appbuilder_nomenu_package = {"view": v_appbuilder_nomenu_view} - - # Creating flask appbuilder Menu Items - appbuilder_mitem = { - "name": "Google", - "href": "https://www.google.com", - "category": "Search", - } - appbuilder_mitem_toplevel = { - "name": "Apache", - "href": "https://www.apache.org/", + react_app_with_metadata = { + # Name of the React app, this will be displayed in the UI. + "name": "Name of the React App", + # Bundle URL of the React app. This is the URL where the React app is served from. It can be a static file or a CDN. + # This URL can be templated using context variables, depending on the location where the external view is rendered + # the context variables available will be different, i.e a subset of (DAG_ID, RUN_ID, TASK_ID, MAP_INDEX). + "bundle_url": "https://example.com/static/js/my_react_app.js", + # Destination of the react app. This is used to determine where the app will be loaded in the UI. + # Supported locations are Literal["nav", "dag", "dag_run", "task", "task_instance"], default to "nav". + # It can also be put inside of an existing page, the supported views are ["dashboard", "dag_overview", "task_overview"]. You can position + # element in the existing page via the css `order` rule which will determine the flex order. + "destination": "dag_run", + # Optional icon, url to an svg file. + "icon": "https://example.com/icon.svg", + # Optional dark icon for the dark theme, url to an svg file. If not provided, "icon" will be used for both light and dark themes. + "icon_dark_mode": "https://example.com/dark_icon.svg", + # URL route for the React app, relative to the Airflow UI base URL. Should not contain a leading slash. + "url_route": "my_react_app", + # Optional category, only relevant for destination "nav". This is used to group the react apps in the navigation bar. We will match the existing + # menus of ["browse", "docs", "admin", "user"] and if there's no match then create a new menu. + "category": "browse", } @@ -267,11 +246,10 @@ definitions in Airflow. class AirflowTestPlugin(AirflowPlugin): name = "test_plugin" macros = [plugin_macro] - flask_blueprints = [bp] fastapi_apps = [app_with_metadata] fastapi_root_middlewares = [middleware_with_metadata] - appbuilder_views = [v_appbuilder_package, v_appbuilder_nomenu_package] - appbuilder_menu_items = [appbuilder_mitem, appbuilder_mitem_toplevel] + external_views = [external_view_with_metadata] + react_apps = [react_app_with_metadata] .. seealso:: :doc:`/howto/define-extra-link` @@ -307,21 +285,10 @@ will automatically load the registered plugins from the entrypoint list. 
# my_package/my_plugin.py from airflow.plugins_manager import AirflowPlugin - from flask import Blueprint - - # Creating a flask blueprint to integrate the templates and static folder - bp = Blueprint( - "test_plugin", - __name__, - template_folder="templates", # registers airflow/plugins/templates as a Jinja template folder - static_folder="static", - static_url_path="/static/test_plugin", - ) class MyAirflowPlugin(AirflowPlugin): name = "my_namespace" - flask_blueprints = [bp] Then inside pyproject.toml: @@ -330,6 +297,18 @@ Then inside pyproject.toml: [project.entry-points."airflow.plugins"] my_plugin = "my_package.my_plugin:MyAirflowPlugin" +Flask Appbuilder and Flask Blueprints in Airflow 3 +-------------------------------------------------- + +Airflow 2 supported Flask Appbuilder views (``appbuilder_views``), Flask AppBuilder menu items (``appbuilder_menu_items``), +and Flask Blueprints (``flask_blueprints``) in plugins. These have been superseded in Airflow 3 by External Views (``external_views``), Fast API apps (``fastapi_apps``), +FastAPI middlewares (``fastapi_root_middlewares``) and React apps (``react_apps``) that allow extended functionality and better integration with the Airflow UI. + +All new plugins should use the new interfaces. + +However, a compatibility layer is provided for Flask and FAB plugins to ease the transition to Airflow 3 - simply install the FAB provider and tweak the code +following Airflow 3 migration guide. This compatibility layer allows you to continue using your existing Flask Appbuilder views, Flask Blueprints and Flask Appbuilder menu items. + Troubleshooting --------------- diff --git a/airflow-core/docs/administration-and-deployment/priority-weight.rst b/airflow-core/docs/administration-and-deployment/priority-weight.rst index fd83c6806df13..e0a2d43baec67 100644 --- a/airflow-core/docs/administration-and-deployment/priority-weight.rst +++ b/airflow-core/docs/administration-and-deployment/priority-weight.rst @@ -21,7 +21,8 @@ Priority Weights ================ ``priority_weight`` defines priorities in the executor queue. The default ``priority_weight`` is ``1``, and can be -bumped to any integer. Moreover, each task has a true ``priority_weight`` that is calculated based on its +bumped to any integer; larger numbers mean higher priority. +Moreover, each task has a true ``priority_weight`` that is calculated based on its ``weight_rule`` which defines the weighting method used for the effective total priority weight of the task. Below are the weighting methods. By default, Airflow's weighting method is ``downstream``. diff --git a/airflow-core/docs/administration-and-deployment/web-stack.rst b/airflow-core/docs/administration-and-deployment/web-stack.rst index 9a16906ab6acf..5f2159649b3b0 100644 --- a/airflow-core/docs/administration-and-deployment/web-stack.rst +++ b/airflow-core/docs/administration-and-deployment/web-stack.rst @@ -20,6 +20,10 @@ Web Stack ========= + +Configuration +------------- + Sometimes you want to deploy the backend and frontend behind a variable url path prefix. To do so, you can configure the url :ref:`config:api__base_url` for instance, set it to ``http://localhost:28080/d12345``. All the APIs routes will @@ -30,3 +34,26 @@ and served successfully. You will also need to update the execution API server url :ref:`config:core__execution_api_server_url` for tasks to be able to reach the API with the new prefix. 
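+A minimal sketch of such a setup (the host, port and ``/d12345`` prefix are placeholder values, and the ``/execution/`` suffix follows the default URL layout - adjust it to your deployment):
+
+.. code-block:: ini
+
+    [api]
+    base_url = http://localhost:28080/d12345
+
+    [core]
+    execution_api_server_url = http://localhost:28080/d12345/execution/
+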
+ +Separating API Servers +----------------------- + +By default, both the Core API Server and the Execution API Server are served together: + +.. code-block:: bash + + airflow api-server + # same as + airflow api-server --apps all + # or + airflow api-server --apps core,execution + +If you want to separate the Core API Server and the Execution API Server, you can run them +separately. This might be useful for scaling them independently or for deploying them on different machines. + +.. code-block:: bash + + # serve only the Core API Server + airflow api-server --apps core + # serve only the Execution API Server + airflow api-server --apps execution diff --git a/airflow-core/docs/authoring-and-scheduling/asset-scheduling.rst b/airflow-core/docs/authoring-and-scheduling/asset-scheduling.rst new file mode 100644 index 0000000000000..fc9086cb0783e --- /dev/null +++ b/airflow-core/docs/authoring-and-scheduling/asset-scheduling.rst @@ -0,0 +1,315 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Asset-Aware Scheduling +====================== + +.. versionadded:: 2.4 + +Quickstart +---------- + +In addition to scheduling dags based on time, you can also schedule dags to run based on when a task updates an asset. + +.. code-block:: python + + from airflow.sdk import DAG, Asset + + with DAG(...): + MyOperator( + # this task updates example.csv + outlets=[Asset("s3://asset-bucket/example.csv")], + ..., + ) + + + with DAG( + # this DAG should be run when example.csv is updated (by dag1) + schedule=[Asset("s3://asset-bucket/example.csv")], + ..., + ): + ... + + +.. image:: /img/ui-dark/asset_scheduled_dags.png + +.. seealso:: + + :ref:`asset_definitions` for how to declare assets. + +Schedule dags with assets +------------------------- + +You can use assets to specify data dependencies in your dags. The following example shows how after the ``producer`` task in the ``producer`` DAG successfully completes, Airflow schedules the ``consumer`` DAG. Airflow marks an asset as ``updated`` only if the task completes successfully. If the task fails or if it is skipped, no update occurs, and Airflow doesn't schedule the ``consumer`` DAG. + +.. code-block:: python + + example_asset = Asset("s3://asset/example.csv") + + with DAG(dag_id="producer", ...): + BashOperator(task_id="producer", outlets=[example_asset], ...) + + with DAG(dag_id="consumer", schedule=[example_asset], ...): + ... + + +You can find a listing of the relationships between assets and dags in the :ref:`Asset Views `. + +Multiple assets +----------------- + +Because the ``schedule`` parameter is a list, dags can require multiple assets. Airflow schedules a DAG after **all** assets the DAG consumes have been updated at least once since the last time the DAG ran: + +.. 
code-block:: python + + with DAG( + dag_id="multiple_assets_example", + schedule=[ + example_asset_1, + example_asset_2, + example_asset_3, + ], + ..., + ): + ... + + +If one asset is updated multiple times before all consumed assets update, the downstream DAG still only runs once, as shown in this illustration: + +.. :: + ASCII art representation of this diagram + + example_asset_1 x----x---x---x----------------------x- + example_asset_2 -------x---x-------x------x----x------ + example_asset_3 ---------------x-----x------x--------- + DAG runs created * * + +.. graphviz:: + + graph asset_event_timeline { + graph [layout=neato] + { + node [margin=0 fontcolor=blue width=0.1 shape=point label=""] + e1 [pos="1,2.5!"] + e2 [pos="2,2.5!"] + e3 [pos="2.5,2!"] + e4 [pos="4,2.5!"] + e5 [pos="5,2!"] + e6 [pos="6,2.5!"] + e7 [pos="7,1.5!"] + r7 [pos="7,1!" shape=star width=0.25 height=0.25 fixedsize=shape] + e8 [pos="8,2!"] + e9 [pos="9,1.5!"] + e10 [pos="10,2!"] + e11 [pos="11,1.5!"] + e12 [pos="12,2!"] + e13 [pos="13,2.5!"] + r13 [pos="13,1!" shape=star width=0.25 height=0.25 fixedsize=shape] + } + { + node [shape=none label="" width=0] + end_ds1 [pos="14,2.5!"] + end_ds2 [pos="14,2!"] + end_ds3 [pos="14,1.5!"] + } + + { + node [shape=none margin=0.25 fontname="roboto,sans-serif"] + example_asset_1 [ pos="-0.5,2.5!"] + example_asset_2 [ pos="-0.5,2!"] + example_asset_3 [ pos="-0.5,1.5!"] + dag_runs [label="DagRuns created" pos="-0.5,1!"] + } + + edge [color=lightgrey] + + example_asset_1 -- e1 -- e2 -- e4 -- e6 -- e13 -- end_ds1 + example_asset_2 -- e3 -- e5 -- e8 -- e10 -- e12 -- end_ds2 + example_asset_3 -- e7 -- e9 -- e11 -- end_ds3 + + } + +Fetching information from a triggering asset event +---------------------------------------------------- + +A triggered DAG can fetch information from the asset that triggered it using the ``triggering_asset_events`` template or parameter. See more at :ref:`templates-ref`. + +Example: + +.. code-block:: python + + example_snowflake_asset = Asset("snowflake://my_db/my_schema/my_table") + + with DAG(dag_id="load_snowflake_data", schedule="@hourly", ...): + SQLExecuteQueryOperator( + task_id="load", conn_id="snowflake_default", outlets=[example_snowflake_asset], ... + ) + + with DAG(dag_id="query_snowflake_data", schedule=[example_snowflake_asset], ...): + SQLExecuteQueryOperator( + task_id="query", + conn_id="snowflake_default", + sql=""" + SELECT * + FROM my_db.my_schema.my_table + WHERE "updated_at" >= '{{ (triggering_asset_events.values() | first | first).source_dag_run.data_interval_start }}' + AND "updated_at" < '{{ (triggering_asset_events.values() | first | first).source_dag_run.data_interval_end }}'; + """, + ) + + @task + def print_triggering_asset_events(triggering_asset_events=None): + for asset, asset_list in triggering_asset_events.items(): + print(asset, asset_list) + print(asset_list[0].source_dag_run.dag_id) + + print_triggering_asset_events() + +Note that this example is using `(.values() | first | first) `_ to fetch the first of one asset given to the DAG, and the first of one AssetEvent for that asset. An implementation can be quite complex if you have multiple assets, potentially with multiple AssetEvents. + + +Manipulating queued asset events through REST API +--------------------------------------------------- + +.. versionadded:: 2.9 + +In this example, the DAG ``waiting_for_asset_1_and_2`` will be triggered when tasks update both assets "asset-1" and "asset-2". Once "asset-1" is updated, Airflow creates a record. 
This ensures that Airflow knows to trigger the DAG when "asset-2" is updated. We call such records queued asset events. + +.. code-block:: python + + with DAG( + dag_id="waiting_for_asset_1_and_2", + schedule=[Asset("asset-1"), Asset("asset-2")], + ..., + ): + ... + + +``queuedEvent`` API endpoints are introduced to manipulate such records. + +* Get a queued asset event for a DAG: ``/assets/queuedEvent/{uri}`` +* Get queued asset events for a DAG: ``/dags/{dag_id}/assets/queuedEvent`` +* Delete a queued asset event for a DAG: ``/assets/queuedEvent/{uri}`` +* Delete queued asset events for a DAG: ``/dags/{dag_id}/assets/queuedEvent`` +* Get queued asset events for an asset: ``/dags/{dag_id}/assets/queuedEvent/{uri}`` +* Delete queued asset events for an asset: ``DELETE /dags/{dag_id}/assets/queuedEvent/{uri}`` + + For how to use REST API and the parameters needed for these endpoints, please refer to :doc:`Airflow API `. + +Advanced asset scheduling with conditional expressions +-------------------------------------------------------- + +Apache Airflow includes advanced scheduling capabilities that use conditional expressions with assets. This feature allows you to define complex dependencies for DAG executions based on asset updates, using logical operators for more control on workflow triggers. + +Logical operators for assets +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Airflow supports two logical operators for combining asset conditions: + +- **AND (``&``)**: Specifies that the DAG should be triggered only after all of the specified assets have been updated. +- **OR (``|``)**: Specifies that the DAG should be triggered when any of the specified assets is updated. + +These operators enable you to configure your Airflow workflows to use more complex asset update conditions, making them more dynamic and flexible. + +Example Use +------------- + +**Scheduling based on multiple asset updates** + +To schedule a DAG to run only when two specific assets have both been updated, use the AND operator (``&``): + +.. code-block:: python + + dag1_asset = Asset("s3://dag1/output_1.txt") + dag2_asset = Asset("s3://dag2/output_1.txt") + + with DAG( + # Consume asset 1 and 2 with asset expressions + schedule=(dag1_asset & dag2_asset), + ..., + ): + ... + +**Scheduling based on any asset update** + +To trigger a DAG execution when either one of two assets is updated, apply the OR operator (``|``): + +.. code-block:: python + + with DAG( + # Consume asset 1 or 2 with asset expressions + schedule=(dag1_asset | dag2_asset), + ..., + ): + ... + +**Complex Conditional Logic** + +For scenarios requiring more intricate conditions, such as triggering a DAG when one asset is updated or when both of two other assets are updated, combine the OR and AND operators: + +.. code-block:: python + + dag3_asset = Asset("s3://dag3/output_3.txt") + + with DAG( + # Consume asset 1 or both 2 and 3 with asset expressions + schedule=(dag1_asset | (dag2_asset & dag3_asset)), + ..., + ): + ... + + +Scheduling based on asset aliases +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Since asset events added to an alias are just simple asset events, a downstream DAG depending on the actual asset can read asset events of it normally, without considering the associated aliases. A downstream DAG can also depend on an asset alias. The authoring syntax is referencing the ``AssetAlias`` by name, and the associated asset events are picked up for scheduling. 
+Note that a DAG scheduled on an asset alias is triggered only once the alias is resolved into at least one actual asset, such as ``Asset("s3://bucket/my-task")`` in the example below. The DAG runs whenever a task with outlet ``AssetAlias("example-alias")`` gets associated with at least one asset at runtime, regardless of the asset's identity. The downstream DAG is not triggered if no asset is associated with the alias for a particular task run. This also means we can do conditional asset triggering.
+
+The asset alias is resolved to the assets during DAG parsing. Thus, if the ``min_file_process_interval`` configuration is set to a high value, there is a possibility that the asset alias may not be resolved. To resolve this issue, you can trigger DAG parsing.
+
+.. code-block:: python
+
+    with DAG(dag_id="asset-producer"):
+
+        @task(outlets=[Asset("example-alias")])
+        def produce_asset_events():
+            pass
+
+
+    with DAG(dag_id="asset-alias-producer"):
+
+        @task(outlets=[AssetAlias("example-alias")])
+        def produce_asset_events(*, outlet_events):
+            outlet_events[AssetAlias("example-alias")].add(Asset("s3://bucket/my-task"))
+
+
+    with DAG(dag_id="asset-consumer", schedule=Asset("s3://bucket/my-task")):
+        ...
+
+    with DAG(dag_id="asset-alias-consumer", schedule=AssetAlias("example-alias")):
+        ...
+
+
+In the example provided, once the DAG ``asset-alias-producer`` is executed, the asset alias ``AssetAlias("example-alias")`` will be resolved to ``Asset("s3://bucket/my-task")``. However, the DAG ``asset-alias-consumer`` will have to wait for the next DAG re-parsing to update its schedule. To address this, Airflow will re-parse the dags relying on the asset alias ``AssetAlias("example-alias")`` when it's resolved into assets that these dags did not previously depend on. As a result, both the "asset-consumer" and "asset-alias-consumer" dags will be triggered after the execution of DAG ``asset-alias-producer``.
+
+
+Combining asset and time-based schedules
+------------------------------------------
+
+AssetTimetable Integration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+You can schedule dags based on both asset events and time-based schedules using ``AssetOrTimeSchedule``. This is useful when a DAG both needs to be triggered by data updates and needs to run periodically according to a fixed timetable.
+
+For more detailed information on ``AssetOrTimeSchedule``, refer to the corresponding section in :ref:`AssetOrTimeSchedule `.
diff --git a/airflow-core/docs/authoring-and-scheduling/assets.rst b/airflow-core/docs/authoring-and-scheduling/assets.rst
index f7f018c6d97db..366a4069ca52b 100644
--- a/airflow-core/docs/authoring-and-scheduling/assets.rst
+++ b/airflow-core/docs/authoring-and-scheduling/assets.rst
@@ -229,6 +229,18 @@ The other way around also applies:
     def process_example_asset(example_asset):
         """Process inlet example_asset..."""
 
+In addition, ``@asset`` can be used with ``@task`` to customize the task that generates the asset,
+utilizing the modern TaskFlow approach described in :doc:`/tutorial/taskflow`.
+
+This combination allows you to set initial arguments for the task and to use various operators, such as the ``BashOperator``:
+
+.. code-block:: python
+
+    @asset(schedule=None)
+    @task.bash(retries=3)
+    def example_asset():
+        """Write to example_asset, from a Bash task with 3 retries..."""
+        return "echo 'run'"
 
 Output to multiple assets in one task
 -------------------------------------
@@ -280,7 +292,7 @@ The following example creates an asset event against the S3 URI ``f"s3://bucket/
 
 ..
code-block:: python - from airflow.sdk.definitions.asset import AssetAlias + from airflow.sdk import AssetAlias @task(outlets=[AssetAlias("my-task-outputs")]) @@ -292,19 +304,19 @@ The following example creates an asset event against the S3 URI ``f"s3://bucket/ .. code-block:: python - from airflow.sdk.definitions.asset.metadata import Metadata + from airflow.sdk import Metadata @task(outlets=[AssetAlias("my-task-outputs")]) def my_task_with_metadata(): s3_asset = Asset(uri="s3://bucket/my-task", name="example_s3") - yield Metadata(s3_asset, extra={"k": "v"}, alias="my-task-outputs") + yield Metadata(s3_asset, extra={"k": "v"}, alias=AssetAlias("my-task-outputs")) Only one asset event is emitted for an added asset, even if it is added to the alias multiple times, or added to multiple aliases. However, if different ``extra`` values are passed, it can emit multiple asset events. In the following example, two asset events will be emitted. .. code-block:: python - from airflow.sdk.definitions.asset import AssetAlias + from airflow.sdk import AssetAlias @task( diff --git a/airflow-core/docs/authoring-and-scheduling/connections.rst b/airflow-core/docs/authoring-and-scheduling/connections.rst index 2f24e3cc83b5a..7f9cbaa443ecc 100644 --- a/airflow-core/docs/authoring-and-scheduling/connections.rst +++ b/airflow-core/docs/authoring-and-scheduling/connections.rst @@ -38,7 +38,7 @@ A Hook is a high-level interface to an external platform that lets you quickly a They integrate with Connections to gather credentials, and many have a default ``conn_id``; for example, the :class:`~airflow.providers.postgres.hooks.postgres.PostgresHook` automatically looks for the Connection with a ``conn_id`` of ``postgres_default`` if you don't pass one in. -You can view a :ref:`full list of airflow hooks ` in our API documentation. +You can view a :ref:`full list of Airflow hooks ` in our API documentation. Custom connections ------------------ @@ -47,5 +47,5 @@ Airflow allows to define custom connection types. This is what is described in d :doc:`apache-airflow-providers:index` - providers give you the capability of defining your own connections. The connection customization can be done by any provider, but also many of the providers managed by the community define custom connection types. -The full list of all providers delivered by ``Apache Airflow community managed providers`` can be found in +The full list of all connections delivered by ``Apache Airflow community managed providers`` can be found in :doc:`apache-airflow-providers:core-extensions/connections`. diff --git a/airflow-core/docs/authoring-and-scheduling/datasets.rst b/airflow-core/docs/authoring-and-scheduling/datasets.rst deleted file mode 100644 index 1c8387600c411..0000000000000 --- a/airflow-core/docs/authoring-and-scheduling/datasets.rst +++ /dev/null @@ -1,316 +0,0 @@ - .. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - .. http://www.apache.org/licenses/LICENSE-2.0 - - .. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. 
See the License for the - specific language governing permissions and limitations - under the License. - -Asset-Aware Scheduling -====================== - -.. versionadded:: 2.4 - -Quickstart ----------- - -In addition to scheduling dags based on time, you can also schedule dags to run based on when a task updates an asset. - -.. code-block:: python - - from airflow.sdk import DAG, Asset - - with DAG(...): - MyOperator( - # this task updates example.csv - outlets=[Asset("s3://asset-bucket/example.csv")], - ..., - ) - - - with DAG( - # this DAG should be run when example.csv is updated (by dag1) - schedule=[Asset("s3://asset-bucket/example.csv")], - ..., - ): - ... - - -.. image:: /img/asset-scheduled-dags.png - -.. seealso:: - - :ref:`asset_definitions` for how to declare assets. - -Schedule dags with assets -------------------------- - -You can use assets to specify data dependencies in your dags. The following example shows how after the ``producer`` task in the ``producer`` DAG successfully completes, Airflow schedules the ``consumer`` DAG. Airflow marks an asset as ``updated`` only if the task completes successfully. If the task fails or if it is skipped, no update occurs, and Airflow doesn't schedule the ``consumer`` DAG. - -.. code-block:: python - - example_asset = Asset("s3://asset/example.csv") - - with DAG(dag_id="producer", ...): - BashOperator(task_id="producer", outlets=[example_asset], ...) - - with DAG(dag_id="consumer", schedule=[example_asset], ...): - ... - - -You can find a listing of the relationships between assets and dags in the -:ref:`Assets View` - -Multiple assets ------------------ - -Because the ``schedule`` parameter is a list, dags can require multiple assets. Airflow schedules a DAG after **all** assets the DAG consumes have been updated at least once since the last time the DAG ran: - -.. code-block:: python - - with DAG( - dag_id="multiple_assets_example", - schedule=[ - example_asset_1, - example_asset_2, - example_asset_3, - ], - ..., - ): - ... - - -If one asset is updated multiple times before all consumed assets update, the downstream DAG still only runs once, as shown in this illustration: - -.. :: - ASCII art representation of this diagram - - example_asset_1 x----x---x---x----------------------x- - example_asset_2 -------x---x-------x------x----x------ - example_asset_3 ---------------x-----x------x--------- - DAG runs created * * - -.. graphviz:: - - graph asset_event_timeline { - graph [layout=neato] - { - node [margin=0 fontcolor=blue width=0.1 shape=point label=""] - e1 [pos="1,2.5!"] - e2 [pos="2,2.5!"] - e3 [pos="2.5,2!"] - e4 [pos="4,2.5!"] - e5 [pos="5,2!"] - e6 [pos="6,2.5!"] - e7 [pos="7,1.5!"] - r7 [pos="7,1!" shape=star width=0.25 height=0.25 fixedsize=shape] - e8 [pos="8,2!"] - e9 [pos="9,1.5!"] - e10 [pos="10,2!"] - e11 [pos="11,1.5!"] - e12 [pos="12,2!"] - e13 [pos="13,2.5!"] - r13 [pos="13,1!" 
shape=star width=0.25 height=0.25 fixedsize=shape] - } - { - node [shape=none label="" width=0] - end_ds1 [pos="14,2.5!"] - end_ds2 [pos="14,2!"] - end_ds3 [pos="14,1.5!"] - } - - { - node [shape=none margin=0.25 fontname="roboto,sans-serif"] - example_asset_1 [ pos="-0.5,2.5!"] - example_asset_2 [ pos="-0.5,2!"] - example_asset_3 [ pos="-0.5,1.5!"] - dag_runs [label="DagRuns created" pos="-0.5,1!"] - } - - edge [color=lightgrey] - - example_asset_1 -- e1 -- e2 -- e4 -- e6 -- e13 -- end_ds1 - example_asset_2 -- e3 -- e5 -- e8 -- e10 -- e12 -- end_ds2 - example_asset_3 -- e7 -- e9 -- e11 -- end_ds3 - - } - -Fetching information from a triggering asset event ----------------------------------------------------- - -A triggered DAG can fetch information from the asset that triggered it using the ``triggering_asset_events`` template or parameter. See more at :ref:`templates-ref`. - -Example: - -.. code-block:: python - - example_snowflake_asset = Asset("snowflake://my_db/my_schema/my_table") - - with DAG(dag_id="load_snowflake_data", schedule="@hourly", ...): - SQLExecuteQueryOperator( - task_id="load", conn_id="snowflake_default", outlets=[example_snowflake_asset], ... - ) - - with DAG(dag_id="query_snowflake_data", schedule=[example_snowflake_asset], ...): - SQLExecuteQueryOperator( - task_id="query", - conn_id="snowflake_default", - sql=""" - SELECT * - FROM my_db.my_schema.my_table - WHERE "updated_at" >= '{{ (triggering_asset_events.values() | first | first).source_dag_run.data_interval_start }}' - AND "updated_at" < '{{ (triggering_asset_events.values() | first | first).source_dag_run.data_interval_end }}'; - """, - ) - - @task - def print_triggering_asset_events(triggering_asset_events=None): - for asset, asset_list in triggering_asset_events.items(): - print(asset, asset_list) - print(asset_list[0].source_dag_run.dag_id) - - print_triggering_asset_events() - -Note that this example is using `(.values() | first | first) `_ to fetch the first of one asset given to the DAG, and the first of one AssetEvent for that asset. An implementation can be quite complex if you have multiple assets, potentially with multiple AssetEvents. - - -Manipulating queued asset events through REST API ---------------------------------------------------- - -.. versionadded:: 2.9 - -In this example, the DAG ``waiting_for_asset_1_and_2`` will be triggered when tasks update both assets "asset-1" and "asset-2". Once "asset-1" is updated, Airflow creates a record. This ensures that Airflow knows to trigger the DAG when "asset-2" is updated. We call such records queued asset events. - -.. code-block:: python - - with DAG( - dag_id="waiting_for_asset_1_and_2", - schedule=[Asset("asset-1"), Asset("asset-2")], - ..., - ): - ... - - -``queuedEvent`` API endpoints are introduced to manipulate such records. - -* Get a queued asset event for a DAG: ``/assets/queuedEvent/{uri}`` -* Get queued asset events for a DAG: ``/dags/{dag_id}/assets/queuedEvent`` -* Delete a queued asset event for a DAG: ``/assets/queuedEvent/{uri}`` -* Delete queued asset events for a DAG: ``/dags/{dag_id}/assets/queuedEvent`` -* Get queued asset events for an asset: ``/dags/{dag_id}/assets/queuedEvent/{uri}`` -* Delete queued asset events for an asset: ``DELETE /dags/{dag_id}/assets/queuedEvent/{uri}`` - - For how to use REST API and the parameters needed for these endpoints, please refer to :doc:`Airflow API `. 
- -Advanced asset scheduling with conditional expressions --------------------------------------------------------- - -Apache Airflow includes advanced scheduling capabilities that use conditional expressions with assets. This feature allows you to define complex dependencies for DAG executions based on asset updates, using logical operators for more control on workflow triggers. - -Logical operators for assets -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Airflow supports two logical operators for combining asset conditions: - -- **AND (``&``)**: Specifies that the DAG should be triggered only after all of the specified assets have been updated. -- **OR (``|``)**: Specifies that the DAG should be triggered when any of the specified assets is updated. - -These operators enable you to configure your Airflow workflows to use more complex asset update conditions, making them more dynamic and flexible. - -Example Use -------------- - -**Scheduling based on multiple asset updates** - -To schedule a DAG to run only when two specific assets have both been updated, use the AND operator (``&``): - -.. code-block:: python - - dag1_asset = Asset("s3://dag1/output_1.txt") - dag2_asset = Asset("s3://dag2/output_1.txt") - - with DAG( - # Consume asset 1 and 2 with asset expressions - schedule=(dag1_asset & dag2_asset), - ..., - ): - ... - -**Scheduling based on any asset update** - -To trigger a DAG execution when either one of two assets is updated, apply the OR operator (``|``): - -.. code-block:: python - - with DAG( - # Consume asset 1 or 2 with asset expressions - schedule=(dag1_asset | dag2_asset), - ..., - ): - ... - -**Complex Conditional Logic** - -For scenarios requiring more intricate conditions, such as triggering a DAG when one asset is updated or when both of two other assets are updated, combine the OR and AND operators: - -.. code-block:: python - - dag3_asset = Asset("s3://dag3/output_3.txt") - - with DAG( - # Consume asset 1 or both 2 and 3 with asset expressions - schedule=(dag1_asset | (dag2_asset & dag3_asset)), - ..., - ): - ... - - -Scheduling based on asset aliases -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Since asset events added to an alias are just simple asset events, a downstream DAG depending on the actual asset can read asset events of it normally, without considering the associated aliases. A downstream DAG can also depend on an asset alias. The authoring syntax is referencing the ``AssetAlias`` by name, and the associated asset events are picked up for scheduling. Note that a DAG can be triggered by a task with ``outlets=AssetAlias("xxx")`` if and only if the alias is resolved into ``Asset("s3://bucket/my-task")``. The DAG runs whenever a task with outlet ``AssetAlias("out")`` gets associated with at least one asset at runtime, regardless of the asset's identity. The downstream DAG is not triggered if no assets are associated to the alias for a particular given task run. This also means we can do conditional asset-triggering. - -The asset alias is resolved to the assets during DAG parsing. Thus, if the "min_file_process_interval" configuration is set to a high value, there is a possibility that the asset alias may not be resolved. To resolve this issue, you can trigger DAG parsing. - -.. 
code-block:: python - - with DAG(dag_id="asset-producer"): - - @task(outlets=[Asset("example-alias")]) - def produce_asset_events(): - pass - - - with DAG(dag_id="asset-alias-producer"): - - @task(outlets=[AssetAlias("example-alias")]) - def produce_asset_events(*, outlet_events): - outlet_events[AssetAlias("example-alias")].add(Asset("s3://bucket/my-task")) - - - with DAG(dag_id="asset-consumer", schedule=Asset("s3://bucket/my-task")): - ... - - with DAG(dag_id="asset-alias-consumer", schedule=AssetAlias("example-alias")): - ... - - -In the example provided, once the DAG ``asset-alias-producer`` is executed, the asset alias ``AssetAlias("example-alias")`` will be resolved to ``Asset("s3://bucket/my-task")``. However, the DAG ``asset-alias-consumer`` will have to wait for the next DAG re-parsing to update its schedule. To address this, Airflow will re-parse the dags relying on the asset alias ``AssetAlias("example-alias")`` when it's resolved into assets that these dags did not previously depend on. As a result, both the "asset-consumer" and "asset-alias-consumer" dags will be triggered after the execution of DAG ``asset-alias-producer``. - - -Combining asset and time-based schedules ------------------------------------------- - -AssetTimetable Integration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can schedule dags based on both asset events and time-based schedules using ``AssetOrTimeSchedule``. This allows you to create workflows when a DAG needs both to be triggered by data updates and run periodically according to a fixed timetable. - -For more detailed information on ``AssetOrTimeSchedule``, refer to the corresponding section in :ref:`AssetOrTimeSchedule `. diff --git a/airflow-core/docs/authoring-and-scheduling/deferring.rst b/airflow-core/docs/authoring-and-scheduling/deferring.rst index 68933af157d74..b34b33295ea62 100644 --- a/airflow-core/docs/authoring-and-scheduling/deferring.rst +++ b/airflow-core/docs/authoring-and-scheduling/deferring.rst @@ -31,19 +31,19 @@ An overview of how this process works: * The trigger runs until it fires, at which point its source task is re-scheduled by the scheduler. * The scheduler queues the task to resume on a worker node. -You can either use pre-written deferrable operators as a DAG author or write your own. Writing them, however, requires that they meet certain design criteria. +You can either use pre-written deferrable operators as a Dag author or write your own. Writing them, however, requires that they meet certain design criteria. Using Deferrable Operators -------------------------- -If you want to use pre-written deferrable operators that come with Airflow, such as ``TimeSensorAsync``, then you only need to complete two steps: +If you want to use pre-written deferrable operators that come with Airflow, such as ``TimeSensor``, then you only need to complete two steps: * Ensure your Airflow installation runs at least one ``triggerer`` process, as well as the normal ``scheduler`` * Use deferrable operators/sensors in your dags Airflow automatically handles and implements the deferral processes for you. -If you're upgrading existing dags to use deferrable operators, Airflow contains API-compatible sensor variants, like ``TimeSensorAsync`` for ``TimeSensor``. Add these variants into your DAG to use deferrable operators with no other changes required. +If you're upgrading existing dags to use deferrable operators, Airflow contains API-compatible sensor variants. 
Add these variants into your dag to use deferrable operators with no other changes required. Note that you can't use the deferral ability from inside custom PythonOperator or TaskFlow Python functions. Deferral is only available to traditional, class-based operators. @@ -113,7 +113,7 @@ This example shows the structure of a basic trigger, a very simplified version o import asyncio from airflow.triggers.base import BaseTrigger, TriggerEvent - from airflow.utils import timezone + from airflow.sdk.timezone import utcnow class DateTimeTrigger(BaseTrigger): @@ -125,7 +125,7 @@ This example shows the structure of a basic trigger, a very simplified version o return ("airflow.providers.standard.triggers.temporal.DateTimeTrigger", {"moment": self.moment}) async def run(self): - while self.moment > timezone.utcnow(): + while self.moment > utcnow(): await asyncio.sleep(1) yield TriggerEvent(self.moment) diff --git a/airflow-core/docs/authoring-and-scheduling/dynamic-task-mapping.rst b/airflow-core/docs/authoring-and-scheduling/dynamic-task-mapping.rst index d25f1204bd95b..e254bf52071f3 100644 --- a/airflow-core/docs/authoring-and-scheduling/dynamic-task-mapping.rst +++ b/airflow-core/docs/authoring-and-scheduling/dynamic-task-mapping.rst @@ -21,7 +21,7 @@ Dynamic Task Mapping ==================== -Dynamic Task Mapping allows a way for a workflow to create a number of tasks at runtime based upon current data, rather than the DAG author having to know in advance how many tasks would be needed. +Dynamic Task Mapping allows a way for a workflow to create a number of tasks at runtime based upon current data, rather than the Dag author having to know in advance how many tasks would be needed. This is similar to defining your tasks in a for loop, but instead of having the DAG file fetch the data and do that itself, the scheduler can do this based on the output of a previous task. Right before a mapped task is executed the scheduler will create *n* copies of the task, one for each input. @@ -41,11 +41,11 @@ This will show ``Total was 9`` in the task logs when executed. This is the resulting DAG structure: -.. image:: /img/mapping-simple-graph.png +.. image:: /img/ui-light/mapping_simple_graph.png The grid view also provides visibility into your mapped tasks in the details panel: -.. image:: /img/mapping-simple-grid.png +.. image:: /img/ui-dark/grid_mapped_task.png .. note:: Only keyword arguments are allowed to be passed to ``expand()``. @@ -203,7 +203,7 @@ Since the template is rendered after the main execution block, it is possible to .. code-block:: python - from airflow.providers.standard.operators.python import get_current_context + from airflow.sdk import get_current_context @task(map_index_template="{{ my_variable }}") diff --git a/airflow-core/docs/authoring-and-scheduling/event-scheduling.rst b/airflow-core/docs/authoring-and-scheduling/event-scheduling.rst index e6bcbe4ae869d..8a81f01461c07 100644 --- a/airflow-core/docs/authoring-and-scheduling/event-scheduling.rst +++ b/airflow-core/docs/authoring-and-scheduling/event-scheduling.rst @@ -25,7 +25,7 @@ predefined time-based schedules. This is particularly useful in modern data architectures where workflows need to react to real-time data changes, messages, or system signals. -By using assets, as described in :doc:`datasets`, you can configure dags to start execution when specific external events +By using assets, as described in :doc:`asset-scheduling`, you can configure dags to start execution when specific external events occur. 
Assets provide a mechanism to establish dependencies between external events and DAG execution, ensuring that workflows react dynamically to changes in the external environment. @@ -55,7 +55,7 @@ main scenarios for working with triggers in this context: 1. **Creating a new event-driven trigger**: If you need a new trigger for an unsupported event source, you should create a new class inheriting from ``BaseEventTrigger`` and implement its logic. -2. **Adapting an existing compatible trigger**: If an existing trigger (inheriting from ``BaseEvent``) is proven to be +2. **Adapting an existing compatible trigger**: If an existing trigger (inheriting from ``BaseTrigger``) is proven to be already compatible with event-driven scheduling, then you just need to change the base class from ``BaseTrigger`` to ``BaseEventTrigger``. diff --git a/airflow-core/docs/authoring-and-scheduling/index.rst b/airflow-core/docs/authoring-and-scheduling/index.rst index fab495298c45c..beefa12e5cc9c 100644 --- a/airflow-core/docs/authoring-and-scheduling/index.rst +++ b/airflow-core/docs/authoring-and-scheduling/index.rst @@ -18,7 +18,7 @@ Authoring and Scheduling ========================= -Here you can find detailed documentation about advanced authoring and scheduling airflow dags. +Here you can find detailed documentation about advanced authoring and scheduling Airflow dags. It's recommended that you first review the pages in :doc:`core concepts ` .. _authoring-section: @@ -43,6 +43,6 @@ It's recommended that you first review the pages in :doc:`core concepts + Asset-Aware Scheduling timetable Event-Driven Scheduling diff --git a/airflow-core/docs/authoring-and-scheduling/timetable.rst b/airflow-core/docs/authoring-and-scheduling/timetable.rst index e43f99e2c6211..163366b3bab0d 100644 --- a/airflow-core/docs/authoring-and-scheduling/timetable.rst +++ b/airflow-core/docs/authoring-and-scheduling/timetable.rst @@ -312,17 +312,16 @@ Whether taking care of *Data Interval* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A trigger timetable *does not* include *data interval*. This means that the value of ``data_interval_start`` -and ``data_interval_end`` (and the legacy ``execution_date``) are the same; the time when a DAG run is -triggered. +and ``data_interval_end`` are the same; the time when a DAG run is triggered. -For a data interval timetable, the value of ``data_interval_start`` and ``data_interval_end`` (and legacy -``execution_date``) are different. ``data_interval_start`` is the time when a DAG run is triggered and -``data_interval_end`` is the end of the interval. +For a data interval timetable, the value of ``data_interval_start`` and ``data_interval_end`` are different. +``data_interval_end`` is the time when a DAG run is triggered, while ``data_interval_start`` is the start of the interval. *Catchup* behavior ^^^^^^^^^^^^^^^^^^ By default, ``catchup`` is set to ``False``. This prevents running unnecessary dags in the following scenarios: + - If you create a new DAG with a start date in the past, and don't want to run dags for the past. If ``catchup`` is ``True``, Airflow runs all dags that would have run in that time interval. - If you pause an existing DAG, and then restart it at a later date, ``catchup`` being ``False`` means that Airflow does not run the dags that would have run during the paused period. @@ -343,12 +342,14 @@ Both trigger and data interval timetables trigger DAG runs at the same time. 
How For example, suppose there is a cron expression ``@daily`` or ``0 0 * * *``, which is scheduled to run at 12AM every day. If you enable dags using the two timetables at 3PM on January 31st, + - `CronTriggerTimetable`_ creates a new DAG run at 12AM on February 1st. The ``run_id`` timestamp is midnight, on February 1st. - `CronDataIntervalTimetable`_ immediately creates a new DAG run, because a DAG run for the daily time interval beginning at 12AM on January 31st did not occur yet. The ``run_id`` timestamp is midnight, on January 31st, since that is the beginning of the data interval. The following is another example showing the difference in the case of skipping DAG runs: Suppose there are two running dags with a cron expression ``@daily`` or ``0 0 * * *`` that use the two different timetables. If you pause the dags at 3PM on January 31st and re-enable them at 3PM on February 2nd, + - `CronTriggerTimetable`_ skips the DAG runs that were supposed to trigger on February 1st and 2nd. The next DAG run will be triggered at 12AM on February 3rd. - `CronDataIntervalTimetable`_ skips the DAG runs that were supposed to trigger on February 1st only. A DAG run for February 2nd is immediately triggered after you re-enable the DAG. diff --git a/airflow-core/docs/authoring-and-scheduling/timezone.rst b/airflow-core/docs/authoring-and-scheduling/timezone.rst index e192bd2371cf1..20b3219472e2b 100644 --- a/airflow-core/docs/authoring-and-scheduling/timezone.rst +++ b/airflow-core/docs/authoring-and-scheduling/timezone.rst @@ -75,7 +75,7 @@ Because Airflow uses time zone aware datetime objects. If your code creates date .. code-block:: python - from airflow.utils import timezone + from airflow.sdk import timezone now = timezone.utcnow() a_date = timezone.datetime(2017, 1, 1) diff --git a/airflow-core/docs/best-practices.rst b/airflow-core/docs/best-practices.rst index 9f596a9d257bc..839c95dbfe94f 100644 --- a/airflow-core/docs/best-practices.rst +++ b/airflow-core/docs/best-practices.rst @@ -296,18 +296,12 @@ When you execute that code you will see: This means that the ``get_array`` is not executed as top-level code, but ``get_task_id`` is. -.. _best_practices/dynamic_dag_generation: - Code Quality and Linting ------------------------ Maintaining high code quality is essential for the reliability and maintainability of your Airflow workflows. Utilizing linting tools can help identify potential issues and enforce coding standards. One such tool is ``ruff``, a fast Python linter that now includes specific rules for Airflow. -ruff assists in detecting deprecated features and patterns that may affect your migration to Airflow 3.0. For instance, it includes rules prefixed with ``AIR`` to flag potential issues: - -- **AIR301**: Flags DAGs without an explicit ``schedule`` argument. -- **AIR302**: Identifies usage of deprecated ``schedule_interval`` parameter. -- **AIR303**: Detects imports from modules that have been relocated or removed in Airflow 3.0. +ruff assists in detecting deprecated features and patterns that may affect your migration to Airflow 3.0. For instance, it includes rules prefixed with ``AIR`` to flag potential issues. The full list is detailed in `Airflow (AIR) `_. Installing and Using ruff ------------------------- @@ -316,13 +310,13 @@ Installing and Using ruff .. code-block:: bash - pip install "ruff>=0.9.5" + pip install "ruff>=0.11.6" 2. **Running ruff**: Execute ``ruff`` to check your dags for potential issues: .. 
code-block:: bash - ruff check dags/ --select AIR301,AIR302,AIR303 + ruff check dags/ --select AIR3 --preview This command will analyze your dags located in the ``dags/`` directory and report any issues related to the specified rules. @@ -355,6 +349,7 @@ By integrating ``ruff`` into your development workflow, you can proactively addr For more information on ``ruff`` and its integration with Airflow, refer to the `official Airflow documentation `_. +.. _best_practices/dynamic_dag_generation: Dynamic DAG Generation ---------------------- @@ -451,7 +446,7 @@ for any variable that contains sensitive data. Timetables ---------- -Avoid using Airflow Variables/Connections or accessing airflow database at the top level of your timetable code. +Avoid using Airflow Variables/Connections or accessing Airflow database at the top level of your timetable code. Database access should be delayed until the execution time of the DAG. This means that you should not have variables/connections retrieval as argument to your timetable class initialization or have Variable/connection at the top level of your custom timetable module. @@ -776,52 +771,28 @@ This is an example test want to verify the structure of a code-generated DAG aga .. code-block:: python - import datetime - import pendulum - import pytest from airflow.sdk import DAG - from airflow.utils.state import DagRunState, TaskInstanceState - from airflow.utils.types import DagRunTriggeredByType, DagRunType - - DATA_INTERVAL_START = pendulum.datetime(2021, 9, 13, tz="UTC") - DATA_INTERVAL_END = DATA_INTERVAL_START + datetime.timedelta(days=1) + from airflow.utils.state import TaskInstanceState - TEST_DAG_ID = "my_custom_operator_dag" - TEST_TASK_ID = "my_custom_operator_task" - TEST_RUN_ID = "my_custom_operator_dag_run" - - @pytest.fixture() - def dag(): + def test_my_custom_operator_execute_no_trigger(dag): + TEST_TASK_ID = "my_custom_operator_task" with DAG( - dag_id=TEST_DAG_ID, + dag_id="my_custom_operator_dag", schedule="@daily", - start_date=DATA_INTERVAL_START, + start_date=pendulum.datetime(2021, 9, 13, tz="UTC"), ) as dag: MyCustomOperator( task_id=TEST_TASK_ID, prefix="s3://bucket/some/prefix", ) - return dag - - def test_my_custom_operator_execute_no_trigger(dag): - dagrun = dag.create_dagrun( - run_id=TEST_RUN_ID, - logical_date=DATA_INTERVAL_START, - data_interval=(DATA_INTERVAL_START, DATA_INTERVAL_END), - run_type=DagRunType.MANUAL, - triggered_by=DagRunTriggeredByType.TIMETABLE, - state=DagRunState.RUNNING, - start_date=DATA_INTERVAL_END, - ) + dagrun = dag.test() ti = dagrun.get_task_instance(task_id=TEST_TASK_ID) - ti.task = dag.get_task(task_id=TEST_TASK_ID) - ti.run(ignore_ti_state=True) assert ti.state == TaskInstanceState.SUCCESS - # Assert something related to tasks results. + # Assert something related to tasks results: ti.xcom_pull() Self-Checks @@ -980,15 +951,15 @@ The benefits of the operator are: * There is no need to prepare the venv upfront. It will be dynamically created before task is run, and removed after it is finished, so there is nothing special (except having virtualenv package in your - airflow dependencies) to make use of multiple virtual environments + Airflow dependencies) to make use of multiple virtual environments * You can run tasks with different sets of dependencies on the same workers - thus Memory resources are reused (though see below about the CPU overhead involved in creating the venvs). -* In bigger installations, DAG Authors do not need to ask anyone to create the venvs for you. 
- As a DAG Author, you only have to have virtualenv dependency installed and you can specify and modify the
+* In bigger installations, Dag authors do not need to ask anyone to create the venvs for you.
+  As a Dag author, you only have to have the virtualenv dependency installed and you can specify and modify the
   environments as you see fit.
 * No changes in deployment requirements - whether you use Local virtualenv, or Docker, or Kubernetes,
   the tasks will work without adding anything to your deployment.
-* No need to learn more about containers, Kubernetes as a DAG Author. Only knowledge of Python requirements
+* No need to learn more about containers or Kubernetes as a Dag author. Only knowledge of Python requirements
   is required to author dags this way.
 
 There are certain limitations and overhead introduced by this operator:
@@ -1015,7 +986,7 @@ There are certain limitations and overhead introduced by this operator:
    same worker might be affected by previous tasks creating/modifying files etc.
 
 You can see detailed examples of using :class:`airflow.providers.standard.operators.python.PythonVirtualenvOperator` in
-:ref:`this section in the Taskflow API tutorial `.
+:ref:`this section in the TaskFlow API tutorial `.
 
 
 Using ExternalPythonOperator
 ----------------------------
@@ -1034,7 +1005,7 @@ and available in all the workers in case your Airflow runs in a distributed envi
 
 This way you avoid the overhead and problems of re-creating the virtual environment, but the environments have to be
 prepared and deployed together with the Airflow installation. Usually people who manage Airflow installation
-need to be involved, and in bigger installations those are usually different people than DAG Authors
+need to be involved, and in bigger installations those are usually different people than Dag authors
 (DevOps/System Admins).
 
 Those virtual environments can be prepared in various ways - if you use LocalExecutor they just need to be installed
@@ -1053,7 +1024,7 @@ The benefits of the operator are:
   be added dynamically. This is good for both security and stability.
 * Limited impact on your deployment - you do not need to switch to Docker containers or Kubernetes
   to make good use of the operator.
-* No need to learn more about containers, Kubernetes as a DAG Author. Only knowledge of Python, requirements
+* No need to learn more about containers or Kubernetes as a Dag author. Only knowledge of Python requirements
   is required to author dags this way.
 
 The drawbacks:
@@ -1074,7 +1045,7 @@ The drawbacks:
    same worker might be affected by previous tasks creating/modifying files etc.
 
 You can think about the ``PythonVirtualenvOperator`` and ``ExternalPythonOperator`` as counterparts -
-that make it smoother to move from development phase to production phase. As a DAG author you'd normally
+that make it smoother to move from the development phase to the production phase. As a Dag author you'd normally
 iterate with dependencies and develop your DAG using ``PythonVirtualenvOperator`` (thus decorating your
 tasks with ``@task.virtualenv`` decorators), and after the iteration and changes you would likely want to
 switch to the ``ExternalPythonOperator`` (and ``@task.external_python``) for production.
@@ -1083,7 +1054,7 @@ The nice thing about this is that you can switch the decorator back at any time
 developing it "dynamically" with ``PythonVirtualenvOperator``.
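+
+For example, the same task can move from a dynamically created venv in development to a pre-built interpreter in
+production just by swapping the decorator. A minimal sketch - the requirements pin and the interpreter path are
+placeholders for your own values:
+
+.. code-block:: python
+
+    # Development: Airflow builds a venv with the pinned requirements at runtime.
+    @task.virtualenv(requirements=["pandas==2.1.4"])
+    def transform():
+        import pandas as pd
+
+        return pd.DataFrame({"a": [1, 2]}).sum().to_dict()
+
+
+    # Production: the same callable runs on a pre-built, deployed interpreter.
+    @task.external_python(python="/opt/venvs/pandas_venv/bin/python")
+    def transform():
+        import pandas as pd
+
+        return pd.DataFrame({"a": [1, 2]}).sum().to_dict()
+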
You can see detailed examples of using :class:`airflow.providers.standard.operators.python.ExternalPythonOperator` in -:ref:`Taskflow External Python example ` +:ref:`TaskFlow External Python example ` Using DockerOperator or Kubernetes Pod Operator ----------------------------------------------- @@ -1147,9 +1118,9 @@ The drawbacks: containers etc. in order to author a DAG that uses those operators. You can see detailed examples of using :class:`airflow.operators.providers.Docker` in -:ref:`Taskflow Docker example ` +:ref:`TaskFlow Docker example ` and :class:`airflow.providers.cncf.kubernetes.operators.pod.KubernetesPodOperator` -:ref:`Taskflow Kubernetes example ` +:ref:`TaskFlow Kubernetes example ` Using multiple Docker Images and Celery Queues ---------------------------------------------- diff --git a/airflow-core/docs/cli-and-env-variables-ref.rst b/airflow-core/docs/cli-and-env-variables-ref.rst index b679bc7b7986c..6336396911baf 100644 --- a/airflow-core/docs/cli-and-env-variables-ref.rst +++ b/airflow-core/docs/cli-and-env-variables-ref.rst @@ -77,7 +77,7 @@ Environment Variables * ``result_backend`` in ``[celery]`` section * ``password`` in ``[atlas]`` section * ``smtp_password`` in ``[smtp]`` section -* ``secret_key`` in ``[webserver]`` section +* ``secret_key`` in ``[api]`` section .. envvar:: AIRFLOW__{SECTION}__{KEY}_SECRET diff --git a/airflow-core/docs/conf.py b/airflow-core/docs/conf.py index 43026bb39f258..c9886a172b328 100644 --- a/airflow-core/docs/conf.py +++ b/airflow-core/docs/conf.py @@ -67,8 +67,6 @@ SYSTEM_TESTS_DIR: pathlib.Path | None SYSTEM_TESTS_DIR = AIRFLOW_REPO_ROOT_PATH / "airflow-core" / "tests" / "system" / "core" -conf_py_path = f"/docs/{PACKAGE_NAME}/" - os.environ["AIRFLOW_PACKAGE_NAME"] = PACKAGE_NAME # Hack to allow changing for piece of the code to behave differently while @@ -122,7 +120,6 @@ PACKAGES_THAT_WE_SHOULD_ADD_TO_API_DOCS = { "hooks", - "decorators", "example_dags", "executors", "operators", @@ -142,15 +139,7 @@ MODELS_THAT_SHOULD_BE_INCLUDED_IN_API_DOCS: set[str] = { "baseoperator.py", - "connection.py", - "dag.py", - "dagrun.py", - "dagbag.py", "param.py", - "taskinstance.py", - "taskinstancekey.py", - "variable.py", - "xcom.py", } @@ -200,7 +189,7 @@ def add_airflow_core_exclude_patterns_to_sphinx(exclude_patterns: list[str]): # a list of builtin themes. html_theme = "sphinx_airflow_theme" -html_title = "Airflow Documentation" +html_title = f"Airflow {PACKAGE_VERSION} Documentation" # A shorter title for the navigation bar. Default is the same as html_title. html_short_title = "" @@ -223,6 +212,7 @@ def add_airflow_core_exclude_patterns_to_sphinx(exclude_patterns: list[str]): "installation/installing-from-sources.html", "administration-and-deployment/logging-monitoring/advanced-logging-configuration.html", "howto/docker-compose/index.html", + "security/sbom.html", ] html_css_files = ["custom.css"] @@ -238,6 +228,7 @@ def add_airflow_core_exclude_patterns_to_sphinx(exclude_patterns: list[str]): # html theme options html_theme_options: dict[str, Any] = get_html_theme_options() +conf_py_path = "/airflow-core/docs/" # A dictionary of values to pass into the template engine's context for all pages. 
html_context = get_html_context(conf_py_path) @@ -263,6 +254,13 @@ def add_airflow_core_exclude_patterns_to_sphinx(exclude_patterns: list[str]): }, } +# Use for generate rst_epilog and other post-generation substitutions +global_substitutions = { + "version": PACKAGE_VERSION, + "airflow-version": airflow.__version__, + "experimental": "This is an :ref:`experimental feature `.", +} + # -- Options for sphinx.ext.autodoc -------------------------------------------- # See: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html @@ -343,8 +341,8 @@ def add_airflow_core_exclude_patterns_to_sphinx(exclude_patterns: list[str]): graphviz_output_format = "svg" -main_openapi_path = Path(main_openapi_file).parent.joinpath("v1-rest-api-generated.yaml") -sam_openapi_path = Path(sam_openapi_file).parent.joinpath("v1-simple-auth-manager-generated.yaml") +main_openapi_path = Path(main_openapi_file).parent.joinpath("v2-rest-api-generated.yaml") +sam_openapi_path = Path(sam_openapi_file).parent.joinpath("v2-simple-auth-manager-generated.yaml") redoc = [ { "name": "Simple auth manager token API", @@ -360,7 +358,6 @@ def add_airflow_core_exclude_patterns_to_sphinx(exclude_patterns: list[str]): "spec": main_openapi_path.as_posix(), "opts": { "hide-hostname": True, - "no-auto-auth": True, }, }, ] diff --git a/airflow-core/docs/configurations-ref.rst b/airflow-core/docs/configurations-ref.rst index 2d6b9cc3cc8ea..83c5d8a8ed51a 100644 --- a/airflow-core/docs/configurations-ref.rst +++ b/airflow-core/docs/configurations-ref.rst @@ -24,12 +24,12 @@ can set in ``airflow.cfg`` file or using environment variables. Use the same configuration across all the Airflow components. While each component does not require all, some configurations need to be same otherwise they would not -work as expected. A good example for that is :ref:`secret_key` which +work as expected. A good example for that is :ref:`secret_key` which should be same on the Webserver and Worker to allow Webserver to fetch logs from Worker. The webserver key is also used to authorize requests to Celery workers when logs are retrieved. The token generated using the secret key has a short expiry time though - make sure that time on ALL the machines -that you run airflow components on is synchronized (for example using ntpd) otherwise you might get +that you run Airflow components on is synchronized (for example using ntpd) otherwise you might get "forbidden" errors when the logs are accessed. .. note:: diff --git a/airflow-core/docs/core-concepts/auth-manager/index.rst b/airflow-core/docs/core-concepts/auth-manager/index.rst index 417bad407db1f..845d03b03d010 100644 --- a/airflow-core/docs/core-concepts/auth-manager/index.rst +++ b/airflow-core/docs/core-concepts/auth-manager/index.rst @@ -51,10 +51,7 @@ Provided by Airflow: * :doc:`simple/index` -Provided by providers: - -* :doc:`apache-airflow-providers-fab:auth-manager/index` -* :doc:`apache-airflow-providers-amazon:auth-manager/index` +Provided by providers. The list of supported auth managers is available in :doc:`apache-airflow-providers:core-extensions/auth-managers`. Why pluggable auth managers? ---------------------------- @@ -66,7 +63,7 @@ users does not need the same user management as an environment used by thousand This is why the whole user management (user authentication and user authorization) is packaged in one component called auth manager. So that it is easy to plug-and-play an auth manager that suits your specific needs. 
-By default, Airflow comes with the :doc:`apache-airflow-providers-fab:auth-manager/index`. +By default, Airflow comes with the :doc:`simple/index`. .. note:: Switching to a different auth manager is a heavy operation and should be considered as such. It will @@ -94,14 +91,28 @@ Some reasons you may want to write a custom auth manager include: * You'd like to use an auth manager that leverages an identity provider from your preferred cloud provider. * You have a private user management tool that is only available to you or your organization. -Authentication related BaseAuthManager methods -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +User representation +^^^^^^^^^^^^^^^^^^^ + +:class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager` defines an authentication manager, +parameterized by a user class T representing the authenticated user type. +Auth manager implementations (subclasses of :class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager`) +should specify the associated concrete user type. Each auth manager has its own user type definition. +Concrete user types should be subclass of :class:`~airflow.api_fastapi.auth.managers.models.base_user.BaseUser`. + +Authentication related methods +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -* ``get_user``: Return the signed-in user. * ``get_url_login``: Return the URL the user is redirected to for signing in. +* ``get_url_logout``: Return the URL the user is redirected to when logging out. This is an optional method, + this redirection is usually needed to invalidate resources when logging out, such as a session. +* ``serialize_user``: Serialize a user instance to a dict. This dict is the actual content of the JWT token. + It should contain all the information needed to identify the user and make an authorization request. +* ``deserialize_user``: Create a user instance from a dict. The dict is the payload of the JWT token. + This is the same dict returned by ``serialize_user``. -Authorization related BaseAuthManager methods -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Authorization related methods +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Most of authorization methods in :class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager` look the same. Let's go over the different parameters used by most of these methods. @@ -112,11 +123,11 @@ Let's go over the different parameters used by most of these methods. * ``POST``: Can the user create a resource? * ``PUT``: Can the user modify the resource? * ``DELETE``: Can the user delete the resource? - * ``MENU``: Can the user see the resource in the menu? * ``details``: Optional details about the resource being accessed. * ``user``: The user trying to access the resource. + These authorization methods are: * ``is_authorized_configuration``: Return whether the user is authorized to access Airflow configuration. Some details about the configuration can be provided (e.g. the config section). @@ -125,22 +136,28 @@ These authorization methods are: Also, ``is_authorized_dag`` is called for any entity related to dags (e.g. task instances, dag runs, ...). This information is passed in ``access_entity``. Example: ``auth_manager.is_authorized_dag(method="GET", access_entity=DagAccessEntity.Run, details=DagDetails(id="dag-1"))`` asks whether the user has permission to read the Dag runs of the dag "dag-1". -* ``is_authorized_dataset``: Return whether the user is authorized to access Airflow datasets. Some details about the dataset can be provided (e.g. the dataset uri). 
+* ``is_authorized_backfill``: Return whether the user is authorized to access Airflow backfills. Some details about the backfill can be provided (e.g. the backfill ID). +* ``is_authorized_asset``: Return whether the user is authorized to access Airflow assets. Some details about the asset can be provided (e.g. the asset ID). +* ``is_authorized_asset_alias``: Return whether the user is authorized to access Airflow asset aliases. Some details about the asset alias can be provided (e.g. the asset alias ID). * ``is_authorized_pool``: Return whether the user is authorized to access Airflow pools. Some details about the pool can be provided (e.g. the pool name). * ``is_authorized_variable``: Return whether the user is authorized to access Airflow variables. Some details about the variable can be provided (e.g. the variable key). * ``is_authorized_view``: Return whether the user is authorized to access a specific view in Airflow. The view is specified through ``access_view`` (e.g. ``AccessView.CLUSTER_ACTIVITY``). * ``is_authorized_custom_view``: Return whether the user is authorized to access a specific view not defined in Airflow. This view can be provided by the auth manager itself or a plugin defined by the user. +* ``filter_authorized_menu_items``: Given the list of menu items in the UI, return the list of menu items the user has access to. + +It should be noted that the ``method`` parameter listed above may only have relevance for a specific subset of the auth manager's authorization methods. +For example, the ``configuration`` resource is by definition read-only, so only the ``GET`` parameter is relevant in the context of ``is_authorized_configuration``. JWT token management by auth managers ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The auth manager is responsible for creating the JWT token needed to interact with Airflow public API. -To achieve this, the auth manager **must** provide an endpoint to create this JWT token. This endpoint must be -available at ``POST /auth/token`` +To achieve this, the auth manager **must** provide an endpoint to create this JWT token. This endpoint is usually +available at ``POST /auth/token``. +Please double check the auth manager documentation to find the accurate token generation endpoint. -The auth manager is also responsible of passing the JWT token to Airflow UI. The protocol to exchange the JWT +The auth manager is also responsible for passing the JWT token to the Airflow UI. The protocol to exchange the JWT token between the auth manager and Airflow UI is using cookies. The auth manager needs to save the JWT token in a -cookie named ``_token`` before redirecting to the Airflow UI. The Airflow UI will then read the cookie, save it and -delete the cookie. +cookie named ``_token`` before redirecting to the Airflow UI. The Airflow UI will then read the cookie, save it, and delete it. .. code-block:: python @@ -148,7 +165,7 @@ delete the cookie. response = RedirectResponse(url="/") - secure = conf.has_option("api", "ssl_cert") + secure = request.base_url.scheme == "https" or bool(conf.get("api", "ssl_cert", fallback="")) response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure) return response @@ -162,11 +179,11 @@ Optional methods recommended to override for optimization The following methods aren't required to override to have a functional Airflow auth manager. However, it is recommended to override these to make your auth manager faster (and potentially less costly): * ``batch_is_authorized_dag``: Batch version of ``is_authorized_dag``. 
If not overridden, it will call ``is_authorized_dag`` for every single item.
-* ``batch_is_authorized_connection``: Batch version of ``is_authorized_connection``. If not overridden, it will call ``is_authorized_connection`` for every single item.
-* ``batch_is_authorized_pool``: Batch version of ``is_authorized_pool``. If not overridden, it will call ``is_authorized_pool`` for every single item.
-* ``batch_is_authorized_variable``: Batch version of ``is_authorized_variable``. If not overridden, it will call ``is_authorized_variable`` for every single item.
 * ``get_authorized_dag_ids``: Return the list of DAG IDs the user has access to. If not overridden, it will call ``is_authorized_dag`` for every single DAG available in the environment.
+
+  * Note: To filter the results of ``get_authorized_dag_ids``, it is recommended that you define the filtering logic in your ``filter_authorized_dag_ids`` method. For example, this may be useful if you rely on per-DAG access controls derived from one or more fields on a given DAG (e.g. DAG tags).
+  * This method requires an active session with the Airflow metadata database. As such, overriding the ``get_authorized_dag_ids`` method is an advanced use case, which should be considered carefully -- it is recommended you refer to the :doc:`../../database-erd-ref`.
+
 
 CLI
 ^^^
@@ -207,6 +224,41 @@ To extend the API server application, you need to implement the ``get_fastapi_ap
 Such additional endpoints can be used to manage resources such as users, groups, roles (if any) handled by your
 auth manager. Endpoints defined by ``get_fastapi_app`` are mounted in ``/auth``.
 
+Other optional methods
+^^^^^^^^^^^^^^^^^^^^^^
+
+* ``init``: This method is executed when Airflow is initializing.
+  Override this method if you need to perform any action (e.g. creating resources, making an API call) that your auth manager needs at startup.
+* ``get_extra_menu_items``: Provide additional links to be added to the menu in the UI.
+* ``get_db_manager``: If your auth manager requires one or several database managers (see :class:`~airflow.utils.db_manager.BaseDBManager`),
+  their class paths need to be returned as part of this method. By doing so, they will be automatically added to the
+  config ``[database] external_db_managers``.
+
+
+Additional Caveats
+^^^^^^^^^^^^^^^^^^
+
+* Your auth manager should not reference anything from the ``airflow.security.permissions`` module, as that module is in the process of being deprecated.
+  Instead, your code should use the definitions in ``airflow.api_fastapi.auth.managers.models.resource_details``. For more details on the ``airflow.security.permissions`` deprecation, see :doc:`/security/deprecated_permissions`.
+* The ``access_control`` attribute of a DAG instance is only compatible with the FAB auth manager. Custom auth manager implementations should leverage ``get_authorized_dag_ids`` for DAG instance attribute-based access controls in more customizable ways (e.g. authorization based on DAG tags, DAG bundles, etc.).
+* You may find it useful to define a private, generalized ``_is_authorized`` method which acts as the standardized authorization mechanism, and which each
+  public ``is_authorized_*`` method calls with the appropriate parameters.
+  For concrete examples of this, refer to the ``SimpleAuthManager._is_authorized`` method. Further, it may be useful to optionally use the ``airflow.api_fastapi.auth.managers.base_auth_manager.ExtendedResourceMethod`` reference within your private method.
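+
+  A minimal sketch of this pattern follows. The class and helper names are hypothetical, and the keyword-only
+  signatures are assumptions based on the ``BaseAuthManager`` interface described above:
+
+  .. code-block:: python
+
+      class MyAuthManager(BaseAuthManager[MyUser]):
+          def _is_authorized(self, *, method, resource_type, user, details=None) -> bool:
+              # Hypothetical helper: look up the (resource_type, method) pairs granted
+              # to this user in whatever backend your organization uses.
+              return (resource_type, method) in self._permissions_for(user)
+
+          def is_authorized_variable(self, *, method, user, details=None) -> bool:
+              return self._is_authorized(method=method, resource_type="Variable", user=user, details=details)
+
+          def is_authorized_pool(self, *, method, user, details=None) -> bool:
+              return self._is_authorized(method=method, resource_type="Pool", user=user, details=details)
+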
+ +DAG and DAG Sub-Component Authorization +--------------------------------------- + +Given the hierarchical structure of DAGs and their composite resources, the auth manager's ``is_authorized_dag`` method should also handle the authorization logic for DAG runs, tasks, and task instances. +The ``access_entity`` parameter passed to ``is_authorized_dag`` indicates which (if any) DAG sub-component the user is attempting to access. This leads to a few important points: + +* If the ``access_entity`` parameter is ``None``, then the user is attempting to interact directly with the DAG, not any of its sub-components. +* When the ``access_entity`` parameter is not ``None``, it means the user is attempting to access some sub-component of the DAG. This is noteworthy, as in some cases the ``method`` parameter may be valid + for the DAG's sub-entity, but not a valid action directly on the DAG itself. For example, the ``POST`` method is valid for DAG runs, but **not** for DAGs. +* One potential way to model the example request mentioned above -- where the ``method`` only has meaning for the DAG sub-component -- is to authorize the user if **both** statements are true: + + * The user has ``PUT`` ("edit") permissions for the given DAG. + * The user has ``POST`` ("create") permissions for DAG runs. + Next Steps ---------- diff --git a/airflow-core/docs/core-concepts/auth-manager/simple/token.rst b/airflow-core/docs/core-concepts/auth-manager/simple/token.rst index 4e9e5cadf3200..0fdaee5dd78f5 100644 --- a/airflow-core/docs/core-concepts/auth-manager/simple/token.rst +++ b/airflow-core/docs/core-concepts/auth-manager/simple/token.rst @@ -30,7 +30,7 @@ Example .. code-block:: bash - ENDPOINT_URL="http://localhost:8080/" + ENDPOINT_URL="http://localhost:8080" curl -X 'POST' \ "${ENDPOINT_URL}/auth/token" \ -H 'Content-Type: application/json' \ @@ -45,5 +45,5 @@ If ``[core] simple_auth_manager_all_admins`` is set to True, you can also genera .. code-block:: bash - ENDPOINT_URL="http://localhost:8080/" + ENDPOINT_URL="http://localhost:8080" curl -X 'GET' "${ENDPOINT_URL}/auth/token" diff --git a/airflow-core/docs/core-concepts/backfill.rst b/airflow-core/docs/core-concepts/backfill.rst index ac6729b23d0d7..955a3661557fb 100644 --- a/airflow-core/docs/core-concepts/backfill.rst +++ b/airflow-core/docs/core-concepts/backfill.rst @@ -20,7 +20,7 @@ Backfill Backfill is when you create runs for past dates of a dag. Airflow provides a mechanism to do this through the CLI and REST API. You provide a dag, a start date, and an end date, -and airflow will create runs in the range according to the dag's schedule. +and Airflow will create runs in the range according to the dag's schedule. Backfill does not make sense for dags that don't have a time-based schedule. diff --git a/airflow-core/docs/core-concepts/dag-run.rst b/airflow-core/docs/core-concepts/dag-run.rst index 1befa0b7c13e1..1cb121c9b721b 100644 --- a/airflow-core/docs/core-concepts/dag-run.rst +++ b/airflow-core/docs/core-concepts/dag-run.rst @@ -167,6 +167,8 @@ the errors after going through the logs, you can re-run the tasks by clearing th scheduled date. Clearing a task instance creates a record of the task instance. The ``try_number`` of the current task instance is incremented, the ``max_tries`` set to ``0`` and the state set to ``None``, which causes the task to re-run. +An experimental feature in Airflow 3.1.0 allows you to clear the task instances and re-run with the latest bundle version. 
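+
+Besides the UI flow described below, task instances can also be cleared programmatically through the REST API.
+A minimal sketch - the host, token, and request fields here are placeholders, so check the API reference of your
+Airflow version for the exact endpoint path and payload schema:
+
+.. code-block:: python
+
+    import requests
+
+    # Hypothetical deployment URL and JWT token.
+    response = requests.post(
+        "http://localhost:8080/api/v2/dags/my_dag/clearTaskInstances",
+        headers={"Authorization": "Bearer <JWT_TOKEN>"},
+        json={"dry_run": False, "only_failed": True, "task_ids": ["my_task"]},
+    )
+    response.raise_for_status()
+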
+ Click on the failed task in the Tree or Graph views and then click on **Clear**. The executor will re-run it. @@ -199,7 +201,7 @@ Task Instance History --------------------- When a task instance retries or is cleared, the task instance history is preserved. You can see this history by clicking on the task instance in the Grid view. -.. image:: ../img/task_instance_history.png +.. image:: ../img/ui-dark/task_instance_history.png .. note:: The try selector shown above is only available for tasks that have been retried or cleared. @@ -207,7 +209,7 @@ When a task instance retries or is cleared, the task instance history is preserv The history shows the value of the task instance attributes at the end of the particular run. On the log page, you can also see the logs for each of the task instance tries. This can be useful for debugging. -.. image:: ../img/task_instance_history_log.png +.. image:: ../img/ui-dark/task_instance_history_log.png .. note:: Related task instance objects like the XComs, rendered template fields, etc., are not preserved in the history. Only the task instance attributes, including the logs, are preserved. diff --git a/airflow-core/docs/core-concepts/dags.rst b/airflow-core/docs/core-concepts/dags.rst index 5537ea12d9488..bca75697f76c3 100644 --- a/airflow-core/docs/core-concepts/dags.rst +++ b/airflow-core/docs/core-concepts/dags.rst @@ -26,7 +26,7 @@ Dags Here's a basic example DAG: -.. image:: /img/basic-dag.png +.. image:: /img/ui-light/basic_dag.png It defines four Tasks - A, B, C, and D - and dictates the order in which they have to run, and which tasks depend on what others. It will also say how often to run the DAG - maybe "every 5 minutes starting tomorrow", or "every day since January 1st, 2020". @@ -331,7 +331,7 @@ The task_id returned by the Python function has to reference a task directly dow .. note:: When a Task is downstream of both the branching operator *and* downstream of one or more of the selected tasks, it will not be skipped: - .. image:: /img/branch_note.png + .. image:: /img/ui-light/branch_note.png The paths of the branching task are ``branch_a``, ``join`` and ``branch_b``. Since ``join`` is a downstream task of ``branch_a``, it will still be run, even though it was not returned as part of the branch decision. @@ -435,6 +435,7 @@ However, this is just the default behaviour, and you can control it using the `` * ``all_success`` (default): All upstream tasks have succeeded * ``all_failed``: All upstream tasks are in a ``failed`` or ``upstream_failed`` state * ``all_done``: All upstream tasks are done with their execution +* ``all_done_min_one_success``: All non-skipped upstream tasks are done with their execution and at least one upstream task has succeeded * ``all_skipped``: All upstream tasks are in a ``skipped`` state * ``one_failed``: At least one upstream task has failed (does not wait for all upstream tasks to be done) * ``one_success``: At least one upstream task has succeeded (does not wait for all upstream tasks to be done) @@ -491,11 +492,11 @@ You can also combine this with the :ref:`concepts:depends-on-past` functionality ``join`` is downstream of ``follow_branch_a`` and ``branch_false``. The ``join`` task will show up as skipped because its ``trigger_rule`` is set to ``all_success`` by default, and the skip caused by the branching operation cascades down to skip a task marked as ``all_success``. - .. image:: /img/branch_without_trigger.png + .. 
image:: /img/branch_without_trigger.png
+   .. image:: /img/ui-light/branch_without_trigger.png

 By setting ``trigger_rule`` to ``none_failed_min_one_success`` in the ``join`` task, we can instead get the intended behaviour:

-   .. image:: /img/branch_with_trigger.png
+   .. image:: /img/ui-light/branch_with_trigger.png


 Setup and teardown
@@ -550,7 +551,7 @@ A TaskGroup can be used to organize tasks into hierarchical groups in Graph view

 Tasks in TaskGroups live on the same original DAG, and honor all the DAG settings and pool configurations.

-.. image:: /img/task_group.gif
+.. image:: /img/ui-light/task_group.gif

 Dependency relationships can be applied across all tasks in a TaskGroup with the ``>>`` and ``<<`` operators. For example, the following code puts ``task1`` and ``task2`` in TaskGroup ``group1`` and then puts both tasks upstream of ``task3``:

@@ -634,7 +635,7 @@ Or, you can pass a Label object to ``set_upstream``/``set_downstream``:

 Here's an example DAG which illustrates labeling different branches:

-.. image:: /img/edge_label_example.png
+.. image:: /img/ui-light/edge_label_example.png

 .. exampleinclude:: /../src/airflow/example_dags/example_branch_labels.py
    :language: python
diff --git a/airflow-core/docs/core-concepts/debug.rst b/airflow-core/docs/core-concepts/debug.rst
index 1e713229f214b..097754b6fd1da 100644
--- a/airflow-core/docs/core-concepts/debug.rst
+++ b/airflow-core/docs/core-concepts/debug.rst
@@ -78,14 +78,14 @@ Run ``python -m pdb <path to dag file>.py`` for an interactive debugging experie

 .. code-block:: bash

-    root@ef2c84ad4856:/opt/airflow# python -m pdb airflow/example_dags/example_bash_operator.py
-    > /opt/airflow/airflow/example_dags/example_bash_operator.py(18)<module>()
+    root@ef2c84ad4856:/opt/airflow# python -m pdb providers/standard/src/airflow/providers/standard/example_dags/example_bash_operator.py
+    > /opt/airflow/providers/standard/src/airflow/providers/standard/example_dags/example_bash_operator.py(18)<module>()
     -> """Example DAG demonstrating the usage of the BashOperator."""
     (Pdb) b 45
-    Breakpoint 1 at /opt/airflow/airflow/example_dags/example_bash_operator.py:45
+    Breakpoint 1 at /opt/airflow/providers/standard/src/airflow/providers/standard/example_dags/example_bash_operator.py:45
     (Pdb) c
-    > /opt/airflow/airflow/example_dags/example_bash_operator.py(45)<module>()
-    -> bash_command='echo 1',
+    > /opt/airflow/providers/standard/src/airflow/providers/standard/example_dags/example_bash_operator.py(45)<module>()
+    -> task_id="run_after_loop",
     (Pdb) run_this_last
diff --git a/airflow-core/docs/core-concepts/executor/index.rst b/airflow-core/docs/core-concepts/executor/index.rst
index c83f10cb050bc..ef488f4d9f8e2 100644
--- a/airflow-core/docs/core-concepts/executor/index.rst
+++ b/airflow-core/docs/core-concepts/executor/index.rst
@@ -48,8 +48,6 @@ If you want to check which executor is currently set, you can use the ``airflow
     $ airflow config get-value core executor
     LocalExecutor
-
-
 Executor Types
 --------------

@@ -114,6 +112,8 @@ Airflow tasks are executed ad hoc inside containers/pods. Each task is isolated
 New Airflow users may assume they need to run a separate executor process using one of the Local or Remote Executors. This is not correct. The executor logic runs *inside* the scheduler process, and will run the tasks locally or not depending on the executor selected.

+.. _using-multiple-executors-concurrently:
+
 Using Multiple Executors Concurrently
 -------------------------------------

@@ -210,11 +210,13 @@ Logging works the same as the single executor use case.
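+Configuring multiple executors and routing an individual task to one of them can be
+sketched as follows (a hedged example; the executor pair and the task are illustrative only):
+
+.. code-block:: python
+
+    # Assumes the deployment config sets:
+    #   [core]
+    #   executor = LocalExecutor,CeleryExecutor
+    # The first entry in the list acts as the environment-wide default.
+    from airflow.providers.standard.operators.bash import BashOperator
+
+    BashOperator(
+        task_id="heavy_task",
+        bash_command="echo run me on celery",
+        executor="CeleryExecutor",  # route just this task to the second executor
+    )
+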
 Statically-coded Hybrid Executors
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-There are currently two "statically coded" executors, these executors are hybrids of two different executors: the :doc:`LocalKubernetesExecutor ` and the :doc:`CeleryKubernetesExecutor `. Their implementation is not native or intrinsic to core Airflow. These hybrid executors instead make use of the ``queue`` field on Task Instances to indicate and persist which sub-executor to run on. This is a misuse of the ``queue`` field and makes it impossible to use it for its intended purpose when using these hybrid executors.
+There were two "statically coded" executors, but they are no longer supported starting from Airflow 3.0.0.
+
+These executors are hybrids of two different executors: the :doc:`LocalKubernetesExecutor ` and the :doc:`CeleryKubernetesExecutor `. Their implementation is not native or intrinsic to core Airflow. These hybrid executors instead make use of the ``queue`` field on Task Instances to indicate and persist which sub-executor to run on. This is a misuse of the ``queue`` field and makes it impossible to use it for its intended purpose when using these hybrid executors.

 Executors such as these also require hand crafting new "concrete" classes to create each permutation of possible combinations of executors. This is untenable as more executors are created and leads to more maintenance overhead. Bespoke coding effort should not be required to use any combination of executors.

-Therefore using these types of executors is no longer recommended.
+Therefore, using these types of executors is no longer supported starting from Airflow 3.0.0. It's recommended to use the :ref:`Using Multiple Executors Concurrently <using-multiple-executors-concurrently>` feature instead.


 Writing Your Own Executor
@@ -232,6 +234,40 @@ Some reasons you may want to write a custom executor include:

 * You'd like to use an executor that leverages a compute service from your preferred cloud provider.
 * You have a private tool/service for task execution that is only available to you or your organization.

+Workloads
+^^^^^^^^^
+
+A workload, in the context of an executor, is the fundamental unit of execution. It represents a discrete
+operation or job that the executor runs on a worker, for example user code encapsulated in an Airflow task.
+
+Example:
+
+.. code-block:: python
+
+    from pathlib import PurePosixPath
+    from uuid import UUID
+
+    # ExecuteTask, TaskInstance and BundleInfo come from Airflow's executor
+    # workloads module; all field values below are illustrative mocks.
+    ExecuteTask(
+        token="mock",
+        ti=TaskInstance(
+            id=UUID("4d828a62-a417-4936-a7a6-2b3fabacecab"),
+            task_id="mock",
+            dag_id="mock",
+            run_id="mock",
+            try_number=1,
+            map_index=-1,
+            pool_slots=1,
+            queue="default",
+            priority_weight=1,
+            executor_config=None,
+            parent_context_carrier=None,
+            context_carrier=None,
+            queued_dttm=None,
+        ),
+        dag_rel_path=PurePosixPath("mock.py"),
+        bundle_info=BundleInfo(name="n/a", version="no matter"),
+        log_path="mock.log",
+        type="ExecuteTask",
+    )
+
 Important BaseExecutor Methods
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -239,7 +275,7 @@ These methods don't require overriding to implement your own executor, but are useful to be aware of:

 * ``heartbeat``: The Airflow scheduler Job loop will periodically call heartbeat on the executor. This is one of the main points of interaction between the Airflow scheduler and the executor. This method updates some metrics, triggers newly queued tasks to execute and updates state of running/completed tasks.
-* ``queue_command``: The Airflow Executor will call this method of the BaseExecutor to provide tasks to be run by the executor. The BaseExecutor simply adds the TaskInstances to an internal list of queued tasks within the executor.
+* ``queue_workload``: The Airflow Executor will call this method of the BaseExecutor to provide tasks to be run by the executor. The BaseExecutor simply adds the *workloads* (see the Workloads section above) to an internal list of queued workloads to run within the executor. All executors present in the repository use this method.
 * ``get_event_buffer``: The Airflow scheduler calls this method to retrieve the current state of the TaskInstances the executor is executing.
 * ``has_task``: The scheduler uses this BaseExecutor method to determine if an executor already has a specific task instance queued or running.
 * ``send_callback``: Sends any callbacks to the sink configured on the executor.

@@ -251,7 +287,7 @@ Mandatory Methods to Implement

 The following methods must be overridden at minimum to have your executor supported by Airflow:

 * ``sync``: Sync will get called periodically during executor heartbeats. Implement this method to update the state of the tasks which the executor knows about. Optionally, attempting to execute queued tasks that have been received from the scheduler.
-* ``execute_async``: Executes a command asynchronously. A command in this context is an Airflow CLI command to run an Airflow task. This method is called (after a few layers) during executor heartbeat which is run periodically by the scheduler. In practice, this method often just enqueues tasks into an internal or external queue of tasks to be run (e.g. ``KubernetesExecutor``). But can also execute the tasks directly as well (e.g. ``LocalExecutor``). This will depend on the executor.
+* ``execute_async``: Executes a *workload* asynchronously. This method is called (after a few layers) during the executor heartbeat, which is run periodically by the scheduler. In practice, this method often just enqueues tasks into an internal or external queue of tasks to be run (e.g. ``KubernetesExecutor``), but it can also execute the tasks directly (e.g. ``LocalExecutor``); this depends on the executor.


 Optional Interface Methods to Implement
diff --git a/airflow-core/docs/core-concepts/index.rst b/airflow-core/docs/core-concepts/index.rst
index fdb9c2d146aaa..8ba314cd36778 100644
--- a/airflow-core/docs/core-concepts/index.rst
+++ b/airflow-core/docs/core-concepts/index.rst
@@ -43,6 +43,7 @@ Here you can find detailed documentation about each one of the core concepts of
     auth-manager/index
     objectstorage
     backfill
+    message-queues

 **Communication**

diff --git a/airflow-core/docs/core-concepts/message-queues.rst b/airflow-core/docs/core-concepts/message-queues.rst
new file mode 100644
index 0000000000000..573189a24f891
--- /dev/null
+++ b/airflow-core/docs/core-concepts/message-queues.rst
@@ -0,0 +1,41 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+.. _concepts:message-queues:
+
+Message Queues
+==============
+
+Message queues are a way to expose the capability of external, event-driven scheduling of Dags.
+
+Apache Airflow is primarily designed for time-based and dependency-based scheduling of workflows. However,
+modern data architectures often require near real-time processing and the ability to react to
+events from various sources, such as message queues.
+
+Airflow has native event-driven capability, allowing users to create workflows that can be
+triggered by external events, thus enabling more responsive data pipelines.
+
+Airflow supports poll-based event-driven scheduling, where the Triggerer can poll
+external message queues using built-in :class:`airflow.triggers.base.BaseTrigger` classes. This allows users
+to efficiently create workflows that are triggered by external events, such as messages arriving
+in a queue or changes in a database.
+
+Airflow constantly monitors the state of an external resource and updates the asset whenever the external
+resource reaches a given state (if it ever does). To achieve this, we leverage Airflow Triggers.
+Triggers are small, asynchronous pieces of Python code whose job is to poll an external resource state.
+
+The list of supported message queues is available in :doc:`apache-airflow-providers:core-extensions/message-queues`.
diff --git a/airflow-core/docs/core-concepts/operators.rst b/airflow-core/docs/core-concepts/operators.rst
index 0d66ba3f61d21..596b10ae16954 100644
--- a/airflow-core/docs/core-concepts/operators.rst
+++ b/airflow-core/docs/core-concepts/operators.rst
@@ -199,7 +199,7 @@ In some cases, you may want to exclude a string from templating and use it direc
     )

 This will fail with ``TemplateNotFound: cat script.sh`` since Airflow would treat the string as a path to a file, not a command.
-We can prevent airflow from treating this value as a reference to a file by wrapping it in :func:`~airflow.util.template.literal`.
+We can prevent Airflow from treating this value as a reference to a file by wrapping it in :func:`~airflow.util.template.literal`.
 This approach disables the rendering of both macros and files and can be applied to selected nested fields while retaining the default templating rules for the remainder of the content.

 .. code-block:: python
diff --git a/airflow-core/docs/core-concepts/overview.rst b/airflow-core/docs/core-concepts/overview.rst
index 027d14da12ea8..96eda6bdff1e4 100644
--- a/airflow-core/docs/core-concepts/overview.rst
+++ b/airflow-core/docs/core-concepts/overview.rst
@@ -22,7 +22,7 @@ Airflow is a platform that lets you build and run *workflows*. A workflow is rep
 :doc:`DAG ` (a Directed Acyclic Graph), and contains individual pieces of work called
 :doc:`tasks`, arranged with dependencies and data flows taken into account.

-.. image:: ../img/edge_label_example.png
+.. image:: ../img/ui-light/edge_label_example.png
    :alt: An example Airflow DAG, rendered in Graph

 A DAG specifies the dependencies between tasks, which defines the order in which to execute the tasks.

@@ -59,7 +59,7 @@ A minimal Airflow installation consists of the following components:

 * A folder of *DAG files*, which is read by the *scheduler* to figure out what tasks to run and when to run them.

-* A *metadata database*, which airflow components use to store state of workflows and tasks.
+* A *metadata database*, which Airflow components use to store state of workflows and tasks. Setting up a metadata database is described in :doc:`/howto/set-up-database` and is required for Airflow to work. @@ -98,7 +98,7 @@ and can be scaled by running multiple instances of the components above. The separation of components also allow for increased security, by isolating the components from each other and by allowing to perform different tasks. For example separating *dag processor* from *scheduler* allows to make sure that the *scheduler* does not have access to the *DAG files* and cannot execute -code provided by *DAG author*. +code provided by *Dag author*. Also while single person can run and manage Airflow installation, Airflow Deployment in more complex setup can involve various roles of users that can interact with different parts of the system, which is @@ -106,7 +106,7 @@ an important aspect of secure Airflow deployment. The roles are described in det :doc:`/security/security_model` and generally speaking include: * Deployment Manager - a person that installs and configures Airflow and manages the deployment -* DAG author - a person that writes dags and submits them to Airflow +* Dag author - a person that writes dags and submits them to Airflow * Operations User - a person that triggers dags and tasks and monitors their execution Architecture Diagrams @@ -153,13 +153,13 @@ Distributed Airflow architecture ................................ This is the architecture of Airflow where components of Airflow are distributed among multiple machines -and where various roles of users are introduced - *Deployment Manager*, **DAG author**, +and where various roles of users are introduced - *Deployment Manager*, **Dag author**, **Operations User**. You can read more about those various roles in the :doc:`/security/security_model`. In the case of a distributed deployment, it is important to consider the security aspects of the components. The *webserver* does not have access to the *DAG files* directly. The code in the ``Code`` tab of the UI is read from the *metadata database*. The *webserver* cannot execute any code submitted by the -**DAG author**. It can only execute code that is installed as an *installed package* or *plugin* by +**Dag author**. It can only execute code that is installed as an *installed package* or *plugin* by the **Deployment Manager**. The **Operations User** only has access to the UI and can only trigger dags and tasks, but cannot author dags. @@ -178,7 +178,7 @@ Separate DAG processing architecture In a more complex installation where security and isolation are important, you'll also see the standalone *dag processor* component that allows to separate *scheduler* from accessing *DAG files*. This is suitable if the deployment focus is on isolation between parsed tasks. While Airflow does not yet -support full multi-tenant features, it can be used to make sure that **DAG author** provided code is never +support full multi-tenant features, it can be used to make sure that **Dag author** provided code is never executed in the context of the scheduler. .. image:: ../img/diagram_dag_processor_airflow_architecture.png @@ -243,6 +243,6 @@ User interface Airflow comes with a user interface that lets you see what dags and their tasks are doing, trigger runs of dags, view logs, and do some limited debugging and resolution of problems with your dags. -.. image:: ../img/dags.png +.. 
image:: ../img/ui-dark/dags.png

 It's generally the best way to see the status of your Airflow installation as a whole, as well as diving into individual dags to see their layout, the status of each task, and the logs from each task.
diff --git a/airflow-core/docs/core-concepts/params.rst b/airflow-core/docs/core-concepts/params.rst
index dd655cae2a324..f313f0bd1591d 100644
--- a/airflow-core/docs/core-concepts/params.rst
+++ b/airflow-core/docs/core-concepts/params.rst
@@ -32,15 +32,14 @@ If the user-supplied values don't pass validation, Airflow shows a warning inste
 DAG-level Params
 ----------------

-To add Params to a :class:`~airflow.models.dag.DAG`, initialize it with the ``params`` kwarg.
+To add Params to a :class:`~airflow.sdk.DAG`, initialize it with the ``params`` kwarg.
 Use a dictionary that maps Param names to either a :class:`~airflow.sdk.definitions.param.Param` or an object indicating the parameter's default value.

 .. code-block::
    :emphasize-lines: 7-10

-    from airflow.sdk import DAG
-    from airflow.sdk import task
-    from airflow.sdk import Param
+    from airflow.sdk import DAG, task, Param, get_current_context
+    import logging

     with DAG(
         "the_dag",
@@ -51,15 +50,18 @@ Use a dictionary that maps Param names to either a :class:`~airflow.sdk.definiti
     ) as dag:

         @task.python
-        def example_task(params: dict):
+        def example_task():
+            ctx = get_current_context()
+            logger = logging.getLogger("airflow.task")
+
             # This will print the default value, 6:
-            dag.log.info(dag.params['my_int_param'])
+            logger.info(ctx["dag"].params["my_int_param"])

             # This will print the manually-provided value, 42:
-            dag.log.info(params['my_int_param'])
+            logger.info(ctx["params"]["my_int_param"])

             # This will print the default value, 5, since it wasn't provided manually:
-            dag.log.info(params['x'])
+            logger.info(ctx["params"]["x"])

         example_task()

@@ -191,7 +193,7 @@ JSON Schema Validation

 .. note::
     If ``schedule`` is defined for a DAG, params with defaults must be valid. This is validated during DAG parsing.
     If ``schedule=None`` then params are not validated during DAG parsing but before triggering a DAG.
-    This is useful in cases where the DAG author does not want to provide defaults but wants to force users provide valid parameters
+    This is useful in cases where the Dag author does not want to provide defaults but wants to force users to provide valid parameters
     at time of trigger.

 .. note::
@@ -386,19 +388,19 @@ For examples, please take a look at the two example dags provided: :ref:`Params

 The Params UI Tutorial is rendered in 4 sections with the most common examples.
 The first section shows the basic usage without ``Param`` class.

-.. image:: ../img/trigger-dag-tutorial-form-1.png
+.. image:: ../img/ui-dark/trigger-dag-tutorial-form-1.png

 The second section shows how to use the ``Param`` class to define more attributes.

-.. image:: ../img/trigger-dag-tutorial-form-2.png
+.. image:: ../img/ui-dark/trigger-dag-tutorial-form-2.png

 The third section shows how to model selection lists and drop-downs.

-.. image:: ../img/trigger-dag-tutorial-form-3.png
+.. image:: ../img/ui-dark/trigger-dag-tutorial-form-3.png

 Finally the fourth section shows advanced form elements.

-.. image:: ../img/trigger-dag-tutorial-form-4.png
+.. image:: ../img/ui-dark/trigger-dag-tutorial-form-4.png

 .. versionchanged:: 3.0.0
     By default custom HTML is not allowed to prevent injection of scripts or other malicious HTML code.
The previous field named
diff --git a/airflow-core/docs/core-concepts/variables.rst b/airflow-core/docs/core-concepts/variables.rst
index db0ffacb0884f..6487fd0c131a3 100644
--- a/airflow-core/docs/core-concepts/variables.rst
+++ b/airflow-core/docs/core-concepts/variables.rst
@@ -33,6 +33,20 @@ To use them, just import and call ``get`` on the Variable model::
     # Returns the value of default (None) if the variable is not set
     baz = Variable.get("baz", default=None)

+You can also access variables through the Task Context using
+:func:`~airflow.sdk.get_current_context`:
+
+.. code-block:: python
+
+    from airflow.sdk import get_current_context
+
+
+    def my_task():
+        context = get_current_context()
+        var = context["var"]
+        my_variable = var.get("my_variable_name")
+        return my_variable
+
 You can also use them from :ref:`templates `::

     # Raw value
diff --git a/airflow-core/docs/core-concepts/xcoms.rst b/airflow-core/docs/core-concepts/xcoms.rst
index 2be9b75bbf849..93463a752768e 100644
--- a/airflow-core/docs/core-concepts/xcoms.rst
+++ b/airflow-core/docs/core-concepts/xcoms.rst
@@ -25,6 +25,9 @@ XComs (short for "cross-communications") are a mechanism that let :doc:`tasks` t

 An XCom is identified by a ``key`` (essentially its name), as well as the ``task_id`` and ``dag_id`` it came from. They can have any serializable value (including objects that are decorated with ``@dataclass`` or ``@attr.define``, see :ref:`TaskFlow arguments `:), but they are only designed for small amounts of data; do not use them to pass around large values, like dataframes.

+XCom operations should be performed through the Task Context using
+:func:`~airflow.sdk.get_current_context`. Directly updating XComs via the database model is not possible.
+
 XComs are explicitly "pushed" and "pulled" to/from their storage using the ``xcom_push`` and ``xcom_pull`` methods on Task Instances.

 To push a value within a task called **"task-1"** that will be used by another task:

@@ -73,8 +76,6 @@ An example of pushing multiple XComs and pulling them individually:
     # Pulling entire xcom data from push_multiple task
     data = context["ti"].xcom_pull(task_ids="push_multiple", key="return_value")
-
-
 .. note:: If the first task run is not succeeded then on every retry task XComs will be cleared to make the task run idempotent.

@@ -91,7 +92,7 @@ Custom XCom Backends

 The XCom system has interchangeable backends, and you can set which backend is being used via the ``xcom_backend`` configuration option.

-If you want to implement your own backend, you should subclass :class:`~airflow.models.xcom.BaseXCom`, and override the ``serialize_value`` and ``deserialize_value`` methods.
+If you want to implement your own backend, you should subclass :class:`~airflow.sdk.bases.xcom.BaseXCom`, and override the ``serialize_value`` and ``deserialize_value`` methods.

 You can override the ``purge`` method in the ``BaseXCom`` class to have control over purging the xcom data from the custom backend. This will be called as part of ``delete``.

@@ -104,6 +105,6 @@ If you can exec into a terminal in an Airflow container, you can then print out
 .. code-block:: python

-    from airflow.models.xcom import XCom
+    from airflow.sdk.execution_time.xcom import XCom

     print(XCom.__name__)
diff --git a/airflow-core/docs/extra-packages-ref.rst b/airflow-core/docs/extra-packages-ref.rst
index 46166ee46aa0a..47106fab2ed8d 100644
--- a/airflow-core/docs/extra-packages-ref.rst
+++ b/airflow-core/docs/extra-packages-ref.rst
@@ -26,18 +26,18 @@ already existing ``providers`` and the dependencies are isolated and simplified
 packages.

 While the original installation methods via ``apache-airflow`` distribution package and extras still
-work as previously and it installs complete airflow installation ready to serve as scheduler, webserver, triggerer
+work as previously and install a complete Airflow installation ready to serve as scheduler, webserver, triggerer
 and worker, the ``apache-airflow`` package is now a meta-package that installs all the other
 distribution packages, it's also possible to install only the distribution packages that are needed for a specific
-component you want to run airflow with.
+component you want to run Airflow with.

 The following distribution packages are available:

 +----------------------------+------------------------------------------------------------------+----------------------------------------------------------+
 | Distribution package       | Purpose                                                          | Optional extras                                          |
 +----------------------------+------------------------------------------------------------------+----------------------------------------------------------+
-| apache-airflow-core        | This is the core distribution package that contains              | * Core extras that add optional functionality to airflow |
-|                            | the airflow scheduler, webserver, triggerer code.                | core system - enhancing its functionality across         |
+| apache-airflow-core        | This is the core distribution package that contains              | * Core extras that add optional functionality to Airflow |
+|                            | the Airflow scheduler, webserver, triggerer code.                | core system - enhancing its functionality across         |
 |                            |                                                                  | multiple providers.                                      |
 |                            |                                                                  |                                                          |
 |                            |                                                                  | * Group ``all`` extra that installs all optional         |
@@ -71,7 +71,7 @@ The following distribution packages are available:

 As mentioned above, Airflow has a number of optional "extras" that you can use to add features to your
 installation when you are installing Airflow. Those extras are a good way for the users to manage their
-installation, but also they are useful for contributors to airflow when they want to contribute some of
+installation, but also they are useful for contributors to Airflow when they want to contribute some of
 the features - including optional integrations of Airflow - via providers.

 Here's the list of all the extra dependencies of Apache Airflow.

@@ -79,16 +79,44 @@ Here's the list of all the extra dependencies of Apache Airflow.
 Core Airflow extras
 -------------------

-These are core airflow extras that extend capabilities of core Airflow. They usually do not install provider
-packages (with the exception of ``celery`` and ``cncf.kubernetes`` extras), they just install necessary
-python dependencies for the provided package.
+These are core Airflow extras that extend the capabilities of core Airflow. They do not install provider
+packages; they just install the necessary
+Python dependencies for the provided package. The same extras are available as ``airflow-core`` package extras.
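+
+For example, to add one of these core extras to an existing installation while keeping the
+dependency set consistent, you can pin against the published constraint files (the ``otel``
+extra below is just an illustration; any extra from the table works the same way):
+
+.. code-block:: bash
+    :substitutions:
+
+    pip install 'apache-airflow[otel]==|version|' \
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt"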
+
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| extra               | install command                                     | enables                                                                    |
++=====================+=====================================================+============================================================================+
+| async               | ``pip install 'apache-airflow[async]'``             | Async worker classes for Gunicorn                                          |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| graphviz            | ``pip install 'apache-airflow[graphviz]'``          | Graphviz renderer for converting DAG to graphical output                   |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| kerberos            | ``pip install 'apache-airflow[kerberos]'``          | Kerberos integration for Kerberized services (Hadoop, Presto, Trino)       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| otel                | ``pip install 'apache-airflow[otel]'``              | Required for OpenTelemetry metrics                                         |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| sentry              | ``pip install 'apache-airflow[sentry]'``            | Sentry service for application logging and monitoring                      |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| standard            | ``pip install 'apache-airflow[standard]'``          | Standard hooks and operators                                               |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                   |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+
+Meta-airflow package extras
+---------------------------
+
+Airflow 3 is released in several packages. The ``apache-airflow`` package is a meta-package that installs
+all the other packages when you run Airflow as a standalone installation, and it also has several extras
+that do not extend Airflow core functionality, but are useful for users who want to install
+other packages that can be used by Airflow or some of its providers.
+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | extra | install command | enables | +=====================+=====================================================+============================================================================+ | aiobotocore | ``pip install 'apache-airflow[aiobotocore]'`` | Support for asynchronous (deferrable) operators for Amazon integration | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| async | ``pip install 'apache-airflow[async]'`` | Async worker classes for Gunicorn | +| amazon-aws-auth | ``pip install apache-airflow[amazon-aws-auth]`` | Amazon-aws-auth AWS authentication | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| cloudpickle | ``pip install apache-airflow[cloudpickle]`` | Cloudpickle hooks and operators | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | github-enterprise | ``pip install 'apache-airflow[github-enterprise]'`` | GitHub Enterprise auth backend | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ @@ -96,32 +124,22 @@ python dependencies for the provided package. +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | graphviz | ``pip install 'apache-airflow[graphviz]'`` | Graphviz renderer for converting DAG to graphical output | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| kerberos | ``pip install 'apache-airflow[kerberos]'`` | Kerberos integration for Kerberized services (Hadoop, Presto, Trino) | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | ldap | ``pip install 'apache-airflow[ldap]'`` | LDAP authentication for users | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | leveldb | ``pip install 'apache-airflow[leveldb]'`` | Required for use leveldb extra in google provider | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| otel | ``pip install 'apache-airflow[otel]'`` | Required for OpenTelemetry metrics | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | pandas | ``pip install 'apache-airflow[pandas]'`` | Install Pandas library compatible with Airflow | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| rabbitmq | ``pip install 'apache-airflow[rabbitmq]'`` | RabbitMQ support as a Celery backend | +| polars | ``pip install 'apache-airflow[polars]'`` | Polars hooks and operators | 
+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| sentry | ``pip install 'apache-airflow[sentry]'`` | Sentry service for application logging and monitoring | +| rabbitmq | ``pip install 'apache-airflow[rabbitmq]'`` | RabbitMQ support as a Celery backend | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | s3fs | ``pip install 'apache-airflow[s3fs]'`` | Support for S3 as Airflow FS | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| saml | ``pip install 'apache-airflow[saml]'`` | Support for SAML authentication in Airflow | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| standard | ``pip install apache-airflow[standard]'`` | Standard hooks and operators | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| statsd | ``pip install 'apache-airflow[statsd]'`` | Needed by StatsD metrics | +| saml | ``pip install 'apache-airflow[saml]'`` | Support for SAML authentication in Amazon provider | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | uv | ``pip install 'apache-airflow[uv]'`` | Install uv - fast, Rust-based package installer (experimental) | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ -| cloudpickle | ``pip install apache-airflow[cloudpickle]`` | Cloudpickle hooks and operators | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ Providers extras @@ -129,7 +147,7 @@ Providers extras These providers extras are simply convenience extras to install providers so that you can install the providers with simple command - including provider package and necessary dependencies in single command, which allows PIP to resolve any conflicting dependencies. This is extremely useful -for first time installation where you want to repeatably install version of dependencies which are 'valid' for both airflow and providers installed. +for first time installation where you want to repeatably install version of dependencies which are 'valid' for both Airflow and providers installed. For example the below command will install: @@ -146,7 +164,7 @@ with a consistent set of dependencies based on constraint files provided by Airf :substitutions: pip install apache-airflow[google,amazon,apache-spark]==|version| \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" Note, that this will install providers in the versions that were released at the time of Airflow |version| release. You can later upgrade those providers manually if you want to use latest versions of the providers. @@ -198,6 +216,8 @@ custom bash/python providers). 
+---------------------+-----------------------------------------------------+------------------------------------------------+ | apache-spark | ``pip install 'apache-airflow[apache-spark]'`` | All Spark related operators & hooks | +---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache-tinkerpop | ``pip install apache-airflow[apache-tinkerpop]`` | Apache-tinkerpop hooks and operators | ++---------------------+-----------------------------------------------------+------------------------------------------------+ | apache-webhdfs | ``pip install 'apache-airflow[apache-webhdfs]'`` | HDFS hooks and operators | +---------------------+-----------------------------------------------------+------------------------------------------------+ @@ -377,6 +397,8 @@ pre-installed when Airflow is installed. +---------------------+-----------------------------------------------------+--------------------------------------+--------------+ | jdbc | ``pip install 'apache-airflow[jdbc]'`` | JDBC hooks and operators | | +---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| keycloak | ``pip install apache-airflow[keycloak]`` | Keycloak hooks and operators | + ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ | microsoft-psrp | ``pip install 'apache-airflow[microsoft-psrp]'`` | PSRP hooks and operators | | +---------------------+-----------------------------------------------------+--------------------------------------+--------------+ | microsoft-winrm | ``pip install 'apache-airflow[microsoft-winrm]'`` | WinRM hooks and operators | | @@ -402,9 +424,9 @@ Group extras The group extras are convenience extras. Such extra installs many optional dependencies together. It is not recommended to use it in production, but it is useful for CI, development and testing purposes. -+---------+--------------------------------------------+---------------------------------------------------+ -| extra | install command | enables | -+=========+============================================+===================================================+ ++-----------+------------------------------------------+---------------------------------------------------+ +| extra | install command | enables | ++===========+==========================================+===================================================+ | all | ``pip install apache-airflow[all]`` | All optional dependencies including all providers | +-----------+------------------------------------------+---------------------------------------------------+ | all-core | ``pip install apache-airflow[all-core]`` | All optional core dependencies | diff --git a/airflow-core/docs/faq.rst b/airflow-core/docs/faq.rst index e552e024a4925..f74c4194928bb 100644 --- a/airflow-core/docs/faq.rst +++ b/airflow-core/docs/faq.rst @@ -31,8 +31,8 @@ There are very many reasons why your task might not be getting scheduled. Here a - Does your script "compile", can the Airflow engine parse it and find your DAG object? To test this, you can run ``airflow dags list`` and confirm that your DAG shows up in the list. You can also run - ``airflow tasks list foo_dag_id --tree`` and confirm that your task - shows up in the list as expected. 
If you use the CeleryExecutor, you
+  ``airflow dags show foo_dag_id`` and confirm that your task
+  shows up in the graphviz format as expected. If you use the CeleryExecutor, you
   may want to confirm that this works both where the scheduler runs as well as
   where the worker runs.

@@ -206,6 +206,37 @@ until ``min_file_process_interval`` is reached since DAG Parser will look for mo
     return dag

+What to do if you see disappearing DAGs in the UI?
+--------------------------------------------------
+
+There are several reasons why DAGs might disappear from the UI. Common causes include:
+
+* **Total parsing of all DAGs is too long** - If parsing takes longer than :ref:`config:core__dagbag_import_timeout`,
+  files may not be processed completely. This often occurs when DAGs don't follow
+  :ref:`DAG writing best practices` like:
+
+  * Excessive top-level code execution
+  * External system calls during parsing
+  * Complex dynamic DAG generation
+
+* **Inconsistent dynamic DAG generation** - DAGs created through
+  :doc:`dynamic generation ` must produce stable DAG IDs across parses.
+  Verify consistency by running ``python your_dag_file.py`` repeatedly.
+
+* **File processing configuration issues** - A certain combination of parameters may lead to scenarios in which certain DAGs are less likely to be processed at each loop. Check these parameters:
+
+  * :ref:`config:dag_processor__file_parsing_sort_mode` - Ensure sorting method matches your sync strategy
+  * :ref:`config:dag_processor__parsing_processes` - Number of parallel parsers
+  * :ref:`config:scheduler__parsing_cleanup_interval` - Controls stale DAG cleanup frequency
+  * :ref:`config:scheduler__dag_stale_not_seen_duration` - Time threshold for marking DAGs as stale
+
+* **File synchronization problems** - Common with git-sync setups:
+
+  * Symbolic link swapping delays
+  * Permission changes during sync
+  * ``mtime`` preservation issues
+
+* **Time synchronization issues** - Ensure all nodes (database, schedulers, workers) use NTP with <1s clock drift.
+
 DAG construction
 ^^^^^^^^^^^^^^^^

@@ -509,3 +540,38 @@ This means ``explicit_defaults_for_timestamp`` is disabled in your mysql server

 #. Set ``explicit_defaults_for_timestamp = 1`` under the ``mysqld`` section in your ``my.cnf`` file.
 #. Restart the Mysql server.
+
+Connections
+^^^^^^^^^^^
+
+How can I test a connection or use a Canary Dag?
+------------------------------------------------
+
+For security reasons, the test connection functionality is disabled by default across the Airflow UI,
+API and CLI. This can be modified by setting :ref:`config:core__test_connection`.
+
+You can utilize a Dag to regularly test connections. This is referred to as a "Canary Dag" and can detect and
+alert on failures in external systems that your Dags depend on. You can create a simple Dag that tests connections
+such as the following Airflow 3 example:
+
+.. code-block:: python
+
+    from airflow import DAG
+    from airflow.sdk import task
+
+    with DAG(dag_id="canary", schedule="@daily", doc_md="Canary DAG to regularly test connections to systems."):
+
+        @task(doc_md="Test a connection by its Connection ID.")
+        def test_connection(conn_id):
+            from airflow.hooks.base import BaseHook
+
+            ok, status = BaseHook.get_hook(conn_id=conn_id).test_connection()
+            if ok:
+                return status
+            raise RuntimeError(status)
+
+        for conn_id in [
+            # Add more connections here to create tasks to test them.
+ "aws_default", + ]: + test_connection.override(task_id="test_" + conn_id)(conn_id) diff --git a/airflow-core/docs/howto/add-owner-links.rst b/airflow-core/docs/howto/add-owner-links.rst new file mode 100644 index 0000000000000..fbc8445209df1 --- /dev/null +++ b/airflow-core/docs/howto/add-owner-links.rst @@ -0,0 +1,50 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + + +Add Owner Links to DAG +======================= + +.. versionadded:: 2.4.0 + +You can set the ``owner_links`` argument on your DAG object, which will make the owner a clickable link in the +main dags view page instead of a search filter. + +Two options are supported: + +* An HTTP link (e.g. ``https://www.example.com``) which opens the webpage in your default internet client +* A `mailto `_ link (e.g. ``mailto:example@airflow.com``) which opens your default email client to send an email to the specified address + +In your DAG, set the ``owner_links`` argument specifying a dictionary of an owner (key) and its link (value). +Next define a task using this owner, and the owner in the dags view will link to the specified address. + +.. code-block:: python + :emphasize-lines: 5 + + with DAG( + dag_id="example_dag_owners", + start_date=datetime(2022, 8, 5), + schedule="0 0 * * *", + owner_links={"airflow": "https://airflow.apache.org"}, + ): + BashOperator(task_id="task_using_linked_owner", bash_command="echo 1", owner="airflow") + +**Screenshot**: + +.. image:: ../img/howto-owner-links.gif diff --git a/airflow-core/docs/howto/connection.rst b/airflow-core/docs/howto/connection.rst index c753e1342660f..e58d0260db49b 100644 --- a/airflow-core/docs/howto/connection.rst +++ b/airflow-core/docs/howto/connection.rst @@ -22,7 +22,7 @@ Managing Connections For an overview of hooks and connections, see :doc:`/authoring-and-scheduling/connections`. -Airflow's :class:`~airflow.models.connection.Connection` object is used for storing credentials and other information necessary for connecting to external services. +Airflow's :class:`~airflow.sdk.Connection` object is used for storing credentials and other information necessary for connecting to external services. Connections may be defined in the following ways: @@ -77,7 +77,7 @@ convenience property :py:meth:`~airflow.models.connection.Connection.as_json`. I .. code-block:: pycon - >>> from airflow.models.connection import Connection + >>> from airflow.sdk import Connection >>> c = Connection( ... conn_id="some_conn", ... conn_type="mysql", @@ -94,7 +94,7 @@ In addition, same approach could be used to convert Connection from URI format t .. code-block:: pycon - >>> from airflow.models.connection import Connection + >>> from airflow.sdk import Connection >>> c = Connection( ... conn_id="awesome_conn", ... 
description="Example Connection",
@@ -115,9 +115,16 @@ If serializing with Airflow URI:

 See :ref:`Connection URI format ` for more details on how to generate a valid URI.

-.. note::
+Visibility in UI and CLI
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Connections defined through environment variables are **not displayed** in the Airflow UI or listed using ``airflow connections list``.
+
+This is because these connections are **resolved dynamically at runtime**, typically on the **worker** process executing your task. They are not stored in the metadata database or loaded in the webserver or scheduler environment.
+
+This supports secure deployment patterns where environment-based secrets (e.g. via ``.env`` files, Docker, or Kubernetes secrets) are injected only into runtime components like workers, and not into components exposed to users, like the webserver.

-    Connections defined in environment variables will not show up in the Airflow UI or using ``airflow connections list``.
+If you need connections to appear in the UI for visibility or editing, define them using the metadata database instead.


 Storing connections in a Secrets Backend
diff --git a/airflow-core/docs/howto/custom-operator.rst b/airflow-core/docs/howto/custom-operator.rst
index b76a2277fbfea..d6206166e1211 100644
--- a/airflow-core/docs/howto/custom-operator.rst
+++ b/airflow-core/docs/howto/custom-operator.rst
@@ -24,7 +24,7 @@ Creating a custom Operator
 Airflow allows you to create new operators to suit the requirements of you or your team.
 This extensibility is one of the many features which make Apache Airflow powerful.

-You can create any operator you want by extending the :class:`airflow.models.baseoperator.BaseOperator`
+You can create any operator you want by extending the public SDK base class :class:`~airflow.sdk.BaseOperator`.

 There are two methods that you need to override in a derived class:
diff --git a/airflow-core/docs/howto/custom-view-plugin.rst b/airflow-core/docs/howto/custom-view-plugin.rst
index e9a5b795e4716..d34548ae58069 100644
--- a/airflow-core/docs/howto/custom-view-plugin.rst
+++ b/airflow-core/docs/howto/custom-view-plugin.rst
@@ -20,57 +20,200 @@ Customize view of Apache from Airflow web UI
 ============================================

 Airflow has a feature that allows to integrate a custom UI along with its
-core UI using the Plugin manager
+core UI using the Plugin manager.

-This is an example plugin for Airflow that displays absolutely nothing.
+Plugins integrate with the Airflow core RestAPI. In a plugin,
+four object references are derived from the base class ``airflow.plugins_manager.AirflowPlugin``.
+They are fastapi_apps, fastapi_root_middlewares, external_views and react_apps.

-In this plugin, two object references are derived from the base class
-``airflow.plugins_manager.AirflowPlugin``. They are flask_blueprints and
-appbuilder_views
+Using fastapi_apps in an Airflow plugin, the core RestAPI can be extended
+to support extra endpoints that serve custom static files or any other JSON/application responses.
+In this object reference, the list of dictionaries with the FastAPI application and metadata information
+like the name and the url prefix is passed on; a minimal sketch of such a plugin follows below.

-Using flask_blueprints in Airflow plugin, the core application can be extended
-to support the customized application to view Empty Plugin.
-In this object reference, the list of Blueprint object with the static template for
-rendering the information.
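+
+For instance, a minimal sketch of a ``fastapi_apps`` plugin (the endpoint, names and URL prefix
+below are illustrative placeholders, not a prescribed layout):
+
+.. code-block:: python
+
+    from fastapi import FastAPI
+
+    from airflow.plugins_manager import AirflowPlugin
+
+    app = FastAPI()
+
+
+    @app.get("/hello")
+    def hello() -> dict[str, str]:
+        # A trivial JSON endpoint; with the prefix below it is served
+        # by the API server under /my-plugin/hello.
+        return {"message": "hello from my plugin"}
+
+
+    class MyRestApiPlugin(AirflowPlugin):
+        name = "My REST API Plugin"
+        fastapi_apps = [
+            {
+                "app": app,
+                "url_prefix": "/my-plugin",
+                "name": "My Plugin API",
+            }
+        ]
+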
+Using fastapi_root_middlewares in Airflow plugin, allows to register custom middleware at the root of +the FastAPI application. This middleware can be used to add custom headers, logging, or any other +functionality to the entire FastAPI application, including core endpoints. +In this object reference, the list of dictionaries with Middleware factories object, +initialization parameters and some metadata information like the name are passed on. -Using appbuilder_views in Airflow plugin, a class that represents a concept is -added and presented with views and methods to implement it. -In this object reference, the list of dictionaries with FlaskAppBuilder BaseView object -and metadata information like name and category is passed on. +Using external_views in Airflow plugin, allows to register custom views that are rendered in iframes or external link +in the Airflow UI. This is useful for integrating external applications or custom dashboards into the Airflow UI. +In this object reference, the list of dictionaries with the view name, href (templatable), destination and +optional parameters like the icon and url_route are passed on. +Using react_apps in Airflow plugin, allows to register custom React applications that can be rendered +in the Airflow UI. This is useful for integrating custom React components or applications into the Airflow UI. +In this object reference, the list of dictionaries with the app name, bundle_url (where to load the js assets, templatable), destination and +optional parameters like the icon and url_route are passed on. -Custom view Registration ------------------------- -A custom view with object reference to flask_appbuilder and Blueprint from flask -and be registered as a part of a :doc:`plugin `. +Information and code samples to register ``fastapi_apps``, ``fastapi_root_middlewares``, ``external_views`` and ``react_apps`` are +available in :doc:`plugin `. -The following is a skeleton for us to implement a new custom view: +Developing React Applications with the Bootstrap Tool +===================================================== -.. exampleinclude:: /empty_plugin/empty_plugin.py - :language: python +Airflow provides a React plugin bootstrap tool to help developers quickly create, develop, and integrate external React applications into the core UI. This is the most flexible +and recommended way to customize the Airflow UI. +This tool generates a complete React project structure that builds as a library compatible with dynamic imports and shares React instances with the host Airflow application. +Creating a New React Plugin Project +----------------------------------- -``Plugins`` specified in the ``category`` key of ``appbuilder_views`` dictionary is -the name of the tab in the navigation bar of the Airflow UI. ``Empty Plugin`` -is the name of the link under the tab ``Plugins``, which will launch the plugin +The bootstrap tool is located in ``dev/react-plugin-tools/`` and provides a simple CLI to generate new React plugin projects: -We need to add Blueprint for generating the part of the application -that needs to be rendered in Airflow web UI. We can define templates, static files -and this blueprint will be registered as part of the Airflow application when the -plugin gets loaded. +.. code-block:: bash -The ``$AIRFLOW_HOME/plugins`` folder with custom view UI have the following folder structure. 
+   # Navigate to the bootstrap tool directory
+   cd dev/react-plugin-tools

+   # Create a new plugin project
+   python bootstrap.py my-awesome-plugin

+   # Or specify a custom directory
+   python bootstrap.py my-awesome-plugin --dir /path/to/my-projects/my-awesome-plugin

-::
-
-    plugins
-    ├── empty_plugin.py
-    ├── templates
-    |   └── empty_plugin
-    |       ├── index.html
-    └── README.md

-The HTML files required to render the views built are added as part of the
-Airflow plugin into ``$AIRFLOW_HOME/plugins/templates`` folder and defined in the
-blueprint.
+This generates a complete React project with Vite, TypeScript, Chakra UI integration, and proper configuration for building as a library that integrates with Airflow's UI.
+
+React Development Workflow
+---------------------------
+
+Once your project is generated, refer to the ``README.md`` file in your project directory for complete development instructions, including:
+
+- Available development scripts (``pnpm dev``, ``pnpm build``, etc.)
+- Project structure explanation
+- Development workflow with hot reload
+- Building for production
+- Troubleshooting common React development issues
+
+The generated project is pre-configured with all necessary tools and follows Airflow's UI development patterns.
+
+Integrating with Airflow
+-------------------------
+
+To integrate your React application with Airflow, you need to:
+
+1. **Serve the built assets**: you can do that on your own infrastructure or directly within Airflow using ``fastapi_apps``
+2. **Register the React app** using the ``react_apps`` plugin configuration
+
+Example Plugin Implementation
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Create an Airflow plugin that serves your React application:
+
+.. code-block:: python
+
+    from pathlib import Path
+    from fastapi import FastAPI
+    from starlette.staticfiles import StaticFiles
+    import mimetypes
+
+    from airflow.plugins_manager import AirflowPlugin
+
+    # Ensure proper MIME types for cjs files
+    mimetypes.add_type("application/javascript", ".cjs")
+
+    # Create FastAPI app to serve static files
+    app = FastAPI()
+
+    # Mount your React app's dist folder
+    react_app_directory = Path(__file__).parent.joinpath("my-awesome-plugin", "dist")
+    app.mount(
+        "/my-react-app",
+        StaticFiles(directory=react_app_directory, html=True),
+        name="my_react_app_static",
+    )
+
+
+    class MyReactPlugin(AirflowPlugin):
+        name = "My React Plugin"
+
+        # Serve static files
+        fastapi_apps = [
+            {
+                "app": app,
+                "url_prefix": "/my-plugin",
+                "name": "My Plugin Static Server",
+            }
+        ]
+
+        # Register React application
+        react_apps = [
+            {
+                "name": "My Awesome React App",
+                "url_route": "my-awesome-app",
+                "bundle_url": "https://airflow-domain/my-plugin/my-react-app/main.umd.cjs",
+                "destination": "nav",
+            }
+        ]
+
+Plugin Configuration Options
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+React apps support several configuration options; you can take a look at :doc:`plugin ` for more details.
+
+
+Integration Best Practices
+---------------------------
+
+The generated template follows these best practices for Airflow integration:
+
+1. **External Dependencies**: React and common libraries are marked as external to avoid conflicts with the host application
+2. **Global Naming**: Uses a standardized global name (``AirflowPlugin``) for consistency
+3. **Library Build**: Configured as a UMD library with proper externalization for dynamic imports
+4. **MIME Types**: Proper JavaScript MIME type handling for ``.cjs`` files, because FastAPI serves them as plain text by default
+
+Deployment Strategies
+---------------------
+
+External Hosting
+~~~~~~~~~~~~~~~~
+
+You can also host assets on external infrastructure:
+
+.. code-block:: python
+
+    react_apps = [
+        {
+            "name": "My External App",
+            "url_route": "my-external-app",
+            "bundle_url": "https://my-cdn.com/main.umd.cjs",
+            "destination": "nav",
+        }
+    ]
+
+Troubleshooting Integration Issues
+-----------------------------------
+
+Common integration issues and solutions:
+
+**MIME type issues**
+    Ensure ``.js`` and ``.cjs`` files are served with the correct MIME type using ``mimetypes.add_type("application/javascript", ".cjs")``.
+
+**Component not loading**
+    Check that the bundle URL is accessible and matches the expected format.
+
+**React development issues**
+    Refer to the ``README.md`` file generated with your project for detailed troubleshooting of React-specific development issues.
+
+Support for Airflow 2 plugins
+=============================
+
+Airflow 2 plugins are still supported with some limitations. More information on such
+plugins can be found in the Airflow 2 documentation.
+
+Adding REST endpoints through blueprints is still supported; those endpoints will
+be integrated into the FastAPI application via the WSGI middleware and are accessible
+under ``/pluginsv2``.
+
+Adding Flask-AppBuilder views (``appbuilder_views``) via the Airflow 2 plugin interface is still supported; each view is rendered in its own iframe.
+
+It is not possible to extend the Airflow 3 core UI, for instance by extending the base template. Nonetheless, extra menu items
+of the auth managers are added to the core UI security tab, and their ``href`` targets are rendered in iframes.
+This is how the FAB provider integrates its custom views for users, roles, actions, resources and permissions into the Airflow 3 UI.
+
+
+Airflow 3 plugins will be improved to allow UI customization for the entire React app, so it is recommended
+to upgrade your plugins to Airflow 3 plugins when possible. Until then, for temporary or custom needs,
+it is possible to use a middleware to inject custom JavaScript or CSS into the core UI index request.
diff --git a/airflow-core/docs/howto/customize-ui.rst b/airflow-core/docs/howto/customize-ui.rst
index 2378fe3974751..6d1c444f9a844 100644
--- a/airflow-core/docs/howto/customize-ui.rst
+++ b/airflow-core/docs/howto/customize-ui.rst
@@ -61,6 +61,39 @@ After

 .. image:: ../img/change-site-title/example_instance_name_configuration.png

-.. note::
-    From version 2.3.0 you can include markup in ``instance_name`` variable for further customization. To enable, set ``instance_name_has_markup`` under the ``[webserver]`` section inside ``airflow.cfg`` to ``True``.
+Add custom alert messages on the dashboard
+------------------------------------------
+
+Extra alert messages can be shown on the UI dashboard. This can be useful for warning about setup issues
+or announcing changes to end users. The following example shows how to add alert messages:
+
+1. Add the following contents to the ``airflow_local_settings.py`` file under ``$AIRFLOW_HOME/config``.
+   Each alert message should specify a severity level (``info``, ``warning``, ``error``) using ``category``.
+
+   .. code-block:: python
+
+      from airflow.api_fastapi.common.types import UIAlert
+
+      DASHBOARD_UIALERTS = [
+          UIAlert(text="Welcome to Airflow.", category="info"),
+          UIAlert(text="Airflow server downtime scheduled for tomorrow at 10:00 AM.", category="warning"),
+          UIAlert(text="Critical error detected!", category="error"),
+      ]
+
+   See :ref:`Configuring local settings ` for details on how to
+   configure local settings.
+
+2. Restart the Airflow webserver, and you should now see:
+
+.. image:: ../img/ui-alert-message.png
+
+Alert messages also support Markdown. In the following example, we show an alert message rendered as a level-2 heading with a link included.
+
+   .. code-block:: python
+
+      DASHBOARD_UIALERTS = [
+          UIAlert(text="## Visit [airflow.apache.org](https://airflow.apache.org)", category="info"),
+      ]
+
+.. image:: ../img/ui-alert-message-markdown.png
diff --git a/airflow-core/docs/howto/define-extra-link.rst b/airflow-core/docs/howto/define-extra-link.rst
index 0a1f1b046897b..83718773a9df8 100644
--- a/airflow-core/docs/howto/define-extra-link.rst
+++ b/airflow-core/docs/howto/define-extra-link.rst
@@ -21,7 +21,7 @@
 Define an operator extra link
 =============================

-If you want to add further links to operators you can define them via a plugin or provider package.
+If you want to add extra links to operators you can define them via a plugin or provider package.
 Extra links will be displayed in task details page in Grid view.

 .. image:: ../img/operator_extra_link.png
@@ -60,7 +60,8 @@ The following code shows how to add extra links to an operator via Plugins:
             GoogleLink(),
         ]

-.. note:: Operator Extra Links should be registered via Airflow Plugins or custom Airflow Provider to work.
+Extra links defined via a custom Airflow provider or Airflow operators are pushed as an XCom to the XCom table in the
+metadata DB during task execution. When the Grid view is displayed, this XCom is retrieved and displayed.

 You can also add a global operator extra link that will be available to
 all the operators through an Airflow plugin or through Airflow providers. You can learn more about it in the
@@ -93,7 +94,7 @@ tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Ope
     class S3LogLink(BaseOperatorLink):
         name = "S3"

-        # Add list of all the operators to which you want to add this OperatorLinks
+        # Add list of all the operators to which you want to add this extra link
         # Example: operators = [GCSToS3Operator, GCSToBigQueryOperator]
         operators = [GCSToS3Operator]
@@ -120,9 +121,9 @@ tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Ope

 **Overriding Operator Links of Existing Operators**:

-It is also possible to replace a built in link on an operator via a Plugin. For example
+It is also possible to replace a built-in link on an operator via a Plugin. For example
 :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator` includes a link to the Google Cloud
-Console, but if we wanted to change that link we could:
+Console, but if we wanted to change that link we could do:

 .. code-block:: python
@@ -173,7 +174,7 @@ specify the list of operators that provide extra link capability. This happens b
 class name in the ``provider-info`` information stored in your Provider's package meta-data:

 Example meta-data required in your provider-info dictionary (this is part of the meta-data returned
-by ``apache-airflow-providers-google`` provider currently:
+by ``apache-airflow-providers-google`` provider currently):

 ..
code-block:: yaml diff --git a/airflow-core/docs/howto/docker-compose/docker-compose.yaml b/airflow-core/docs/howto/docker-compose/docker-compose.yaml index d00db43cb1af9..2c2a614c9ef72 100644 --- a/airflow-core/docs/howto/docker-compose/docker-compose.yaml +++ b/airflow-core/docs/howto/docker-compose/docker-compose.yaml @@ -71,8 +71,7 @@ x-airflow-common: # for other purpose (development, test and especially production usage) build/extend Airflow image. _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-} # The following line can be used to set a custom config file, stored in the local config folder - # If you want to use it, outcomment it and replace airflow.cfg with the name of your config file - # AIRFLOW_CONFIG: '/opt/airflow/config/airflow.cfg' + AIRFLOW_CONFIG: '/opt/airflow/config/airflow.cfg' volumes: - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs @@ -88,7 +87,7 @@ x-airflow-common: services: postgres: - image: postgres:13 + image: postgres:16 environment: POSTGRES_USER: airflow POSTGRES_PASSWORD: airflow @@ -218,6 +217,7 @@ services: echo "For other operating systems you can get rid of the warning with manually created .env file:" echo " See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user" echo + export AIRFLOW_UID=$$(id -u) fi one_meg=1048576 mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg)) @@ -252,9 +252,38 @@ services: echo " https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin" echo fi - mkdir -p /sources/logs /sources/dags /sources/plugins - chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} - exec /entrypoint airflow version + echo + echo "Creating missing opt dirs if missing:" + echo + mkdir -v -p /opt/airflow/{logs,dags,plugins,config} + echo + echo "Airflow version:" + /entrypoint airflow version + echo + echo "Files in shared volumes:" + echo + ls -la /opt/airflow/{logs,dags,plugins,config} + echo + echo "Running airflow config list to create default config file if missing." + echo + /entrypoint airflow config list >/dev/null + echo + echo "Files in shared volumes:" + echo + ls -la /opt/airflow/{logs,dags,plugins,config} + echo + echo "Change ownership of files in /opt/airflow to ${AIRFLOW_UID}:0" + echo + chown -R "${AIRFLOW_UID}:0" /opt/airflow/ + echo + echo "Change ownership of files in shared volumes to ${AIRFLOW_UID}:0" + echo + chown -v -R "${AIRFLOW_UID}:0" /opt/airflow/{logs,dags,plugins,config} + echo + echo "Files in shared volumes:" + echo + ls -la /opt/airflow/{logs,dags,plugins,config} + # yamllint enable rule:line-length environment: <<: *airflow-common-env @@ -264,8 +293,6 @@ services: _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} _PIP_ADDITIONAL_REQUIREMENTS: '' user: "0:0" - volumes: - - ${AIRFLOW_PROJ_DIR:-.}:/sources airflow-cli: <<: *airflow-common @@ -279,6 +306,8 @@ services: - bash - -c - airflow + depends_on: + <<: *airflow-common-depends-on # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up # or by explicitly targeted on the command line e.g. docker-compose up flower. 
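The compose file above leaves all one-time setup to the ``airflow-init`` service. As a quick orientation, here is a minimal sketch of the standard quick-start sequence, assuming you are in the folder containing ``docker-compose.yaml`` (``AIRFLOW_UID`` is the variable the init script above checks; the ``.env`` step applies to Linux hosts only):

.. code-block:: bash

    # Linux only: make files created in dags/, logs/, config/ and plugins/
    # owned by your host user instead of root
    echo "AIRFLOW_UID=$(id -u)" > .env

    # One-off initialization: runs DB migrations and creates the first account
    docker compose up airflow-init

    # Start all services defined in docker-compose.yaml
    docker compose up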
diff --git a/airflow-core/docs/howto/docker-compose/index.rst b/airflow-core/docs/howto/docker-compose/index.rst
index 1e60262da14a0..62382469df793 100644
--- a/airflow-core/docs/howto/docker-compose/index.rst
+++ b/airflow-core/docs/howto/docker-compose/index.rst
@@ -89,7 +89,7 @@ This file contains several service definitions:
 - ``airflow-scheduler`` - The :doc:`scheduler ` monitors all tasks and dags, then triggers the
   task instances once their dependencies are complete.
 - ``airflow-dag-processor`` - The DAG processor parses DAG files.
-- ``airflow-webserver`` - The webserver is available at ``http://localhost:8080``.
+- ``airflow-api-server`` - The API server is available at ``http://localhost:8080``.
 - ``airflow-worker`` - The worker that executes the tasks given by the scheduler.
 - ``airflow-triggerer`` - The triggerer runs an event loop for deferrable tasks.
 - ``airflow-init`` - The initialization service.
@@ -125,8 +125,8 @@ Setting the right Airflow user
 ------------------------------

 On **Linux**, the quick-start needs to know your host user id and needs to have group id set to ``0``.
-Otherwise the files created in ``dags``, ``logs`` and ``plugins`` will be created with ``root`` user ownership.
-You have to make sure to configure them for the docker-compose:
+Otherwise the files created in ``dags``, ``logs``, ``config`` and ``plugins`` will be created with
+``root`` user ownership. You have to make sure to configure them for the docker-compose:

 .. code-block:: bash

@@ -143,6 +143,17 @@ safely ignore it. You can also manually create an ``.env`` file in the same fold

     AIRFLOW_UID=50000

+Initialize airflow.cfg (Optional)
+---------------------------------
+
+If you want to initialize ``airflow.cfg`` with default values before launching the Airflow services, run:
+
+.. code-block:: bash
+
+    docker compose run airflow-cli airflow config list
+
+This will seed ``airflow.cfg`` with default values in the ``config`` folder.
+
 Initialize the database
 -----------------------

@@ -199,8 +210,8 @@ In a second terminal you can check the condition of the containers and make sure

     CONTAINER ID   IMAGE |version-spacepad|          COMMAND                  CREATED          STATUS                    PORTS                    NAMES
     247ebe6cf87a   apache/airflow:|version|          "/usr/bin/dumb-init …"   3 minutes ago    Up 3 minutes (healthy)    8080/tcp                 compose_airflow-worker_1
     ed9b09fc84b1   apache/airflow:|version|          "/usr/bin/dumb-init …"   3 minutes ago    Up 3 minutes (healthy)    8080/tcp                 compose_airflow-scheduler_1
-    7cb1fb603a98   apache/airflow:|version|          "/usr/bin/dumb-init …"   3 minutes ago    Up 3 minutes (healthy)    0.0.0.0:8080->8080/tcp   compose_airflow-webserver_1
-    74f3bbe506eb   postgres:13 |version-spacepad|    "docker-entrypoint.s…"   18 minutes ago   Up 17 minutes (healthy)   5432/tcp                 compose_postgres_1
+    7cb1fb603a98   apache/airflow:|version|          "/usr/bin/dumb-init …"   3 minutes ago    Up 3 minutes (healthy)    0.0.0.0:8080->8080/tcp   compose_airflow-api_server_1
+    74f3bbe506eb   postgres:16 |version-spacepad|    "docker-entrypoint.s…"   18 minutes ago   Up 17 minutes (healthy)   5432/tcp                 compose_postgres_1
     0bd6576d23cb   redis:latest |version-spacepad|   "docker-entrypoint.s…"   10 hours ago     Up 17 minutes (healthy)   0.0.0.0:6379->6379/tcp   compose_redis_1

 Accessing the environment
@@ -268,7 +279,7 @@ Here is a sample ``curl`` command, which sends a request to retrieve a pool list

 .. code-block:: bash
-    ENDPOINT_URL="http://localhost:8080/"
+    ENDPOINT_URL="http://localhost:8080"
     curl -X GET \
         --user "airflow:airflow" \
         "${ENDPOINT_URL}/api/v1/pools"
@@ -296,11 +307,13 @@ Examples of how you can extend the image with custom providers, python packages
 apt packages and more can be found in :doc:`Building the image `.

 .. note::
-    Creating custom images means that you need to maintain also a level of automation as you need to re-create the images
-    when either the packages you want to install or Airflow is upgraded. Please do not forget about keeping these scripts.
-    Also keep in mind, that in cases when you run pure Python tasks, you can use the
-    `Python Virtualenv functions <_howto/operator:PythonVirtualenvOperator>`_ which will
-    dynamically source and install python dependencies during runtime. With Airflow 2.8.0 Virtualenvs can also be cached.
+    Creating custom images means that you also need to maintain a level of
+    automation, as you need to re-create the images when either the packages you
+    want to install or Airflow is upgraded. Please do not forget about keeping
+    these scripts. Also keep in mind that, in cases when you run pure Python
+    tasks, you can use :ref:`Python Virtualenv functions `,
+    which will dynamically source and install python dependencies during runtime.
+    With Airflow 2.8.0, virtualenvs can also be cached.

 Special case - adding dependencies via requirements.txt file
 ============================================================
@@ -346,12 +359,9 @@ Special case - Adding a custom config file

 If you have a custom config file and wish to use it in your Airflow instance, you need to perform the
 following steps:

-1) Remove comment from the ``AIRFLOW_CONFIG: '/opt/airflow/config/airflow.cfg'`` line
-   in the ``docker-compose.yaml`` file.
-
-2) Place your custom ``airflow.cfg`` file in the local config folder.
+1) Replace the auto-generated ``airflow.cfg`` file in the local config folder with your custom config file.

-3) If your config file has a different name than ``airflow.cfg``, adjust the filename in
+2) If your config file has a different name than ``airflow.cfg``, adjust the filename in
    ``AIRFLOW_CONFIG: '/opt/airflow/config/airflow.cfg'``

 Networking
diff --git a/airflow-core/docs/howto/dynamic-dag-generation.rst b/airflow-core/docs/howto/dynamic-dag-generation.rst
index 814b620ea719b..734e89f5d805d 100644
--- a/airflow-core/docs/howto/dynamic-dag-generation.rst
+++ b/airflow-core/docs/howto/dynamic-dag-generation.rst
@@ -40,7 +40,8 @@

 If you want to use variables to configure your code, you should always use
 `environment variables `_ in your
 top-level code rather than :doc:`Airflow Variables `. Using Airflow Variables
 in top-level code creates a connection to the metadata DB of Airflow to fetch the value, which can slow
-down parsing and place extra load on the DB. See the `best practices on Airflow Variables `_
+down parsing and place extra load on the DB. See
+:ref:`best practices on Airflow Variables `
 to make the best use of Airflow Variables in your dags using Jinja templates.
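+As a quick illustration of this guidance, such configuration can be exported as a plain
+environment variable before the Airflow components start. A minimal sketch, where the
+``DEPLOYMENT`` name anticipates the example that follows:
+
+.. code-block:: bash
+
+    # Top-level DAG code can read this with os.environ.get("DEPLOYMENT")
+    # instead of Variable.get(), avoiding a metadata DB connection at
+    # parse time.
+    export DEPLOYMENT=PROD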
 For example you could set the ``DEPLOYMENT`` variable differently for your production and development
diff --git a/airflow-core/docs/howto/email-config.rst b/airflow-core/docs/howto/email-config.rst
index 82a3e745c1b6e..c3373447654d7 100644
--- a/airflow-core/docs/howto/email-config.rst
+++ b/airflow-core/docs/howto/email-config.rst
@@ -88,7 +88,7 @@ Send email using SendGrid

 Using Default SMTP
 ^^^^^^^^^^^^^^^^^^

-You can use the default airflow SMTP backend to send email with SendGrid
+You can use the default Airflow SMTP backend to send email with SendGrid

 .. code-block:: ini
@@ -184,6 +184,14 @@ Follow the steps below to enable it:
         email_conn_id = aws_default
         from_email = From email

+   The equivalent environment variables look like:
+
+   .. code-block::
+
+      AIRFLOW__EMAIL__EMAIL_BACKEND=airflow.providers.amazon.aws.utils.emailer.send_email
+      AIRFLOW__EMAIL__EMAIL_CONN_ID=aws_default
+      AIRFLOW__EMAIL__FROM_EMAIL=email@example.com
+
    Note that for SES, you must configure from_email to the valid email that can send messages from SES.

 3. Create a connection called ``aws_default``, or choose a custom connection
diff --git a/airflow-core/docs/howto/export-more-env-vars.rst b/airflow-core/docs/howto/export-more-env-vars.rst
index e393f479302d1..a35b3be6fb5af 100644
--- a/airflow-core/docs/howto/export-more-env-vars.rst
+++ b/airflow-core/docs/howto/export-more-env-vars.rst
@@ -23,7 +23,7 @@ Export dynamic environment variables available for operators to use

 The key value pairs returned in ``get_airflow_context_vars`` defined in
-``airflow_local_settings.py`` are injected to default airflow context environment variables,
+``airflow_local_settings.py`` are injected to default Airflow context environment variables,
 which are available as environment variables when running tasks. Note, both key and value
 must be strings.
diff --git a/airflow-core/docs/howto/index.rst b/airflow-core/docs/howto/index.rst
index aa8372dd9195a..396b0877838f4 100644
--- a/airflow-core/docs/howto/index.rst
+++ b/airflow-core/docs/howto/index.rst
@@ -30,7 +30,9 @@ configuring an Airflow environment.
    :maxdepth: 2

    Using the CLI
+   Using the REST API <../security/api>
    add-dag-tags
+   add-owner-links
    notifications
    set-config
    set-up-database
@@ -51,3 +53,4 @@ configuring an Airflow environment.
    email-config
    dynamic-dag-generation
    docker-compose/index
+   run-with-self-signed-certificate
diff --git a/airflow-core/docs/howto/listener-plugin.rst b/airflow-core/docs/howto/listener-plugin.rst
index 20569a3fc6f8d..9d13909354813 100644
--- a/airflow-core/docs/howto/listener-plugin.rst
+++ b/airflow-core/docs/howto/listener-plugin.rst
@@ -44,14 +44,14 @@ Using this plugin, following events can be listened:
     * dag run is in running state.
     * dag run is in success state.
     * dag run is in failure state.
-    * on start before event like airflow job, scheduler
-    * before stop for event like airflow job, scheduler
+    * on start before event like Airflow job, scheduler
+    * before stop for event like Airflow job, scheduler

 Listener Registration
 ---------------------

 A listener plugin with object reference to listener object is registered
-as part of airflow plugin. The following is a
+as part of an Airflow plugin. The following is a
 skeleton for us to implement a new listener:

 .. code-block:: python
diff --git a/airflow-core/docs/howto/run-behind-proxy.rst b/airflow-core/docs/howto/run-behind-proxy.rst
index 294823753a585..483f3e796904c 100644
--- a/airflow-core/docs/howto/run-behind-proxy.rst
+++ b/airflow-core/docs/howto/run-behind-proxy.rst
@@ -51,6 +51,11 @@ To do so, you need to set the following setting in your ``airflow.cfg``::
     }
   }

+- Some parts of the UI are rendered inside iframes (auth manager security links, for instance), so you need to make sure that you are not setting a restrictive CSP for iframe rendering
+  such as ``frame-ancestors 'none'``. You can set the CSP header in your reverse proxy configuration, for example::
+
+      add_header Content-Security-Policy "frame-ancestors 'self';";
+
 - Use ``--proxy-headers`` CLI flag to tell Uvicorn to respect these headers: ``airflow api-server --proxy-headers``

 - If your proxy server is not on the same host (or in the same docker container) as Airflow, then you will need to
@@ -58,6 +63,9 @@ To do so, you need to set the following setting in your ``airflow.cfg``::
   `Uvicorn's docs `_. For the full options you can pass here.
   (Please note the ``--forwarded-allow-ips`` CLI option does not exist in Airflow.)

+- Please make sure your proxy does not enforce the ``HttpOnly`` flag on the ``Set-Cookie`` headers.
+  The Airflow frontend needs to access cookies through JavaScript, and an ``HttpOnly`` flag would break this functionality.
+
 .. spelling::

     Uvicorn
diff --git a/airflow-core/docs/howto/run-with-self-signed-certificate.rst b/airflow-core/docs/howto/run-with-self-signed-certificate.rst
new file mode 100644
index 0000000000000..53a8446d0f724
--- /dev/null
+++ b/airflow-core/docs/howto/run-with-self-signed-certificate.rst
@@ -0,0 +1,83 @@
+.. Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements.  See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership.  The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License.  You may obtain a copy of the License at
+
+..   http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied.  See the License for the
+   specific language governing permissions and limitations
+   under the License.
+
+Running Airflow with a self-signed certificate
+##############################################
+
+Airflow can be configured to run with a self-signed certificate, but this
+requires a couple of extra steps to enable Workers to trust the API Server.
+This guide is based on the :doc:`docker-compose/index` setup.
+
+.. caution::
+
+   This procedure is intended for learning, exploration and development. It is
+   not suitable for production use.
+
+Generating the certificate
+==========================
+
+The first step is the generation of the certificate. This requires the addition
+of ``localhost`` and ``airflow-apiserver`` as Subject Alternative Names so that
+the health check and Worker to API Server communications function.
+
+.. code-block:: sh
+
+   export AIRFLOW_CN=example-common-name
+   openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem \
+     -sha256 -days 3650 -nodes \
+     -subj "/CN=$AIRFLOW_CN" \
+     -addext "subjectAltName=DNS:localhost,DNS:airflow-apiserver"
+
+Where ``example-common-name`` is the common name of your server. Place
+``cert.pem`` and ``key.pem`` in the ``config`` folder.
+
+Altering ``docker-compose.yaml``
+================================
+
+Add the two environment variables below and alter the API Server URL
+to HTTPS:
+
+.. code-block:: sh
+
+   AIRFLOW__CORE__EXECUTION_API_SERVER_URL: 'https://airflow-apiserver:8080/execution/'
+   # Added to enable SSL
+   AIRFLOW__API__SSL_CERT: '/opt/airflow/config/cert.pem'
+   AIRFLOW__API__SSL_KEY: '/opt/airflow/config/key.pem'
+
+Alter the API Server health check to trust the certificate:
+
+.. code-block:: sh
+
+   airflow-apiserver:
+     <<: *airflow-common
+     command: api-server
+     ports:
+       - "8080:8080"
+     healthcheck:
+       # Add --cacert to trust certificate
+       test: ["CMD", "curl", "--fail", "--cacert", "${AIRFLOW_PROJ_DIR:-.}/config/cert.pem", "https://localhost:8080/api/v2/version"]
+
+Running Airflow
+===============
+
+Now you can start all services:
+
+.. code-block:: sh
+
+   docker compose up
+
+The Airflow UI is available at ``https://localhost:8080``.
diff --git a/airflow-core/docs/howto/run-with-systemd.rst b/airflow-core/docs/howto/run-with-systemd.rst
index 92dafd5760cd4..1006ce4a06af6 100644
--- a/airflow-core/docs/howto/run-with-systemd.rst
+++ b/airflow-core/docs/howto/run-with-systemd.rst
@@ -27,6 +27,12 @@ In the ``scripts/systemd`` directory, you can find unit files that have been
 tested on Redhat based systems. These files can be used as-is by copying them over
 to ``/usr/lib/systemd/system``.

+You can find the latest systemd unit files on GitHub:
+https://github.com/apache/airflow/tree/main/scripts/systemd
+
+Assumptions
+-----------
+
 The following **assumptions** have been made while creating these unit files:

 #. Airflow runs as the following ``user:group`` ``airflow:airflow``.
@@ -34,7 +40,32 @@ The following **assumptions** have been made while creating these unit files:

 If this is not the case, appropriate changes will need to be made.

+Environment Configuration
+-------------------------
+
 Please **note** that environment configuration is picked up from ``/etc/sysconfig/airflow``.

 An example file is supplied within ``scripts/systemd``. You can also define configuration at
 :envvar:`AIRFLOW_HOME` or :envvar:`AIRFLOW_CONFIG`.
+
+Using Virtual Environments
+--------------------------
+
+.. note::
+   If Airflow is installed inside a virtual environment (e.g. ``venv`` or ``conda``), you must update the ``ExecStart`` line in each systemd unit file to activate the virtualenv first.
+
+   Example:
+
+   .. code-block:: ini
+
+      ExecStart=/bin/bash -c 'source /home/airflow/airflow_venv/bin/activate && airflow scheduler'
+
+   Replace ``/home/airflow/airflow_venv/`` with the path to your virtual environment.
+
+New Airflow 3.0 Services
+------------------------
+
+Since Apache Airflow 3.0, additional components have been split out into separate services. The following new unit files are available:
+
+- ``airflow-triggerer.service`` for deferrable task triggering
+- ``airflow-api.service`` for the standalone REST API server
diff --git a/airflow-core/docs/howto/set-config.rst b/airflow-core/docs/howto/set-config.rst
index b25bf3470a85f..13a46e15f57e7 100644
--- a/airflow-core/docs/howto/set-config.rst
+++ b/airflow-core/docs/howto/set-config.rst
@@ -105,7 +105,7 @@ The following config options support this ``_cmd`` and ``_secret`` version:
 * ``result_backend`` in ``[celery]`` section
 * ``password`` in ``[atlas]`` section
 * ``smtp_password`` in ``[smtp]`` section
-* ``secret_key`` in ``[webserver]`` section
+* ``secret_key`` in ``[api]`` section

 The ``_cmd`` config options can also be set using a corresponding environment variable
 the same way the usual config options can. For example:
@@ -159,12 +159,12 @@ the example below.

 .. note::
     Use the same configuration across all the Airflow components. While each component
     does not require all, some configurations need to be the same otherwise they would not
-    work as expected. A good example for that is :ref:`secret_key` which
+    work as expected. A good example for that is :ref:`secret_key` which
     should be the same on the Webserver and Worker to allow the Webserver to fetch logs from the Worker.
     The webserver key is also used to authorize requests to Celery workers when logs are retrieved.
     The token generated using the secret key has a short expiry time though - make sure that time on ALL the machines
-    that you run airflow components on is synchronized (for example using ntpd) otherwise you might get
+    that you run Airflow components on is synchronized (for example using ntpd) otherwise you might get
     "forbidden" errors when the logs are accessed.

 .. _set-config:configuring-local-settings:
@@ -196,27 +196,3 @@ Example settings you can configure this way:
 * :ref:`Customize your UI `
 * :ref:`Configure more variables to export `
 * :ref:`Customize your DB configuration `
-
-
-Configuring Flask Application for Airflow Webserver
-===================================================
-
-Airflow uses Flask to render the web UI. When you initialize the Airflow webserver, predefined configuration
-is used, based on the ``webserver`` section of the ``airflow.cfg`` file. You can override these settings
-and add any extra settings however by adding flask configuration to ``webserver_config.py`` file in your
-``$AIRFLOW_HOME`` directory. This file is automatically loaded by the webserver.
-
-For example if you would like to change rate limit strategy to "moving window", you can set the
-``RATELIMIT_STRATEGY`` to ``moving-window``.
-
-You could also enhance / modify the underlying flask app directly,
-as the `app context `_ is pushed to ``webserver_config.py``:
-
-.. code-block:: python
-
-    from flask import current_app as app
-
-
-    @app.before_request
-    def print_custom_message() -> None:
-        print("Executing before every request")
diff --git a/airflow-core/docs/howto/set-up-database.rst b/airflow-core/docs/howto/set-up-database.rst
index 27187006804f0..245a8c912c888 100644
--- a/airflow-core/docs/howto/set-up-database.rst
+++ b/airflow-core/docs/howto/set-up-database.rst
@@ -339,7 +339,7 @@ Migrating off MsSQL Server

 As with Airflow 2.9.0 the support of MSSQL has ended, a migration script can help with
 Airflow version 2.7.x or 2.8.x to migrate off SQL-Server. The migration script is available in
-`airflow-mssql-migration repo on Github `_.
+`airflow-mssql-migration repo on GitHub `_.
Note that the migration script is provided without support and warranty. diff --git a/airflow-core/docs/howto/usage-cli.rst b/airflow-core/docs/howto/usage-cli.rst index a3bb13e075994..a7dcc71abba4d 100644 --- a/airflow-core/docs/howto/usage-cli.rst +++ b/airflow-core/docs/howto/usage-cli.rst @@ -44,7 +44,7 @@ For permanent (but not global) airflow activation, use: register-python-argcomplete airflow >> ~/.bashrc -For one-time activation of argcomplete for airflow only, use: +For one-time activation of argcomplete for Airflow only, use: .. code-block:: bash @@ -76,7 +76,7 @@ For example, to print the ``example_complex`` DAG to the terminal: airflow dags show example_complex -This will print the rendered DAG structure (similar to :ref:`Graph `) to the screen in DOT format. +This will print the rendered DAG structure to the screen in DOT format. Multiple file formats are supported. To use them, add the argument ``--save [filename].[format]``. diff --git a/airflow-core/docs/howto/variable.rst b/airflow-core/docs/howto/variable.rst index 20c36597c94e6..a07a04a154571 100644 --- a/airflow-core/docs/howto/variable.rst +++ b/airflow-core/docs/howto/variable.rst @@ -61,10 +61,20 @@ You can use them in your dags as: Single underscores surround ``VAR``. This is in contrast with the way ``airflow.cfg`` parameters are stored, where double underscores surround the config section name. - Variables set using Environment Variables would not appear in the Airflow UI but you will - be able to use them in your DAG file. Variables set using Environment Variables will also + Variables set using Environment Variables will also take precedence over variables defined in the Airflow UI. +Visibility in UI and CLI +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Variables defined through environment variables are **not displayed** in the Airflow UI or listed using ``airflow variables list``. + +This is because these variables are **resolved dynamically at runtime**, typically on the **worker** process executing your task. They are not stored in the metadata database or loaded in the webserver or scheduler environment. + +This supports secure deployment patterns where environment-based secrets (e.g. via ``.env`` files, Docker, or Kubernetes secrets) are injected only into runtime components like workers — and not into components exposed to users, like the webserver. + +If you want variables to appear in the UI for visibility or editing, define them in the metadata database instead. 
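+As a quick illustration, a Variable supplied this way lives only in the process environment. A minimal sketch (``my_setting`` is a hypothetical key; the ``AIRFLOW_VAR_`` prefix is the documented naming pattern described above):
+
+.. code-block:: bash
+
+    # Resolved at runtime by Variable.get("my_setting"); never written to the
+    # metadata DB, so it will not appear in the UI or `airflow variables list`.
+    export AIRFLOW_VAR_MY_SETTING='{"env": "prod"}'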
+ Securing Variables ------------------ diff --git a/airflow-core/docs/img/airflow-2-arch.png b/airflow-core/docs/img/airflow-2-arch.png new file mode 100644 index 0000000000000..1a37ae6748c3b Binary files /dev/null and b/airflow-core/docs/img/airflow-2-arch.png differ diff --git a/airflow-core/docs/img/airflow-3-arch.png b/airflow-core/docs/img/airflow-3-arch.png new file mode 100644 index 0000000000000..c09acad3968d1 Binary files /dev/null and b/airflow-core/docs/img/airflow-3-arch.png differ diff --git a/airflow-core/docs/img/airflow_erd.sha256 b/airflow-core/docs/img/airflow_erd.sha256 index 33fe09fd943e4..06cc1a78cd12e 100644 --- a/airflow-core/docs/img/airflow_erd.sha256 +++ b/airflow-core/docs/img/airflow_erd.sha256 @@ -1 +1 @@ -bc93e7288a7a8355b15dc721accaf80260f370f3afa0d478248f9fe4692a1f1d \ No newline at end of file +ad634ee7ad7d914013df2cddccb47a9a6201ff19680c71089300e61539032293 \ No newline at end of file diff --git a/airflow-core/docs/img/airflow_erd.svg b/airflow-core/docs/img/airflow_erd.svg index 7ce02c7187b1a..104180ec20ef5 100644 --- a/airflow-core/docs/img/airflow_erd.svg +++ b/airflow-core/docs/img/airflow_erd.svg @@ -4,11 +4,11 @@ - - + + %3 - + dag_priority_parsing_request @@ -305,1888 +305,2038 @@ asset_alias - -asset_alias - -id - - [INTEGER] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL + +asset_alias + +id + + [INTEGER] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL asset_alias_asset - -asset_alias_asset - -alias_id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL + +asset_alias_asset + +alias_id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset - -0..N -1 + +0..N +1 asset_alias_asset_event - -asset_alias_asset_event - -alias_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +asset_alias_asset_event + +alias_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL asset_alias--asset_alias_asset_event - -0..N -1 + +0..N +1 dag_schedule_asset_alias_reference - -dag_schedule_asset_alias_reference - -alias_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_alias_reference + +alias_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset_alias--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 asset - -asset - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -extra - - [JSON] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +extra + + [JSON] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL asset--asset_alias_asset - -0..N -1 + +0..N +1 asset_trigger - -asset_trigger - -asset_id - - [INTEGER] - NOT NULL - -trigger_id - - [INTEGER] - NOT NULL + +asset_trigger + +asset_id + + [INTEGER] + NOT NULL + +trigger_id + + [INTEGER] + NOT NULL asset--asset_trigger - -0..N -1 + +0..N +1 asset_active - -asset_active - -name - - [VARCHAR(1500)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +asset_active + +name + + [VARCHAR(1500)] + NOT NULL + +uri + 
+ [VARCHAR(1500)] + NOT NULL asset--asset_active - -1 -1 + +1 +1 asset--asset_active - -1 -1 + +1 +1 dag_schedule_asset_reference - -dag_schedule_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--dag_schedule_asset_reference - -0..N -1 + +0..N +1 task_outlet_asset_reference - -task_outlet_asset_reference - -asset_id - - [INTEGER] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL + +task_outlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL asset--task_outlet_asset_reference - -0..N -1 + +0..N +1 - + +task_inlet_asset_reference + +task_inlet_asset_reference + +asset_id + + [INTEGER] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + + + +asset--task_inlet_asset_reference + +0..N +1 + + + asset_dag_run_queue - -asset_dag_run_queue - -asset_id - - [INTEGER] - NOT NULL - -target_dag_id - - [VARCHAR(250)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +asset_dag_run_queue + +asset_id + + [INTEGER] + NOT NULL + +target_dag_id + + [VARCHAR(250)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL - + asset--asset_dag_run_queue - -0..N -1 + +0..N +1 - + asset_event - -asset_event - -id - - [INTEGER] - NOT NULL - -asset_id - - [INTEGER] - NOT NULL - -extra - - [JSON] - NOT NULL - -source_dag_id - - [VARCHAR(250)] - -source_map_index - - [INTEGER] - -source_run_id - - [VARCHAR(250)] - -source_task_id - - [VARCHAR(250)] - -timestamp - - [TIMESTAMP] - NOT NULL + +asset_event + +id + + [INTEGER] + NOT NULL + +asset_id + + [INTEGER] + NOT NULL + +extra + + [JSON] + NOT NULL + +source_dag_id + + [VARCHAR(250)] + +source_map_index + + [INTEGER] + +source_run_id + + [VARCHAR(250)] + +source_task_id + + [VARCHAR(250)] + +timestamp + + [TIMESTAMP] + NOT NULL - + asset_event--asset_alias_asset_event - -0..N -1 + +0..N +1 - + dagrun_asset_event - -dagrun_asset_event - -dag_run_id - - [INTEGER] - NOT NULL - -event_id - - [INTEGER] - NOT NULL + +dagrun_asset_event + +dag_run_id + + [INTEGER] + NOT NULL + +event_id + + [INTEGER] + NOT NULL - + asset_event--dagrun_asset_event - -0..N -1 + +0..N +1 - + trigger - -trigger - -id - - [INTEGER] - NOT NULL - -classpath - - [VARCHAR(1000)] - NOT NULL - -created_date - - [TIMESTAMP] - NOT NULL - -kwargs - - [TEXT] - NOT NULL - -triggerer_id - - [INTEGER] + +trigger + +id + + [INTEGER] + NOT NULL + +classpath + + [VARCHAR(1000)] + NOT NULL + +created_date + + [TIMESTAMP] + NOT NULL + +kwargs + + [TEXT] + NOT NULL + +triggerer_id + + [INTEGER] - + trigger--asset_trigger - -0..N -1 + +0..N +1 - + task_instance - -task_instance - -id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - 
-external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -last_heartbeat_at - - [TIMESTAMP] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - [TIMESTAMP] - -try_number - - [INTEGER] - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance + +id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +last_heartbeat_at + + [TIMESTAMP] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] - + trigger--task_instance - -0..N -{0,1} - - - -rendered_task_instance_fields - -rendered_task_instance_fields - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -k8s_pod_yaml - - [JSON] - -rendered_fields - - [JSON] - NOT NULL + +0..N +{0,1} - - -task_instance--rendered_task_instance_fields - -0..N -1 - - - -task_instance--rendered_task_instance_fields - -0..N -1 - - - -task_instance--rendered_task_instance_fields - -0..N -1 + + +deadline + +deadline + +id + + [UUID] + NOT NULL + +callback + + [JSON] + NOT NULL + +callback_state + + [VARCHAR(20)] + +dagrun_id + + [INTEGER] + +deadline_time + + [TIMESTAMP] + NOT NULL + +trigger_id + + [INTEGER] + + + +trigger--deadline + +0..N +{0,1} - + + +hitl_detail + +hitl_detail + +ti_id + + [UUID] + NOT NULL + +body + + [TEXT] + +chosen_options + + [JSON] + +defaults + + [JSON] + +multiple + + [BOOLEAN] + +options + + [JSON] + NOT NULL + +params + + [JSON] + NOT NULL + +params_input + + [JSON] + NOT NULL + +response_at + + [TIMESTAMP] + +subject + + [TEXT] + NOT NULL + +user_id + + [VARCHAR(128)] + + -task_instance--rendered_task_instance_fields - -0..N -1 +task_instance--hitl_detail + +1 +1 - + task_map - -task_map - -dag_id - - [VARCHAR(250)] - NOT NULL - -map_index - - 
[INTEGER] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -keys - - [JSONB] - -length - - [INTEGER] - NOT NULL + +task_map + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +keys + + [JSONB] + +length + + [INTEGER] + NOT NULL task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 task_instance--task_map - -0..N -1 + +0..N +1 - + task_reschedule - -task_reschedule - -id - - [INTEGER] - NOT NULL - -duration - - [INTEGER] - NOT NULL - -end_date - - [TIMESTAMP] - NOT NULL - -reschedule_date - - [TIMESTAMP] - NOT NULL - -start_date - - [TIMESTAMP] - NOT NULL - -ti_id - - [UUID] - NOT NULL + +task_reschedule + +id + + [INTEGER] + NOT NULL + +duration + + [INTEGER] + NOT NULL + +end_date + + [TIMESTAMP] + NOT NULL + +reschedule_date + + [TIMESTAMP] + NOT NULL + +start_date + + [TIMESTAMP] + NOT NULL + +ti_id + + [UUID] + NOT NULL task_instance--task_reschedule - -0..N -1 + +0..N +1 - + xcom - -xcom - -dag_run_id - - [INTEGER] - NOT NULL - -key - - [VARCHAR(512)] - NOT NULL - -map_index - - [INTEGER] - NOT NULL - -task_id - - [VARCHAR(250)] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -run_id - - [VARCHAR(250)] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL - -value - - [JSONB] + +xcom + +dag_run_id + + [INTEGER] + NOT NULL + +key + + [VARCHAR(512)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +timestamp + + [TIMESTAMP] + NOT NULL + +value + + [JSONB] task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 task_instance--xcom - -0..N -1 + +0..N +1 - + task_instance_note - -task_instance_note - -ti_id - - [UUID] - NOT NULL - -content - - [VARCHAR(1000)] - -created_at - - [TIMESTAMP] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -user_id - - [VARCHAR(128)] + +task_instance_note + +ti_id + + [UUID] + NOT NULL + +content + + [VARCHAR(1000)] + +created_at + + [TIMESTAMP] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +user_id + + [VARCHAR(128)] task_instance--task_instance_note - -1 -1 + +1 +1 - + task_instance_history - -task_instance_history - -task_instance_id - - [UUID] - NOT NULL - -context_carrier - - [JSONB] - -custom_operator_name - - [VARCHAR(1000)] - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_version_id - - [UUID] - -duration - - [DOUBLE_PRECISION] - -end_date - - [TIMESTAMP] - -executor - - [VARCHAR(1000)] - -executor_config - - [BYTEA] - -external_executor_id - - [VARCHAR(250)] - -hostname - - [VARCHAR(1000)] - -map_index - - [INTEGER] - NOT NULL - -max_tries - - [INTEGER] - -next_kwargs - - [JSONB] - -next_method - - [VARCHAR(1000)] - -operator - - [VARCHAR(1000)] - -pid - - [INTEGER] - -pool - - [VARCHAR(256)] - NOT NULL - -pool_slots - - [INTEGER] - NOT NULL - -priority_weight - - [INTEGER] - -queue - - [VARCHAR(256)] - -queued_by_job_id - - [INTEGER] - -queued_dttm - - [TIMESTAMP] - -rendered_map_index - - [VARCHAR(250)] - -run_id - - [VARCHAR(250)] - NOT NULL - -scheduled_dttm - - [TIMESTAMP] - -span_status - - [VARCHAR(250)] - NOT NULL - -start_date - - [TIMESTAMP] - -state - - [VARCHAR(20)] - -task_display_name - - [VARCHAR(2000)] - -task_id - - [VARCHAR(250)] - NOT NULL - -trigger_id - - [INTEGER] - -trigger_timeout - - 
[TIMESTAMP] - -try_number - - [INTEGER] - NOT NULL - -unixname - - [VARCHAR(1000)] - -updated_at - - [TIMESTAMP] + +task_instance_history + +task_instance_id + + [UUID] + NOT NULL + +context_carrier + + [JSONB] + +custom_operator_name + + [VARCHAR(1000)] + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_version_id + + [UUID] + +duration + + [DOUBLE_PRECISION] + +end_date + + [TIMESTAMP] + +executor + + [VARCHAR(1000)] + +executor_config + + [BYTEA] + +external_executor_id + + [VARCHAR(250)] + +hostname + + [VARCHAR(1000)] + +map_index + + [INTEGER] + NOT NULL + +max_tries + + [INTEGER] + +next_kwargs + + [JSONB] + +next_method + + [VARCHAR(1000)] + +operator + + [VARCHAR(1000)] + +pid + + [INTEGER] + +pool + + [VARCHAR(256)] + NOT NULL + +pool_slots + + [INTEGER] + NOT NULL + +priority_weight + + [INTEGER] + +queue + + [VARCHAR(256)] + +queued_by_job_id + + [INTEGER] + +queued_dttm + + [TIMESTAMP] + +rendered_map_index + + [VARCHAR(250)] + +run_id + + [VARCHAR(250)] + NOT NULL + +scheduled_dttm + + [TIMESTAMP] + +span_status + + [VARCHAR(250)] + NOT NULL + +start_date + + [TIMESTAMP] + +state + + [VARCHAR(20)] + +task_display_name + + [VARCHAR(2000)] + +task_id + + [VARCHAR(250)] + NOT NULL + +trigger_id + + [INTEGER] + +trigger_timeout + + [TIMESTAMP] + +try_number + + [INTEGER] + NOT NULL + +unixname + + [VARCHAR(1000)] + +updated_at + + [TIMESTAMP] task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 task_instance--task_instance_history - -0..N -1 + +0..N +1 + + + +rendered_task_instance_fields + +rendered_task_instance_fields + +dag_id + + [VARCHAR(250)] + NOT NULL + +map_index + + [INTEGER] + NOT NULL + +run_id + + [VARCHAR(250)] + NOT NULL + +task_id + + [VARCHAR(250)] + NOT NULL + +k8s_pod_yaml + + [JSON] + +rendered_fields + + [JSON] + NOT NULL + + + +task_instance--rendered_task_instance_fields + +0..N +1 + + + +task_instance--rendered_task_instance_fields + +0..N +1 + + + +task_instance--rendered_task_instance_fields + +0..N +1 + + + +task_instance--rendered_task_instance_fields + +0..N +1 - + dag_bundle - -dag_bundle - -name - - [VARCHAR(250)] - NOT NULL - -active - - [BOOLEAN] - -last_refreshed - - [TIMESTAMP] - -version - - [VARCHAR(200)] + +dag_bundle + +name + + [VARCHAR(250)] + NOT NULL + +active + + [BOOLEAN] + +last_refreshed + + [TIMESTAMP] + +signed_url_template + + [VARCHAR(200)] + +template_params + + [JSON] + +version + + [VARCHAR(200)] - + dag - -dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -asset_expression - - [JSON] - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(200)] - -dag_display_name - - [VARCHAR(2000)] - -description - - [TEXT] - -fileloc - - [VARCHAR(2000)] - -has_import_errors - - [BOOLEAN] - -has_task_concurrency_limits - - [BOOLEAN] - NOT NULL - -is_paused - - [BOOLEAN] - -is_stale - - [BOOLEAN] - -last_expired - - [TIMESTAMP] - -last_parsed_time - - [TIMESTAMP] - -max_active_runs - - [INTEGER] - -max_active_tasks - - [INTEGER] - NOT NULL - -max_consecutive_failed_dag_runs - - [INTEGER] - NOT NULL - -next_dagrun - - [TIMESTAMP] - -next_dagrun_create_after - - [TIMESTAMP] - -next_dagrun_data_interval_end - - [TIMESTAMP] - -next_dagrun_data_interval_start - - [TIMESTAMP] - -owners - - [VARCHAR(2000)] - -relative_fileloc - - [VARCHAR(2000)] - -timetable_description - - [VARCHAR(1000)] - -timetable_summary - - [TEXT] + +dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +asset_expression + + [JSON] + +bundle_name + + 
[VARCHAR(250)] + +bundle_version + + [VARCHAR(200)] + +dag_display_name + + [VARCHAR(2000)] + +deadline + + [JSON] + +description + + [TEXT] + +fileloc + + [VARCHAR(2000)] + +has_import_errors + + [BOOLEAN] + +has_task_concurrency_limits + + [BOOLEAN] + NOT NULL + +is_paused + + [BOOLEAN] + +is_stale + + [BOOLEAN] + +last_expired + + [TIMESTAMP] + +last_parsed_time + + [TIMESTAMP] + +max_active_runs + + [INTEGER] + +max_active_tasks + + [INTEGER] + NOT NULL + +max_consecutive_failed_dag_runs + + [INTEGER] + NOT NULL + +next_dagrun + + [TIMESTAMP] + +next_dagrun_create_after + + [TIMESTAMP] + +next_dagrun_data_interval_end + + [TIMESTAMP] + +next_dagrun_data_interval_start + + [TIMESTAMP] + +owners + + [VARCHAR(2000)] + +relative_fileloc + + [VARCHAR(2000)] + +timetable_description + + [VARCHAR(1000)] + +timetable_summary + + [TEXT] - + dag_bundle--dag - -0..N -{0,1} + +0..N +{0,1} - + dag--dag_schedule_asset_alias_reference - -0..N -1 + +0..N +1 - + dag--dag_schedule_asset_reference - -0..N -1 + +0..N +1 - + dag--task_outlet_asset_reference - -0..N -1 + +0..N +1 + + + +dag--task_inlet_asset_reference + +0..N +1 - + dag--asset_dag_run_queue - -0..N -1 + +0..N +1 + + + +dag_version + +dag_version + +id + + [UUID] + NOT NULL + +bundle_name + + [VARCHAR(250)] + +bundle_version + + [VARCHAR(250)] + +created_at + + [TIMESTAMP] + NOT NULL + +dag_id + + [VARCHAR(250)] + NOT NULL + +last_updated + + [TIMESTAMP] + NOT NULL + +version_number + + [INTEGER] + NOT NULL + + + +dag--dag_version + +0..N +1 - + dag_schedule_asset_name_reference - -dag_schedule_asset_name_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_name_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL - + dag--dag_schedule_asset_name_reference - -0..N -1 + +0..N +1 - + dag_schedule_asset_uri_reference - -dag_schedule_asset_uri_reference - -dag_id - - [VARCHAR(250)] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL + +dag_schedule_asset_uri_reference + +dag_id + + [VARCHAR(250)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL - + dag--dag_schedule_asset_uri_reference - -0..N -1 - - - -dag_version - -dag_version - -id - - [UUID] - NOT NULL - -bundle_name - - [VARCHAR(250)] - -bundle_version - - [VARCHAR(250)] - -created_at - - [TIMESTAMP] - NOT NULL - -dag_id - - [VARCHAR(250)] - NOT NULL - -last_updated - - [TIMESTAMP] - NOT NULL - -version_number - - [INTEGER] - NOT NULL - - - -dag--dag_version - -0..N -1 + +0..N +1 - + dag_tag - -dag_tag - -dag_id - - [VARCHAR(250)] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +dag_tag + +dag_id + + [VARCHAR(250)] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL - + dag--dag_tag - -0..N -1 + +0..N +1 - + dag_owner_attributes - -dag_owner_attributes - -dag_id - - [VARCHAR(250)] - NOT NULL - -owner - - [VARCHAR(500)] - NOT NULL - -link - - [VARCHAR(500)] - NOT NULL + +dag_owner_attributes + +dag_id + + [VARCHAR(250)] + NOT NULL + +owner + + [VARCHAR(500)] + NOT NULL + +link + + [VARCHAR(500)] + NOT NULL - + dag--dag_owner_attributes - -0..N -1 + +0..N +1 - + dag_warning - -dag_warning - -dag_id - - [VARCHAR(250)] - NOT NULL - -warning_type - - [VARCHAR(50)] - NOT NULL - -message - - [TEXT] - NOT NULL - -timestamp - - [TIMESTAMP] - NOT NULL + +dag_warning + +dag_id + + [VARCHAR(250)] + NOT NULL + +warning_type + + [VARCHAR(50)] + NOT NULL 
[Regenerated database ERD SVG (text content flattened in extraction). Substantive schema changes visible in this hunk: a new dag_favorite table (dag_id [VARCHAR(250)] NOT NULL, user_id [VARCHAR(250)] NOT NULL) with a dag--dag_favorite 0..N-to-1 edge; a new triggering_user_name [VARCHAR(512)] column on both dag_run and backfill; the deadline node block (id, callback, callback_kwargs, dag_id, dagrun_id, deadline) and its dag--deadline edge disappear at this position, while the dag_run--deadline edge is still re-rendered below, so the table appears to persist elsewhere in the regenerated SVG. The remaining -/+ pairs (dag_warning, dag_version--task_instance, dag_run, dag_code, serialized_dag, dagrun_asset_event, backfill_dag_run, dag_run_note, log_template, alembic_version) are re-renders of unchanged nodes and edges.]
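The schema deltas behind that diagram regeneration can be read off the ERD text: a `dag_favorite` association table plus a `triggering_user_name` column on `dag_run` and `backfill`. As an illustration only (the actual Alembic revision ships elsewhere in the Airflow tree, and its revision IDs, constraint names, nullability, and foreign-key details may differ), the changes correspond to a migration along these lines:

```python
# Hypothetical sketch of the migration the updated ERD implies; not the real
# Airflow revision. Revision identifiers and down_revision are omitted here.
import sqlalchemy as sa
from alembic import op


def upgrade():
    # dag_run and backfill both gain a triggering_user_name column
    # (nullable is an assumption; the ERD shows no NOT NULL marker).
    op.add_column("dag_run", sa.Column("triggering_user_name", sa.String(512), nullable=True))
    op.add_column("backfill", sa.Column("triggering_user_name", sa.String(512), nullable=True))
    # New association table backing per-user dag favorites; the FK target is
    # inferred from the dag--dag_favorite edge in the diagram.
    op.create_table(
        "dag_favorite",
        sa.Column("dag_id", sa.String(250), nullable=False),
        sa.Column("user_id", sa.String(250), nullable=False),
        sa.ForeignKeyConstraint(["dag_id"], ["dag.dag_id"]),
        sa.PrimaryKeyConstraint("dag_id", "user_id"),
    )


def downgrade():
    op.drop_table("dag_favorite")
    op.drop_column("backfill", "triggering_user_name")
    op.drop_column("dag_run", "triggering_user_name")
```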
diff --git a/airflow-core/docs/img/asset-scheduled-dags.png b/airflow-core/docs/img/asset-scheduled-dags.png
deleted file mode 100644
index 919ef21c4b9a4..0000000000000
Binary files a/airflow-core/docs/img/asset-scheduled-dags.png and /dev/null differ
diff --git a/airflow-core/docs/img/assets.png b/airflow-core/docs/img/assets.png
deleted file mode 100644
index 990e3f0c15c15..0000000000000
Binary files a/airflow-core/docs/img/assets.png and /dev/null differ
diff --git a/airflow-core/docs/img/audit_log.png b/airflow-core/docs/img/audit_log.png
deleted file mode 100644
index 062fbaa533e74..0000000000000
Binary files a/airflow-core/docs/img/audit_log.png and /dev/null differ
diff --git a/airflow-core/docs/img/backfill.png b/airflow-core/docs/img/backfill.png
deleted file mode 100644
index 2abab8235d1a7..0000000000000
Binary files a/airflow-core/docs/img/backfill.png and /dev/null differ
diff --git a/airflow-core/docs/img/basic-dag.png b/airflow-core/docs/img/basic-dag.png
deleted file mode 100644
index 595e555885ed8..0000000000000
Binary files a/airflow-core/docs/img/basic-dag.png and /dev/null differ
diff --git a/airflow-core/docs/img/branch_note.png b/airflow-core/docs/img/branch_note.png
deleted file mode 100644
index 409874c30fcb1..0000000000000
Binary files a/airflow-core/docs/img/branch_note.png and /dev/null differ
diff --git a/airflow-core/docs/img/branch_with_trigger.png b/airflow-core/docs/img/branch_with_trigger.png
deleted file mode 100644
index c2f11ea6bb926..0000000000000
Binary files a/airflow-core/docs/img/branch_with_trigger.png and /dev/null differ
diff --git a/airflow-core/docs/img/branch_without_trigger.png b/airflow-core/docs/img/branch_without_trigger.png
deleted file mode 100644
index 39b5d9593ada1..0000000000000
Binary files a/airflow-core/docs/img/branch_without_trigger.png and /dev/null differ
diff --git a/airflow-core/docs/img/cluster_activity.png b/airflow-core/docs/img/cluster_activity.png
deleted file mode 100644
index 7263d07d8542a..0000000000000
Binary files a/airflow-core/docs/img/cluster_activity.png and /dev/null differ
diff --git a/airflow-core/docs/img/code.png b/airflow-core/docs/img/code.png
deleted file mode 100644
index a9e2867469aee..0000000000000
Binary files a/airflow-core/docs/img/code.png and /dev/null differ
diff --git a/airflow-core/docs/img/dags.png b/airflow-core/docs/img/dags.png
deleted file mode 100644
index 457dad04f9c08..0000000000000
Binary files a/airflow-core/docs/img/dags.png and /dev/null differ
diff --git a/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.md5sum b/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.md5sum
index ac3e24d8483b8..d108e6b94b972 100644
--- a/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.md5sum
+++ b/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.md5sum
@@ -1 +1 @@
-5b82cba489898a46dcfe5f458eeee33b
+cdcfa104c489e0e679894e344084c061
diff --git a/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.png b/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.png
index 35f3f418f2df4..2a677c0dc9b24 100644
Binary files a/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.png and b/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.png differ
diff --git a/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.py b/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.py
index 453d17267c8c6..b38c633cfde86 100644
--- a/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.py
+++ b/airflow-core/docs/img/diagram_auth_manager_airflow_architecture.py
@@ -14,6 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "rich>=12.4.4",
+#   "diagrams>=0.23.4",
+# ]
+# ///
 from __future__ import annotations

 from pathlib import Path
@@ -25,7 +32,13 @@
 MY_DIR = Path(__file__).parent
 MY_FILENAME = Path(__file__).with_suffix("").name
-PYTHON_MULTIPROCESS_LOGO = MY_DIR.parents[1] / "diagrams" / "python_multiprocess_logo.png"
+AIRFLOW_SOURCES_ROOT = MY_DIR.parents[2]
+DIAGRAMS_DIR = AIRFLOW_SOURCES_ROOT / "devel-common" / "src" / "docs" / "diagrams"
+PYTHON_MULTIPROCESS_LOGO = DIAGRAMS_DIR / "python_multiprocess_logo.png"
+PACKAGES_IMAGE = DIAGRAMS_DIR / "packages.png"
+DATABASE_IMAGE = DIAGRAMS_DIR / "database.png"
+MULTIPLE_FILES_IMAGE = DIAGRAMS_DIR / "multiple_files.png"
+CONFIG_FILE = DIAGRAMS_DIR / "config_file.png"

 console = Console(width=400, color_system="standard")
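The `# /// script` header added to each of these diagram generators is PEP 723 inline script metadata: it declares the interpreter requirement and dependencies inside the file itself, so the script can run standalone without a pre-built project environment. A minimal sketch of the pattern, assuming a PEP 723-aware runner such as uv (the file name and dependency pin below are illustrative, not taken from this diff):

```python
# example_diagram.py -- hypothetical standalone script using PEP 723 metadata.
# /// script
# requires-python = ">=3.11"
# dependencies = [
#   "rich>=12.4.4",
# ]
# ///
from rich.console import Console

# Same console setup these generator scripts use.
console = Console(width=400, color_system="standard")
console.print("[green]diagram generated[/green]")
```

Invoking `uv run example_diagram.py` reads the embedded block, resolves the listed dependencies into a throwaway environment, and executes the script, which is presumably why the generators gain this header.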
diff --git a/airflow-core/docs/img/diagram_basic_airflow_architecture.md5sum b/airflow-core/docs/img/diagram_basic_airflow_architecture.md5sum
index 810a76ed84d78..7e45d83de2a6b 100644
--- a/airflow-core/docs/img/diagram_basic_airflow_architecture.md5sum
+++ b/airflow-core/docs/img/diagram_basic_airflow_architecture.md5sum
@@ -1 +1 @@
-cc2aca72cb388d28842e539f599d373c
+77f976811a58e56d80a27df345b0d94c
diff --git a/airflow-core/docs/img/diagram_basic_airflow_architecture.png b/airflow-core/docs/img/diagram_basic_airflow_architecture.png
index 94a0590419f33..d1a04da23e627 100644
Binary files a/airflow-core/docs/img/diagram_basic_airflow_architecture.png and b/airflow-core/docs/img/diagram_basic_airflow_architecture.png differ
diff --git a/airflow-core/docs/img/diagram_basic_airflow_architecture.py b/airflow-core/docs/img/diagram_basic_airflow_architecture.py
index 7d77cb321eee8..a5185fcca9b85 100644
--- a/airflow-core/docs/img/diagram_basic_airflow_architecture.py
+++ b/airflow-core/docs/img/diagram_basic_airflow_architecture.py
@@ -14,6 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "rich>=12.4.4",
+#   "diagrams>=0.23.4",
+# ]
+# ///
 from __future__ import annotations

 from pathlib import Path
@@ -26,9 +33,13 @@
 MY_DIR = Path(__file__).parent
 MY_FILENAME = Path(__file__).with_suffix("").name
-PACKAGES_IMAGE = MY_DIR.parents[1] / "diagrams" / "packages.png"
-DATABASE_IMAGE = MY_DIR.parents[1] / "diagrams" / "database.png"
-MULTIPLE_FILES_IMAGE = MY_DIR.parents[1] / "diagrams" / "multiple_files.png"
+AIRFLOW_SOURCES_ROOT = MY_DIR.parents[2]
+DIAGRAMS_DIR = AIRFLOW_SOURCES_ROOT / "devel-common" / "src" / "docs" / "diagrams"
+PYTHON_MULTIPROCESS_LOGO = DIAGRAMS_DIR / "python_multiprocess_logo.png"
+PACKAGES_IMAGE = DIAGRAMS_DIR / "packages.png"
+DATABASE_IMAGE = DIAGRAMS_DIR / "database.png"
+MULTIPLE_FILES_IMAGE = DIAGRAMS_DIR / "multiple_files.png"
+CONFIG_FILE = DIAGRAMS_DIR / "config_file.png"

 console = Console(width=400, color_system="standard")

@@ -73,7 +84,7 @@ def generate_basic_airflow_diagram():
     user >> Edge(color="blue", style="solid", reverse=False, label="install\n\n") >> plugins_and_packages

     with Cluster("UI"):
-        webserver = Python("Webserver")
+        webserver = Python("API Server")

     webserver >> Edge(color="black", style="solid", reverse=True, label="operate\n\n") >> user
diff --git a/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.md5sum b/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.md5sum
index 9b9ffcdc2a980..9a2e8b5175b45 100644
--- a/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.md5sum
+++ b/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.md5sum
@@ -1 +1 @@
-00f67a1e0cd073ba521da168dc80ccaa
+45f71f4dbb89f345bc5fe1fd8cf5c152
diff --git a/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.png b/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.png
index 9d8773877835a..d821e1e9834b5 100644
Binary files a/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.png and b/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.png differ
diff --git a/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.py b/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.py
index 9a33c5af05a49..9d6ce60078bf7 100644
--- a/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.py
+++ b/airflow-core/docs/img/diagram_dag_processor_airflow_architecture.py
@@ -14,6 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "rich>=12.4.4",
+#   "diagrams>=0.23.4",
+# ]
+# ///
 from __future__ import annotations

 from pathlib import Path
@@ -25,10 +32,13 @@
 MY_DIR = Path(__file__).parent
 MY_FILENAME = Path(__file__).with_suffix("").name
-PYTHON_MULTIPROCESS_LOGO = MY_DIR.parents[1] / "diagrams" / "python_multiprocess_logo.png"
-PACKAGES_IMAGE = MY_DIR.parents[1] / "diagrams" / "packages.png"
-DATABASE_IMAGE = MY_DIR.parents[1] / "diagrams" / "database.png"
-MULTIPLE_FILES_IMAGE = MY_DIR.parents[1] / "diagrams" / "multiple_files.png"
+AIRFLOW_SOURCES_ROOT = MY_DIR.parents[2]
+DIAGRAMS_DIR = AIRFLOW_SOURCES_ROOT / "devel-common" / "src" / "docs" / "diagrams"
+PYTHON_MULTIPROCESS_LOGO = DIAGRAMS_DIR / "python_multiprocess_logo.png"
+PACKAGES_IMAGE = DIAGRAMS_DIR / "packages.png"
+DATABASE_IMAGE = DIAGRAMS_DIR / "database.png"
+MULTIPLE_FILES_IMAGE = DIAGRAMS_DIR / "multiple_files.png"
+CONFIG_FILE = DIAGRAMS_DIR / "config_file.png"

 console = Console(width=400, color_system="standard")

@@ -62,7 +72,7 @@ def generate_dag_processor_airflow_diagram():
         schedulers = Custom("Scheduler(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())

     with Cluster("UI"):
-        webservers = Custom("Webserver(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
+        webservers = Custom("API Server(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())

     webservers >> Edge(color="black", style="solid", reverse=True, label="operate\n\n") >> operations_user
diff --git a/airflow-core/docs/img/diagram_distributed_airflow_architecture.md5sum b/airflow-core/docs/img/diagram_distributed_airflow_architecture.md5sum
index f0df07cedd4b0..bdb6dfd7b2df9 100644
--- a/airflow-core/docs/img/diagram_distributed_airflow_architecture.md5sum
+++ b/airflow-core/docs/img/diagram_distributed_airflow_architecture.md5sum
@@ -1 +1 @@
-887125381a232d742ab059c2049f3176
+e702f41492ce4856db9a092f4a01c89b
diff --git a/airflow-core/docs/img/diagram_distributed_airflow_architecture.png b/airflow-core/docs/img/diagram_distributed_airflow_architecture.png
index b1b46d3d3c7cd..751f912a4ba87 100644
Binary files a/airflow-core/docs/img/diagram_distributed_airflow_architecture.png and b/airflow-core/docs/img/diagram_distributed_airflow_architecture.png differ
diff --git a/airflow-core/docs/img/diagram_distributed_airflow_architecture.py b/airflow-core/docs/img/diagram_distributed_airflow_architecture.py
index 831a9783a85e0..264fc67e7fb92 100644
--- a/airflow-core/docs/img/diagram_distributed_airflow_architecture.py
+++ b/airflow-core/docs/img/diagram_distributed_airflow_architecture.py
@@ -14,6 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "rich>=12.4.4",
+#   "diagrams>=0.23.4",
+# ]
+# ///
 from __future__ import annotations

 from pathlib import Path
@@ -25,10 +32,13 @@
 MY_DIR = Path(__file__).parent
 MY_FILENAME = Path(__file__).with_suffix("").name
-PYTHON_MULTIPROCESS_LOGO = MY_DIR.parents[1] / "diagrams" / "python_multiprocess_logo.png"
-PACKAGES_IMAGE = MY_DIR.parents[1] / "diagrams" / "packages.png"
-DATABASE_IMAGE = MY_DIR.parents[1] / "diagrams" / "database.png"
-MULTIPLE_FILES_IMAGE = MY_DIR.parents[1] / "diagrams" / "multiple_files.png"
+AIRFLOW_SOURCES_ROOT = MY_DIR.parents[2]
+DIAGRAMS_DIR = AIRFLOW_SOURCES_ROOT / "devel-common" / "src" / "docs" / "diagrams"
+PYTHON_MULTIPROCESS_LOGO = DIAGRAMS_DIR / "python_multiprocess_logo.png"
+PACKAGES_IMAGE = DIAGRAMS_DIR / "packages.png"
+DATABASE_IMAGE = DIAGRAMS_DIR / "database.png"
+MULTIPLE_FILES_IMAGE = DIAGRAMS_DIR / "multiple_files.png"
+CONFIG_FILE = DIAGRAMS_DIR / "config_file.png"

 console = Console(width=400, color_system="standard")

@@ -86,7 +96,7 @@ def generate_distributed_airflow_diagram():
     operations_user = User("Operations User")

     with Cluster("UI"):
-        webservers = Custom("Webserver(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
+        webservers = Custom("API Server(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())

     webservers >> Edge(color="black", style="solid", reverse=True, label="operate\n\n") >> operations_user
diff --git a/airflow-core/docs/img/diagram_multi_team_airflow_architecture.md5sum b/airflow-core/docs/img/diagram_multi_team_airflow_architecture.md5sum
index 824f07b0e0157..112c4c3144ce0 100644
--- a/airflow-core/docs/img/diagram_multi_team_airflow_architecture.md5sum
+++ b/airflow-core/docs/img/diagram_multi_team_airflow_architecture.md5sum
@@ -1 +1 @@
-c50412b8cf43b84752d4acbf9d7a40ee
+5d7f15fe6684789fde4c8b4ee5ae058d
diff --git a/airflow-core/docs/img/diagram_multi_team_airflow_architecture.png b/airflow-core/docs/img/diagram_multi_team_airflow_architecture.png
index 75ff970e95f12..1166a0cd2c70a 100644
Binary files a/airflow-core/docs/img/diagram_multi_team_airflow_architecture.png and b/airflow-core/docs/img/diagram_multi_team_airflow_architecture.png differ
diff --git a/airflow-core/docs/img/diagram_multi_team_airflow_architecture.py b/airflow-core/docs/img/diagram_multi_team_airflow_architecture.py
index 43e2e62c0604f..d6475bf92a589 100644
--- a/airflow-core/docs/img/diagram_multi_team_airflow_architecture.py
+++ b/airflow-core/docs/img/diagram_multi_team_airflow_architecture.py
@@ -14,6 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "rich>=12.4.4",
+#   "diagrams>=0.23.4",
+# ]
+# ///
 from __future__ import annotations

 from pathlib import Path
@@ -26,11 +33,13 @@
 MY_DIR = Path(__file__).parent
 MY_FILENAME = Path(__file__).with_suffix("").name
-PYTHON_MULTIPROCESS_LOGO = MY_DIR.parents[1] / "diagrams" / "python_multiprocess_logo.png"
-PACKAGES_IMAGE = MY_DIR.parents[1] / "diagrams" / "packages.png"
-DATABASE_IMAGE = MY_DIR.parents[1] / "diagrams" / "database.png"
-MULTIPLE_FILES_IMAGE = MY_DIR.parents[1] / "diagrams" / "multiple_files.png"
-CONFIG_FILE = MY_DIR.parents[1] / "diagrams" / "config_file.png"
+AIRFLOW_SOURCES_ROOT = MY_DIR.parents[2]
+DIAGRAMS_DIR = AIRFLOW_SOURCES_ROOT / "devel-common" / "src" / "docs" / "diagrams"
+PYTHON_MULTIPROCESS_LOGO = DIAGRAMS_DIR / "python_multiprocess_logo.png"
+PACKAGES_IMAGE = DIAGRAMS_DIR / "packages.png"
+DATABASE_IMAGE = DIAGRAMS_DIR / "database.png"
+MULTIPLE_FILES_IMAGE = DIAGRAMS_DIR / "multiple_files.png"
+CONFIG_FILE = DIAGRAMS_DIR / "config_file.png"

 console = Console(width=400, color_system="standard")

@@ -70,15 +79,13 @@ def generate_dag_processor_airflow_diagram():
         metadata_db = Custom("Metadata DB", DATABASE_IMAGE.as_posix())

     with Cluster("UI"):
-        webservers = Custom("Webserver(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
+        webservers = Custom("API Server(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
         auth_manager = Custom("Auth\nManager", PYTHON_MULTIPROCESS_LOGO.as_posix())

     organization_plugins_and_packages = Custom(
         "Common\nOrganization\nPlugins &\nPackages", PACKAGES_IMAGE.as_posix()
     )
-    organization_config_file = Custom("Config\nFile\nCommon\nOrganization", CONFIG_FILE.as_posix())
-
     internal_api = Custom("Task SDK\nGRPC API", PYTHON_MULTIPROCESS_LOGO.as_posix())
     (
         internal_api
@@ -104,7 +111,7 @@
     )

     deployment_manager_1 = User("Deployment\nManager\nTeam 1")
-    dag_author_1 = User("DAG Author\nTeamt 1")
+    dag_author_1 = User("DAG Author\nTeam 1")

     with Cluster("Team 1 Airflow Deployment", graph_attr={"bgcolor": "#AAAABB", "fontsize": "22"}):
         with Cluster("No DB access"):
@@ -113,9 +120,8 @@
             triggerer_1 = Custom("Triggerer(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
         with Cluster("Parsing"):
             dag_processors_1 = Custom("DAG\nProcessor(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
-        dag_files_1 = Custom("DAGS/Team 1", MULTIPLE_FILES_IMAGE.as_posix())
+        dag_files_1 = Custom("DAG Bundles\nTeam 1", MULTIPLE_FILES_IMAGE.as_posix())
         plugins_and_packages_1 = Custom("Plugins\n& Packages\nTenant 1", PACKAGES_IMAGE.as_posix())
-        config_file_1 = Custom("Config\nFile\nTeam 1", CONFIG_FILE.as_posix())

     operations_user_1 = User("Operations User\nTeam 1")
     deployment_manager_2 = User("Deployment\nManager\nTeam 2")
@@ -128,9 +134,8 @@
             triggerer_2 = Custom("Triggerer(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
         with Cluster("Parsing"):
             dag_processors_2 = Custom("DAG\nProcessor(s)", PYTHON_MULTIPROCESS_LOGO.as_posix())
-        dag_files_2 = Custom("DAGS/Team 2", MULTIPLE_FILES_IMAGE.as_posix())
+        dag_files_2 = Custom("DAG Bundles\nTeam 2", MULTIPLE_FILES_IMAGE.as_posix())
         plugins_and_packages_2 = Custom("Plugins\n& Packages\nTeam 2", PACKAGES_IMAGE.as_posix())
-        config_file_2 = Custom("Config\nFile\nTeam 2", CONFIG_FILE.as_posix())

     operations_user_2 = User("Operations User\nTeam 2")
     (
@@ -154,12 +159,6 @@
         >> plugins_and_packages_1
     )

-    (
-        deployment_manager_1
-        >> Edge(color="blue", style="dashed", reverse=False, label="configure\n\n")
-        >> config_file_1
-    )
-
     dag_author_2 >> Edge(color="brown", style="dashed", reverse=False, label="author\n\n") >> dag_files_2
     (
         deployment_manager_2
@@ -167,24 +166,12 @@
         >> plugins_and_packages_2
     )

-    (
-        deployment_manager_2
-        >> Edge(color="blue", style="solid", reverse=False, label="configure\n\n")
-        >> config_file_2
-    )
-
     (
         organization_plugins_and_packages
         - Edge(color="blue", style="solid", reverse=True, label="install\n\n")
         - organization_deployment_manager
     )

-    (
-        organization_config_file
-        - Edge(color="blue", style="solid", reverse=True, label="configure\n\n")
-        - organization_deployment_manager
-    )
-
     plugins_and_packages_1 >> Edge(style="invis") >> workers_1
     plugins_and_packages_1 >> Edge(style="invis") >> dag_processors_1
     plugins_and_packages_1 >> Edge(style="invis") >> triggerer_1
@@ -237,7 +224,6 @@
     dag_files_2 >> Edge(color="brown", style="solid", label="sync\n\n") >> triggerer_2

     # This is for better layout. Invisible edges are used to align the nodes better
-    schedulers - Edge(style="invis") - organization_config_file
     schedulers - Edge(style="invis") - organization_plugins_and_packages
     metadata_db - Edge(style="invis") - executor_1
     metadata_db - Edge(style="invis") - executor_2
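All of these generator scripts follow the same `diagrams`-package pattern visible in the hunks above: nodes built with `Custom(label, icon_path)`, grouped inside `Cluster` blocks, and wired together with `Edge` objects. A minimal self-contained sketch of that pattern (the icon path and labels are placeholders, not the repo's real assets; rendering also requires Graphviz to be installed):

```python
# Minimal sketch of the `diagrams` idiom these generator scripts use.
# "python_logo.png" is a placeholder icon file that must exist locally.
from diagrams import Cluster, Diagram, Edge
from diagrams.custom import Custom

with Diagram("example_architecture", show=False, filename="example_architecture"):
    with Cluster("UI"):
        api_server = Custom("API Server(s)", "python_logo.png")
    with Cluster("Scheduling"):
        scheduler = Custom("Scheduler(s)", "python_logo.png")
    # Exiting the Diagram context writes example_architecture.png to disk.
    scheduler >> Edge(color="black", style="solid", label="serve\n\n") >> api_server
```

The md5sum sidecar files changed alongside each PNG act as a cheap regeneration check: CI can compare the stored hash against the committed image without re-running Graphviz.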
diff --git a/airflow-core/docs/img/diagram_task_lifecycle.md5sum b/airflow-core/docs/img/diagram_task_lifecycle.md5sum
index 5b00beaa9d888..a19ff4c1e4d6d 100644
--- a/airflow-core/docs/img/diagram_task_lifecycle.md5sum
+++ b/airflow-core/docs/img/diagram_task_lifecycle.md5sum
@@ -1 +1 @@
-ef689d2a19fcef658dca32076bb0bfd4
+2f0f0308c52315026f2ac50939247857
diff --git a/airflow-core/docs/img/diagram_task_lifecycle.png b/airflow-core/docs/img/diagram_task_lifecycle.png
index 6f5f4e25a3026..837a9a7a06692 100644
Binary files a/airflow-core/docs/img/diagram_task_lifecycle.png and b/airflow-core/docs/img/diagram_task_lifecycle.png differ
diff --git a/airflow-core/docs/img/diagram_task_lifecycle.py b/airflow-core/docs/img/diagram_task_lifecycle.py
index 969a4fee324ec..3dc66c1635c35 100644
--- a/airflow-core/docs/img/diagram_task_lifecycle.py
+++ b/airflow-core/docs/img/diagram_task_lifecycle.py
@@ -14,6 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "rich>=12.4.4",
+#   "diagrams>=0.23.4",
+# ]
+# ///
 from __future__ import annotations

 from pathlib import Path
@@ -25,12 +32,21 @@
 MY_DIR = Path(__file__).parent
 MY_FILENAME = Path(__file__).with_suffix("").name
-COMPONENT_IMG = (MY_DIR.parents[1] / "diagrams" / "task_lifecycle" / "component.png").as_posix()
-CONDITION_IMG = (MY_DIR.parents[1] / "diagrams" / "task_lifecycle" / "condition.png").as_posix()
-SHARED_STATE_IMG = (MY_DIR.parents[1] / "diagrams" / "task_lifecycle" / "shared_state.png").as_posix()
-TERMINAL_STATE_IMG = (MY_DIR.parents[1] / "diagrams" / "task_lifecycle" / "terminal_state.png").as_posix()
-SENSOR_STATE_IMG = (MY_DIR.parents[1] / "diagrams" / "task_lifecycle" / "sensor_state.png").as_posix()
-DEFERRABLE_STATE_IMG = (MY_DIR.parents[1] / "diagrams" / "task_lifecycle" / "deferrable_state.png").as_posix()
+MY_DIR = Path(__file__).parent
+MY_FILENAME = Path(__file__).with_suffix("").name
+AIRFLOW_SOURCES_ROOT = MY_DIR.parents[2]
+DIAGRAMS_DIR = AIRFLOW_SOURCES_ROOT / "devel-common" / "src" / "docs" / "diagrams"
+PYTHON_MULTIPROCESS_LOGO = DIAGRAMS_DIR / "python_multiprocess_logo.png"
+PACKAGES_IMAGE = DIAGRAMS_DIR / "packages.png"
+DATABASE_IMAGE = DIAGRAMS_DIR / "database.png"
+MULTIPLE_FILES_IMAGE = DIAGRAMS_DIR / "multiple_files.png"
+CONFIG_FILE = DIAGRAMS_DIR / "config_file.png"
+COMPONENT_IMG = (DIAGRAMS_DIR / "task_lifecycle" / "component.png").as_posix()
+CONDITION_IMG = (DIAGRAMS_DIR / "task_lifecycle" / "condition.png").as_posix()
+SHARED_STATE_IMG = (DIAGRAMS_DIR / "task_lifecycle" / "shared_state.png").as_posix()
+TERMINAL_STATE_IMG = (DIAGRAMS_DIR / "task_lifecycle" / "terminal_state.png").as_posix()
+SENSOR_STATE_IMG = (DIAGRAMS_DIR / "task_lifecycle" / "sensor_state.png").as_posix()
+DEFERRABLE_STATE_IMG = (DIAGRAMS_DIR / "task_lifecycle" / "deferrable_state.png").as_posix()

 STATE_NODE_ATTRS = {"width": "4.16", "height": "1", "fontname": "Monospace", "fontsize": "20"}
 COMPONENT_NODE_ATTRS = {
diff --git a/airflow-core/docs/img/duration.png b/airflow-core/docs/img/duration.png
deleted file mode 100644
index 3f552b847a00f..0000000000000
Binary files a/airflow-core/docs/img/duration.png and /dev/null differ
diff --git a/airflow-core/docs/img/edge_label_example.png b/airflow-core/docs/img/edge_label_example.png
deleted file mode 100644
index 52c280fcb172c..0000000000000
Binary files a/airflow-core/docs/img/edge_label_example.png and /dev/null differ
diff --git a/airflow-core/docs/img/gantt.png b/airflow-core/docs/img/gantt.png
deleted file mode 100644
index 0a8afceeb21a3..0000000000000
Binary files a/airflow-core/docs/img/gantt.png and /dev/null differ
diff --git a/airflow-core/docs/img/graph.png b/airflow-core/docs/img/graph.png
deleted file mode 100644
index cbb17ca3adf1f..0000000000000
Binary files a/airflow-core/docs/img/graph.png and /dev/null differ
diff --git a/airflow-core/docs/img/grid.png b/airflow-core/docs/img/grid.png
deleted file mode 100644
index 8ac6f458c28b3..0000000000000
Binary files a/airflow-core/docs/img/grid.png and /dev/null differ
diff --git a/airflow-core/docs/img/grid_instance_details.png b/airflow-core/docs/img/grid_instance_details.png
deleted file mode 100644
index 8e8888654dee3..0000000000000
Binary files a/airflow-core/docs/img/grid_instance_details.png and /dev/null differ
diff --git a/airflow-core/docs/img/grid_mapped_task.png b/airflow-core/docs/img/grid_mapped_task.png
deleted file mode 100644
index 7e64923c8f5ed..0000000000000
Binary files
a/airflow-core/docs/img/grid_mapped_task.png and /dev/null differ diff --git a/airflow-core/docs/img/grid_run_details.png b/airflow-core/docs/img/grid_run_details.png deleted file mode 100644 index 68de1820a4922..0000000000000 Binary files a/airflow-core/docs/img/grid_run_details.png and /dev/null differ diff --git a/airflow-core/docs/img/grid_task_details.png b/airflow-core/docs/img/grid_task_details.png deleted file mode 100644 index 62d3f859e7889..0000000000000 Binary files a/airflow-core/docs/img/grid_task_details.png and /dev/null differ diff --git a/airflow-core/docs/img/grid_task_group.png b/airflow-core/docs/img/grid_task_group.png deleted file mode 100644 index b69656582f5d7..0000000000000 Binary files a/airflow-core/docs/img/grid_task_group.png and /dev/null differ diff --git a/airflow-core/docs/img/home.png b/airflow-core/docs/img/home.png deleted file mode 100644 index fa0b30c636b09..0000000000000 Binary files a/airflow-core/docs/img/home.png and /dev/null differ diff --git a/airflow-core/docs/img/mapping-simple-graph.png b/airflow-core/docs/img/mapping-simple-graph.png deleted file mode 100644 index 476e16c3f0163..0000000000000 Binary files a/airflow-core/docs/img/mapping-simple-graph.png and /dev/null differ diff --git a/airflow-core/docs/img/mapping-simple-grid.png b/airflow-core/docs/img/mapping-simple-grid.png deleted file mode 100644 index ce5bdaeaf8f0d..0000000000000 Binary files a/airflow-core/docs/img/mapping-simple-grid.png and /dev/null differ diff --git a/airflow-core/docs/img/operator_extra_link.png b/airflow-core/docs/img/operator_extra_link.png index e28dfb241bccb..30302de841dd0 100644 Binary files a/airflow-core/docs/img/operator_extra_link.png and b/airflow-core/docs/img/operator_extra_link.png differ diff --git a/airflow-core/docs/img/run_types.png b/airflow-core/docs/img/run_types.png deleted file mode 100644 index 6025508e8ccf5..0000000000000 Binary files a/airflow-core/docs/img/run_types.png and /dev/null differ diff --git a/airflow-core/docs/img/task_group.gif b/airflow-core/docs/img/task_group.gif deleted file mode 100644 index b7844f862dac1..0000000000000 Binary files a/airflow-core/docs/img/task_group.gif and /dev/null differ diff --git a/airflow-core/docs/img/task_instance_history.png b/airflow-core/docs/img/task_instance_history.png deleted file mode 100644 index d1dcf27a5e4f3..0000000000000 Binary files a/airflow-core/docs/img/task_instance_history.png and /dev/null differ diff --git a/airflow-core/docs/img/task_instance_history_log.png b/airflow-core/docs/img/task_instance_history_log.png deleted file mode 100644 index c1d83f3ed3216..0000000000000 Binary files a/airflow-core/docs/img/task_instance_history_log.png and /dev/null differ diff --git a/airflow-core/docs/img/ui-alert-message-markdown.png b/airflow-core/docs/img/ui-alert-message-markdown.png new file mode 100755 index 0000000000000..39b7843cad7b9 Binary files /dev/null and b/airflow-core/docs/img/ui-alert-message-markdown.png differ diff --git a/airflow-core/docs/img/ui-alert-message.png b/airflow-core/docs/img/ui-alert-message.png index 5f436f9a0aeba..2e419b8de8cb4 100644 Binary files a/airflow-core/docs/img/ui-alert-message.png and b/airflow-core/docs/img/ui-alert-message.png differ diff --git a/airflow-core/docs/img/ui-dark/admin_connections.png b/airflow-core/docs/img/ui-dark/admin_connections.png new file mode 100644 index 0000000000000..119a57b54d265 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/admin_connections.png differ diff --git 
a/airflow-core/docs/img/ui-dark/admin_connections_add.png b/airflow-core/docs/img/ui-dark/admin_connections_add.png new file mode 100644 index 0000000000000..ff2ebdbd1920e Binary files /dev/null and b/airflow-core/docs/img/ui-dark/admin_connections_add.png differ diff --git a/airflow-core/docs/img/ui-dark/asset_list_consuming_dags.png b/airflow-core/docs/img/ui-dark/asset_list_consuming_dags.png new file mode 100644 index 0000000000000..d9b40acc4a5ef Binary files /dev/null and b/airflow-core/docs/img/ui-dark/asset_list_consuming_dags.png differ diff --git a/airflow-core/docs/img/ui-dark/asset_scheduled_dags.png b/airflow-core/docs/img/ui-dark/asset_scheduled_dags.png new file mode 100644 index 0000000000000..55a2d37d535fb Binary files /dev/null and b/airflow-core/docs/img/ui-dark/asset_scheduled_dags.png differ diff --git a/airflow-core/docs/img/ui-dark/asset_view.png b/airflow-core/docs/img/ui-dark/asset_view.png new file mode 100644 index 0000000000000..a42d0eca5d144 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/asset_view.png differ diff --git a/airflow-core/docs/img/ui-dark/assets.png b/airflow-core/docs/img/ui-dark/assets.png new file mode 100644 index 0000000000000..14b3c19c17c75 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/assets.png differ diff --git a/airflow-core/docs/img/ui-dark/assets_graph.png b/airflow-core/docs/img/ui-dark/assets_graph.png new file mode 100644 index 0000000000000..5250eb4f3887f Binary files /dev/null and b/airflow-core/docs/img/ui-dark/assets_graph.png differ diff --git a/airflow-core/docs/img/ui-dark/backfill.png b/airflow-core/docs/img/ui-dark/backfill.png new file mode 100644 index 0000000000000..e84ea82c259f9 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/backfill.png differ diff --git a/airflow-core/docs/img/ui-dark/basic_dag.png b/airflow-core/docs/img/ui-dark/basic_dag.png new file mode 100644 index 0000000000000..f62203ea869b1 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/basic_dag.png differ diff --git a/airflow-core/docs/img/ui-dark/branch_note.png b/airflow-core/docs/img/ui-dark/branch_note.png new file mode 100644 index 0000000000000..6e27b68a86b04 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/branch_note.png differ diff --git a/airflow-core/docs/img/ui-dark/branch_with_trigger.png b/airflow-core/docs/img/ui-dark/branch_with_trigger.png new file mode 100644 index 0000000000000..04467635005c8 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/branch_with_trigger.png differ diff --git a/airflow-core/docs/img/ui-dark/branch_without_trigger.png b/airflow-core/docs/img/ui-dark/branch_without_trigger.png new file mode 100644 index 0000000000000..6c30d728ca892 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/branch_without_trigger.png differ diff --git a/airflow-core/docs/img/ui-dark/code.png b/airflow-core/docs/img/ui-dark/code.png new file mode 100644 index 0000000000000..58ab7bb78a86d Binary files /dev/null and b/airflow-core/docs/img/ui-dark/code.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_graph_all_dependencies.png b/airflow-core/docs/img/ui-dark/dag_graph_all_dependencies.png new file mode 100644 index 0000000000000..08f360734daa1 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_graph_all_dependencies.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_graph_external_conditions.png b/airflow-core/docs/img/ui-dark/dag_graph_external_conditions.png new file mode 100644 index 0000000000000..38537e1850bb7 Binary files /dev/null and 
b/airflow-core/docs/img/ui-dark/dag_graph_external_conditions.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_list.png b/airflow-core/docs/img/ui-dark/dag_list.png new file mode 100644 index 0000000000000..c7b378a459470 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_list.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_list_asset_condition_popup.png b/airflow-core/docs/img/ui-dark/dag_list_asset_condition_popup.png new file mode 100644 index 0000000000000..ef85b6fab459f Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_list_asset_condition_popup.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_code.png b/airflow-core/docs/img/ui-dark/dag_overview_code.png new file mode 100644 index 0000000000000..74243c170283c Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_code.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_dashboard.png b/airflow-core/docs/img/ui-dark/dag_overview_dashboard.png new file mode 100644 index 0000000000000..e851cbf07a580 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_dashboard.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_details.png b/airflow-core/docs/img/ui-dark/dag_overview_details.png new file mode 100644 index 0000000000000..c3c7222060011 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_details.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_events.png b/airflow-core/docs/img/ui-dark/dag_overview_events.png new file mode 100644 index 0000000000000..4523ca77fb809 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_events.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_graph.png b/airflow-core/docs/img/ui-dark/dag_overview_graph.png new file mode 100644 index 0000000000000..52e1f471bc5b2 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_graph.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_grid.png b/airflow-core/docs/img/ui-dark/dag_overview_grid.png new file mode 100644 index 0000000000000..2b866d8e6b7f2 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_grid.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_runs.png b/airflow-core/docs/img/ui-dark/dag_overview_runs.png new file mode 100644 index 0000000000000..cda308df78548 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_runs.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_overview_tasks.png b/airflow-core/docs/img/ui-dark/dag_overview_tasks.png new file mode 100644 index 0000000000000..5eaa24aaf7ab9 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_overview_tasks.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_run_code_hello_airflow.png b/airflow-core/docs/img/ui-dark/dag_run_code_hello_airflow.png new file mode 100644 index 0000000000000..291e4d906596b Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_run_code_hello_airflow.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_run_code_hello_world.png b/airflow-core/docs/img/ui-dark/dag_run_code_hello_world.png new file mode 100644 index 0000000000000..d244452e8b42c Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_run_code_hello_world.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_run_details.png b/airflow-core/docs/img/ui-dark/dag_run_details.png new file mode 100644 index 0000000000000..fe0a1648d035a Binary files /dev/null and 
b/airflow-core/docs/img/ui-dark/dag_run_details.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_run_graph.png b/airflow-core/docs/img/ui-dark/dag_run_graph.png new file mode 100644 index 0000000000000..09b4b897458a3 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_run_graph.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_run_task_instance_xcom.png b/airflow-core/docs/img/ui-dark/dag_run_task_instance_xcom.png new file mode 100644 index 0000000000000..e0d16c6461f91 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_run_task_instance_xcom.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_run_task_instances.png b/airflow-core/docs/img/ui-dark/dag_run_task_instances.png new file mode 100644 index 0000000000000..cebbae11c8b95 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_run_task_instances.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_task_instance_details.png b/airflow-core/docs/img/ui-dark/dag_task_instance_details.png new file mode 100644 index 0000000000000..d6dd08f9ec51e Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_task_instance_details.png differ diff --git a/airflow-core/docs/img/ui-dark/dag_task_instance_logs.png b/airflow-core/docs/img/ui-dark/dag_task_instance_logs.png new file mode 100644 index 0000000000000..253146a159462 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dag_task_instance_logs.png differ diff --git a/airflow-core/docs/img/ui-dark/dags.png b/airflow-core/docs/img/ui-dark/dags.png new file mode 100644 index 0000000000000..56458caedef62 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/dags.png differ diff --git a/airflow-core/docs/img/ui-dark/edge_label_example.png b/airflow-core/docs/img/ui-dark/edge_label_example.png new file mode 100644 index 0000000000000..70cba3d1c6479 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/edge_label_example.png differ diff --git a/airflow-core/docs/img/ui-dark/events.png b/airflow-core/docs/img/ui-dark/events.png new file mode 100644 index 0000000000000..3dbcfcdcbc88e Binary files /dev/null and b/airflow-core/docs/img/ui-dark/events.png differ diff --git a/airflow-core/docs/img/ui-dark/graph.png b/airflow-core/docs/img/ui-dark/graph.png new file mode 100644 index 0000000000000..3679c7bad4705 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/graph.png differ diff --git a/airflow-core/docs/img/ui-dark/graph_dependencies.png b/airflow-core/docs/img/ui-dark/graph_dependencies.png new file mode 100644 index 0000000000000..ff9f681e5c3c9 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/graph_dependencies.png differ diff --git a/airflow-core/docs/img/ui-dark/grid.png b/airflow-core/docs/img/ui-dark/grid.png new file mode 100644 index 0000000000000..6e81f7d968ae9 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/grid.png differ diff --git a/airflow-core/docs/img/ui-dark/grid_instance_details.png b/airflow-core/docs/img/ui-dark/grid_instance_details.png new file mode 100644 index 0000000000000..bad26eebf4647 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/grid_instance_details.png differ diff --git a/airflow-core/docs/img/ui-dark/grid_mapped_task.png b/airflow-core/docs/img/ui-dark/grid_mapped_task.png new file mode 100644 index 0000000000000..fce5369f4a2fc Binary files /dev/null and b/airflow-core/docs/img/ui-dark/grid_mapped_task.png differ diff --git a/airflow-core/docs/img/ui-dark/grid_run_details.png b/airflow-core/docs/img/ui-dark/grid_run_details.png new file mode 100644 
index 0000000000000..47aa122c011ee Binary files /dev/null and b/airflow-core/docs/img/ui-dark/grid_run_details.png differ diff --git a/airflow-core/docs/img/ui-dark/grid_task_details.png b/airflow-core/docs/img/ui-dark/grid_task_details.png new file mode 100644 index 0000000000000..f79a0ed22276c Binary files /dev/null and b/airflow-core/docs/img/ui-dark/grid_task_details.png differ diff --git a/airflow-core/docs/img/ui-dark/grid_task_group.png b/airflow-core/docs/img/ui-dark/grid_task_group.png new file mode 100644 index 0000000000000..e22e632853ee5 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/grid_task_group.png differ diff --git a/airflow-core/docs/img/ui-dark/home.png b/airflow-core/docs/img/ui-dark/home.png new file mode 100644 index 0000000000000..deb203d4ba3cf Binary files /dev/null and b/airflow-core/docs/img/ui-dark/home.png differ diff --git a/airflow-core/docs/img/ui-dark/home_dark.png b/airflow-core/docs/img/ui-dark/home_dark.png new file mode 100644 index 0000000000000..6436ff4338499 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/home_dark.png differ diff --git a/airflow-core/docs/img/ui-dark/mapping_simple_graph.png b/airflow-core/docs/img/ui-dark/mapping_simple_graph.png new file mode 100644 index 0000000000000..bfca63bfbcb24 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/mapping_simple_graph.png differ diff --git a/airflow-core/docs/img/ui-dark/run_types.png b/airflow-core/docs/img/ui-dark/run_types.png new file mode 100644 index 0000000000000..5816513b5d60d Binary files /dev/null and b/airflow-core/docs/img/ui-dark/run_types.png differ diff --git a/airflow-core/docs/img/ui-dark/task_group.gif b/airflow-core/docs/img/ui-dark/task_group.gif new file mode 100644 index 0000000000000..88e74965ddede Binary files /dev/null and b/airflow-core/docs/img/ui-dark/task_group.gif differ diff --git a/airflow-core/docs/img/ui-dark/task_instance_history.png b/airflow-core/docs/img/ui-dark/task_instance_history.png new file mode 100644 index 0000000000000..4755ac4038cf0 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/task_instance_history.png differ diff --git a/airflow-core/docs/img/ui-dark/task_instance_history_log.png b/airflow-core/docs/img/ui-dark/task_instance_history_log.png new file mode 100644 index 0000000000000..36a45b06bdd6b Binary files /dev/null and b/airflow-core/docs/img/ui-dark/task_instance_history_log.png differ diff --git a/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-1.png b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-1.png new file mode 100644 index 0000000000000..abc253fa94cba Binary files /dev/null and b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-1.png differ diff --git a/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-2.png b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-2.png new file mode 100644 index 0000000000000..c7551ff142140 Binary files /dev/null and b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-2.png differ diff --git a/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-3.png b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-3.png new file mode 100644 index 0000000000000..9c6c33ce9c2db Binary files /dev/null and b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-3.png differ diff --git a/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-4.png b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-4.png new file mode 100644 index 0000000000000..f415de98148a4 Binary files /dev/null and 
b/airflow-core/docs/img/ui-dark/trigger-dag-tutorial-form-4.png differ diff --git a/airflow-core/docs/img/ui-light/asset_list_consuming_dags.png b/airflow-core/docs/img/ui-light/asset_list_consuming_dags.png new file mode 100644 index 0000000000000..e2967719b0fb5 Binary files /dev/null and b/airflow-core/docs/img/ui-light/asset_list_consuming_dags.png differ diff --git a/airflow-core/docs/img/ui-light/asset_scheduled_dags.png b/airflow-core/docs/img/ui-light/asset_scheduled_dags.png new file mode 100644 index 0000000000000..261ce4994ca8b Binary files /dev/null and b/airflow-core/docs/img/ui-light/asset_scheduled_dags.png differ diff --git a/airflow-core/docs/img/ui-light/asset_view.png b/airflow-core/docs/img/ui-light/asset_view.png new file mode 100644 index 0000000000000..69cc8ff5089ec Binary files /dev/null and b/airflow-core/docs/img/ui-light/asset_view.png differ diff --git a/airflow-core/docs/img/ui-light/assets.png b/airflow-core/docs/img/ui-light/assets.png new file mode 100644 index 0000000000000..23bec52c0e452 Binary files /dev/null and b/airflow-core/docs/img/ui-light/assets.png differ diff --git a/airflow-core/docs/img/ui-light/assets_graph.png b/airflow-core/docs/img/ui-light/assets_graph.png new file mode 100644 index 0000000000000..7e1c711a70e04 Binary files /dev/null and b/airflow-core/docs/img/ui-light/assets_graph.png differ diff --git a/airflow-core/docs/img/ui-light/backfill.png b/airflow-core/docs/img/ui-light/backfill.png new file mode 100644 index 0000000000000..5576d7948f4bd Binary files /dev/null and b/airflow-core/docs/img/ui-light/backfill.png differ diff --git a/airflow-core/docs/img/ui-light/basic_dag.png b/airflow-core/docs/img/ui-light/basic_dag.png new file mode 100644 index 0000000000000..458ee61544bba Binary files /dev/null and b/airflow-core/docs/img/ui-light/basic_dag.png differ diff --git a/airflow-core/docs/img/ui-light/branch_note.png b/airflow-core/docs/img/ui-light/branch_note.png new file mode 100644 index 0000000000000..0a3d923d1a906 Binary files /dev/null and b/airflow-core/docs/img/ui-light/branch_note.png differ diff --git a/airflow-core/docs/img/ui-light/branch_with_trigger.png b/airflow-core/docs/img/ui-light/branch_with_trigger.png new file mode 100644 index 0000000000000..eb171565321ad Binary files /dev/null and b/airflow-core/docs/img/ui-light/branch_with_trigger.png differ diff --git a/airflow-core/docs/img/ui-light/branch_without_trigger.png b/airflow-core/docs/img/ui-light/branch_without_trigger.png new file mode 100644 index 0000000000000..c2d726506c50a Binary files /dev/null and b/airflow-core/docs/img/ui-light/branch_without_trigger.png differ diff --git a/airflow-core/docs/img/ui-light/code.png b/airflow-core/docs/img/ui-light/code.png new file mode 100644 index 0000000000000..36c0cfb797687 Binary files /dev/null and b/airflow-core/docs/img/ui-light/code.png differ diff --git a/airflow-core/docs/img/ui-light/dag_graph_all_dependencies.png b/airflow-core/docs/img/ui-light/dag_graph_all_dependencies.png new file mode 100644 index 0000000000000..e091c5225c056 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_graph_all_dependencies.png differ diff --git a/airflow-core/docs/img/ui-light/dag_graph_external_conditions.png b/airflow-core/docs/img/ui-light/dag_graph_external_conditions.png new file mode 100644 index 0000000000000..1c4b2e0d69ecc Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_graph_external_conditions.png differ diff --git a/airflow-core/docs/img/ui-light/dag_list.png 
b/airflow-core/docs/img/ui-light/dag_list.png new file mode 100644 index 0000000000000..ceb1065dcc468 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_list.png differ diff --git a/airflow-core/docs/img/ui-light/dag_list_asset_condition_popup.png b/airflow-core/docs/img/ui-light/dag_list_asset_condition_popup.png new file mode 100644 index 0000000000000..785b91a033323 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_list_asset_condition_popup.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_code.png b/airflow-core/docs/img/ui-light/dag_overview_code.png new file mode 100644 index 0000000000000..6ee86c3f1216a Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_code.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_dashboard.png b/airflow-core/docs/img/ui-light/dag_overview_dashboard.png new file mode 100644 index 0000000000000..d864a89dd37f6 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_dashboard.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_details.png b/airflow-core/docs/img/ui-light/dag_overview_details.png new file mode 100644 index 0000000000000..cfb18a3cfca23 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_details.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_events.png b/airflow-core/docs/img/ui-light/dag_overview_events.png new file mode 100644 index 0000000000000..39213ece77c33 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_events.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_graph.png b/airflow-core/docs/img/ui-light/dag_overview_graph.png new file mode 100644 index 0000000000000..1ea3ee566c67d Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_graph.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_grid.png b/airflow-core/docs/img/ui-light/dag_overview_grid.png new file mode 100644 index 0000000000000..a5b7d8cdc4cce Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_grid.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_runs.png b/airflow-core/docs/img/ui-light/dag_overview_runs.png new file mode 100644 index 0000000000000..069d2b4488743 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_runs.png differ diff --git a/airflow-core/docs/img/ui-light/dag_overview_tasks.png b/airflow-core/docs/img/ui-light/dag_overview_tasks.png new file mode 100644 index 0000000000000..5db41309b8f21 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_overview_tasks.png differ diff --git a/airflow-core/docs/img/ui-light/dag_run_code_hello_airflow.png b/airflow-core/docs/img/ui-light/dag_run_code_hello_airflow.png new file mode 100644 index 0000000000000..69932d954bf4b Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_run_code_hello_airflow.png differ diff --git a/airflow-core/docs/img/ui-light/dag_run_code_hello_world.png b/airflow-core/docs/img/ui-light/dag_run_code_hello_world.png new file mode 100644 index 0000000000000..4b73719ad945b Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_run_code_hello_world.png differ diff --git a/airflow-core/docs/img/ui-light/dag_run_details.png b/airflow-core/docs/img/ui-light/dag_run_details.png new file mode 100644 index 0000000000000..b0a271e2342dc Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_run_details.png differ diff --git a/airflow-core/docs/img/ui-light/dag_run_graph.png 
b/airflow-core/docs/img/ui-light/dag_run_graph.png new file mode 100644 index 0000000000000..31dccef7612ee Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_run_graph.png differ diff --git a/airflow-core/docs/img/ui-light/dag_run_task_instance_xcom.png b/airflow-core/docs/img/ui-light/dag_run_task_instance_xcom.png new file mode 100644 index 0000000000000..ab2c387f13dbb Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_run_task_instance_xcom.png differ diff --git a/airflow-core/docs/img/ui-light/dag_run_task_instances.png b/airflow-core/docs/img/ui-light/dag_run_task_instances.png new file mode 100644 index 0000000000000..42c71c958dec1 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_run_task_instances.png differ diff --git a/airflow-core/docs/img/ui-light/dag_task_instance_details.png b/airflow-core/docs/img/ui-light/dag_task_instance_details.png new file mode 100644 index 0000000000000..e2a3be93226ba Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_task_instance_details.png differ diff --git a/airflow-core/docs/img/ui-light/dag_task_instance_logs.png b/airflow-core/docs/img/ui-light/dag_task_instance_logs.png new file mode 100644 index 0000000000000..9ed7f2824e560 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dag_task_instance_logs.png differ diff --git a/airflow-core/docs/img/ui-light/dags.png b/airflow-core/docs/img/ui-light/dags.png new file mode 100644 index 0000000000000..0c408515fb172 Binary files /dev/null and b/airflow-core/docs/img/ui-light/dags.png differ diff --git a/airflow-core/docs/img/ui-light/edge_label_example.png b/airflow-core/docs/img/ui-light/edge_label_example.png new file mode 100644 index 0000000000000..4a51ded7f4703 Binary files /dev/null and b/airflow-core/docs/img/ui-light/edge_label_example.png differ diff --git a/airflow-core/docs/img/ui-light/events.png b/airflow-core/docs/img/ui-light/events.png new file mode 100644 index 0000000000000..d27ea0366edbe Binary files /dev/null and b/airflow-core/docs/img/ui-light/events.png differ diff --git a/airflow-core/docs/img/ui-light/graph.png b/airflow-core/docs/img/ui-light/graph.png new file mode 100644 index 0000000000000..daca11ce001a2 Binary files /dev/null and b/airflow-core/docs/img/ui-light/graph.png differ diff --git a/airflow-core/docs/img/ui-light/graph_dependencies.png b/airflow-core/docs/img/ui-light/graph_dependencies.png new file mode 100644 index 0000000000000..8253dfdc613b7 Binary files /dev/null and b/airflow-core/docs/img/ui-light/graph_dependencies.png differ diff --git a/airflow-core/docs/img/ui-light/grid.png b/airflow-core/docs/img/ui-light/grid.png new file mode 100644 index 0000000000000..e0770a5eff093 Binary files /dev/null and b/airflow-core/docs/img/ui-light/grid.png differ diff --git a/airflow-core/docs/img/ui-light/grid_instance_details.png b/airflow-core/docs/img/ui-light/grid_instance_details.png new file mode 100644 index 0000000000000..ebe9a1011c409 Binary files /dev/null and b/airflow-core/docs/img/ui-light/grid_instance_details.png differ diff --git a/airflow-core/docs/img/ui-light/grid_mapped_task.png b/airflow-core/docs/img/ui-light/grid_mapped_task.png new file mode 100644 index 0000000000000..0ec788086ba78 Binary files /dev/null and b/airflow-core/docs/img/ui-light/grid_mapped_task.png differ diff --git a/airflow-core/docs/img/ui-light/grid_run_details.png b/airflow-core/docs/img/ui-light/grid_run_details.png new file mode 100644 index 0000000000000..9bf6542888c79 Binary files /dev/null and 
b/airflow-core/docs/img/ui-light/grid_run_details.png differ diff --git a/airflow-core/docs/img/ui-light/grid_task_details.png b/airflow-core/docs/img/ui-light/grid_task_details.png new file mode 100644 index 0000000000000..38c8ed66ef1a2 Binary files /dev/null and b/airflow-core/docs/img/ui-light/grid_task_details.png differ diff --git a/airflow-core/docs/img/ui-light/grid_task_group.png b/airflow-core/docs/img/ui-light/grid_task_group.png new file mode 100644 index 0000000000000..ae59f732da459 Binary files /dev/null and b/airflow-core/docs/img/ui-light/grid_task_group.png differ diff --git a/airflow-core/docs/img/ui-light/home.png b/airflow-core/docs/img/ui-light/home.png new file mode 100644 index 0000000000000..49d215825a8d7 Binary files /dev/null and b/airflow-core/docs/img/ui-light/home.png differ diff --git a/airflow-core/docs/img/ui-light/home_light.png b/airflow-core/docs/img/ui-light/home_light.png new file mode 100644 index 0000000000000..e74307bb770c5 Binary files /dev/null and b/airflow-core/docs/img/ui-light/home_light.png differ diff --git a/airflow-core/docs/img/ui-light/mapping_simple_graph.png b/airflow-core/docs/img/ui-light/mapping_simple_graph.png new file mode 100644 index 0000000000000..6216ce599f2b9 Binary files /dev/null and b/airflow-core/docs/img/ui-light/mapping_simple_graph.png differ diff --git a/airflow-core/docs/img/ui-light/run_types.png b/airflow-core/docs/img/ui-light/run_types.png new file mode 100644 index 0000000000000..5e97757c9df43 Binary files /dev/null and b/airflow-core/docs/img/ui-light/run_types.png differ diff --git a/airflow-core/docs/img/ui-light/task_group.gif b/airflow-core/docs/img/ui-light/task_group.gif new file mode 100644 index 0000000000000..743e86667d3cb Binary files /dev/null and b/airflow-core/docs/img/ui-light/task_group.gif differ diff --git a/airflow-core/docs/img/ui-light/task_instance_history.png b/airflow-core/docs/img/ui-light/task_instance_history.png new file mode 100644 index 0000000000000..27835950e7cca Binary files /dev/null and b/airflow-core/docs/img/ui-light/task_instance_history.png differ diff --git a/airflow-core/docs/img/ui-light/task_instance_history_log.png b/airflow-core/docs/img/ui-light/task_instance_history_log.png new file mode 100644 index 0000000000000..afcccd0f2898e Binary files /dev/null and b/airflow-core/docs/img/ui-light/task_instance_history_log.png differ diff --git a/airflow-core/docs/img/trigger-dag-tutorial-form-1.png b/airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-1.png similarity index 100% rename from airflow-core/docs/img/trigger-dag-tutorial-form-1.png rename to airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-1.png diff --git a/airflow-core/docs/img/trigger-dag-tutorial-form-2.png b/airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-2.png similarity index 100% rename from airflow-core/docs/img/trigger-dag-tutorial-form-2.png rename to airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-2.png diff --git a/airflow-core/docs/img/trigger-dag-tutorial-form-3.png b/airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-3.png similarity index 100% rename from airflow-core/docs/img/trigger-dag-tutorial-form-3.png rename to airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-3.png diff --git a/airflow-core/docs/img/trigger-dag-tutorial-form-4.png b/airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-4.png similarity index 100% rename from airflow-core/docs/img/trigger-dag-tutorial-form-4.png rename to 
airflow-core/docs/img/ui-light/trigger-dag-tutorial-form-4.png diff --git a/airflow-core/docs/index.rst b/airflow-core/docs/index.rst index a7a21e671c981..1b48a4a3b4450 100644 --- a/airflow-core/docs/index.rst +++ b/airflow-core/docs/index.rst @@ -32,6 +32,15 @@ Airflow workflows are defined entirely in Python. This "workflows as code" appro - **Extensible**: The Airflow framework includes a wide range of built-in operators and can be extended to fit your needs. - **Flexible**: Airflow leverages the `Jinja `_ templating engine, allowing rich customizations. +.. _task-sdk-docs: + +Task SDK +======== + +For Airflow Task SDK, see the standalone reference & tutorial site: + +:doc:`task-sdk:index` + Dags ----------------------------------------- @@ -71,16 +80,16 @@ Airflow parses the script, schedules the tasks, and executes them in the defined is displayed in the web interface: .. image:: /img/ui-dark/demo_graph_and_code_view.png - :alt: Demo DAG in the Graph View, showing the status of one DAG run along with DAG code. + :alt: Demo dag in the Graph View, showing the status of one dag run along with dag code. | -This examples uses a simple Bash command and Python function, but Airflow tasks can run virtually any code. You might use +This example uses a simple Bash command and Python function, but Airflow tasks can run virtually any code. You might use tasks to run a Spark job, move files between storage buckets, or send a notification email. Here's what that same dag looks like over time, with multiple runs: .. image:: /img/ui-dark/demo_grid_view_with_task_logs.png - :alt: Demo DAG in the Grid View, showing the status of all DAG runs, as well as logs for a task instance + :alt: Demo dag in the Grid View, showing the status of all dag runs, as well as logs for a task instance | @@ -88,7 +97,7 @@ Each column in the grid represents a single dag run. While the graph and grid vi several other views to help you monitor and troubleshoot workflows — such as the ``DAG Overview`` view: .. image:: /img/ui-dark/demo_dag_overview_with_failed_tasks.png - :alt: Overview of a complex DAG in the Grid View, showing the status of all DAG runs, as well as quick links to recently failed task logs + :alt: Overview of a complex dag in the Grid View, showing the status of all dag runs, as well as quick links to recently failed task logs | @@ -101,7 +110,7 @@ Why Airflow®? Airflow is a platform for orchestrating batch workflows. It offers a flexible framework with a wide range of built-in operators and makes it easy to integrate with new technologies. -If your workflows have a clear start and end and run on a schedule, they're a great fit for Airflow DAGs. +If your workflows have a clear start and end and run on a schedule, they're a great fit for Airflow dags. If you prefer coding over clicking, Airflow is built for you. Defining workflows as Python code provides several key benefits: @@ -111,7 +120,7 @@ If you prefer coding over clicking, Airflow is built for you. Defining workflows - **Extensibility**: Customize workflows using a large ecosystem of existing components — or build your own. Airflow's rich scheduling and execution semantics make it easy to define complex, recurring pipelines. From the web interface, -you can manually trigger DAGs, inspect logs, and monitor task status. You can also backfill DAG runs to process historical +you can manually trigger dags, inspect logs, and monitor task status. You can also backfill dag runs to process historical data, or rerun only failed tasks to minimize cost and time. 
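+
+For example, the same operations are available from the command line (a sketch only; ``example_dag`` is a placeholder dag id, and the exact commands can differ between Airflow versions):
+
+.. code-block:: bash
+
+    # Manually trigger a new run of the dag with id "example_dag"
+    airflow dags trigger example_dag
+
+    # Re-run only the failed task instances of that dag
+    airflow tasks clear --only-failed example_dag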
The Airflow platform is highly customizable. With the :doc:`public-airflow-interface` you can extend and adapt nearly @@ -124,7 +133,7 @@ others via the `community `_, `Slack start diff --git a/airflow-core/docs/installation/dependencies.rst b/airflow-core/docs/installation/dependencies.rst index 515b2a9d3c8db..e41e03ab46a41 100644 --- a/airflow-core/docs/installation/dependencies.rst +++ b/airflow-core/docs/installation/dependencies.rst @@ -23,7 +23,7 @@ Airflow extra dependencies The ``apache-airflow`` PyPI basic package only installs what's needed to get started. Additional packages can be installed depending on what will be useful in your -environment. For instance, if you don't need connectivity with Postgres, +environment. For instance, if you don't need connectivity with PostgreSQL, you won't have to go through the trouble of installing the ``postgres-devel`` yum package, or whatever equivalent applies on the distribution you are using. @@ -55,7 +55,7 @@ Just to prevent confusion of extras versus providers: Extras and providers are d though many extras are leading to installing providers. Extras are standard Python setuptools feature that allows to add additional set of dependencies as -optional features to "core" Apache Airflow. One of the type of such optional features are providers +optional features to "core" Apache Airflow. One such type of optional feature is provider packages, but not all optional features of Apache Airflow have corresponding providers. We are using the ``extras`` setuptools features to also install providers. diff --git a/airflow-core/docs/installation/index.rst b/airflow-core/docs/installation/index.rst index 492dd76c68ce8..f6a327444a27c 100644 --- a/airflow-core/docs/installation/index.rst +++ b/airflow-core/docs/installation/index.rst @@ -35,12 +35,12 @@ Installation of Airflow® Upgrading Upgrading to Airflow 3 -This page describes installations options that you might use when considering how to install Airflow®. +This page describes installation options that you might use when considering how to install Airflow®. Airflow consists of many components, often distributed among many physical or virtual machines, therefore installation of Airflow might be quite complex, depending on the options you choose. -You should also check-out the :doc:`Prerequisites ` that must be fulfilled when installing Airflow -as well as :doc:`Supported versions ` to know what are the policies for supporting +You should also check out the :doc:`Prerequisites ` that must be fulfilled when installing Airflow +as well as :doc:`Supported versions ` to learn about the policies for supporting Airflow, Python and Kubernetes. Airflow requires additional :doc:`Dependencies ` to be installed - which can be done @@ -68,9 +68,9 @@ More details: :doc:`installing-from-sources` **What are you expected to handle** -* You are expected to build and install airflow and its components on your own. +* You are expected to build and install Airflow and its components on your own. * You should develop and handle the deployment for all components of Airflow. -* You are responsible for setting up database, creating and managing database schema with ``airflow db`` commands, +* You are responsible for setting up the database, creating and managing database schema with ``airflow db`` commands, automated startup and recovery, maintenance, cleanup and upgrades of Airflow and the Airflow Providers. * You need to setup monitoring of your system allowing you to observe resources and react to problems.
* You are expected to configure and manage appropriate resources for the installation (memory, CPU, etc) based @@ -84,14 +84,14 @@ More details: :doc:`installing-from-sources` **Where to ask for help** -* The ``#user-troubleshooting`` channel on slack can be used for quick general troubleshooting questions. The +* The ``#user-troubleshooting`` channel on Slack can be used for quick general troubleshooting questions. The `GitHub discussions `__ if you look for longer discussion and have more information to share. -* The ``#user-best-practices`` channel on slack can be used to ask for and share best practices on using and deploying airflow. +* The ``#user-best-practices`` channel on Slack can be used to ask for and share best practices on using and deploying Airflow. * If you can provide description of a reproducible problem with Airflow software, you can open issue at `GitHub issues `_ -* If you want to contribute back to Airflow, the ``#contributors`` slack channel for building the Airflow itself +* If you want to contribute back to Airflow, use the ``#contributors`` Slack channel for building Airflow itself Using PyPI @@ -103,7 +103,7 @@ More details: :doc:`/installation/installing-from-pypi` * This installation method is useful when you are not familiar with Containers and Docker and want to install Apache Airflow on physical or virtual machines and you are used to installing and running software using custom - deployment mechanism. + deployment mechanisms. * The only officially supported mechanism of installation is via ``pip`` using constraint mechanisms. The constraint files are managed by Apache Airflow release managers to make sure that you can repeatably install Airflow from PyPI with all Providers and @@ -122,7 +122,7 @@ More details: :doc:`/installation/installing-from-pypi` * You are expected to install Airflow - all components of it - on your own. * You should develop and handle the deployment for all components of Airflow. -* You are responsible for setting up database, creating and managing database schema with ``airflow db`` commands, +* You are responsible for setting up the database, creating and managing database schema with ``airflow db`` commands, automated startup and recovery, maintenance, cleanup and upgrades of Airflow and Airflow Providers. * You need to setup monitoring of your system allowing you to observe resources and react to problems. * You are expected to configure and manage appropriate resources for the installation (memory, CPU, etc) based @@ -144,8 +144,8 @@ More details: :doc:`/installation/installing-from-pypi` * The ``#user-troubleshooting`` channel on Airflow Slack for quick general troubleshooting questions. The `GitHub discussions `__ if you look for longer discussion and have more information to share. -* The ``#user-best-practices`` channel on slack can be used to ask for and share best - practices on using and deploying airflow. +* The ``#user-best-practices`` channel on Slack can be used to ask for and share best + practices on using and deploying Airflow. * If you can provide description of a reproducible problem with Airflow software, you can open issue at `GitHub issues `__ @@ -162,7 +162,7 @@ running Airflow components in isolation from other software running on the same maintenance of dependencies. The images are built by Apache Airflow release managers and they use officially released packages from PyPI -and official constraint files- same that are used for installing Airflow from PyPI.
+and official constraint files - the same ones that are used for installing Airflow from PyPI. **Intended users** @@ -174,16 +174,16 @@ and official constraint files - same that are used for installing Airflow from Py * You are expected to be able to customize or extend Container/Docker images if you want to add extra dependencies. You are expected to put together a deployment built of several containers - (for example using docker-compose) and to make sure that they are linked together. -* You are responsible for setting up database, creating and managing database schema with ``airflow db`` commands, + (for example using ``docker-compose``) and to make sure that they are linked together. +* You are responsible for setting up the database, creating and managing database schema with ``airflow db`` commands, automated startup and recovery, maintenance, cleanup and upgrades of Airflow and the Airflow Providers. * You are responsible to manage your own customizations and extensions for your custom dependencies. With the Official Airflow Docker Images, upgrades of Airflow and Airflow Providers which are part of the reference image are handled by the community - you need to make sure to pick up - those changes when released by upgrading the base image. However, you are responsible in creating a + those changes when released by upgrading the base image. However, you are responsible for creating a pipeline of building your own custom images with your own added dependencies and Providers and need to repeat the customization step and building your own image when new version of Airflow image is released. -* You should choose the right deployment mechanism. There a number of available options of +* You should choose the right deployment mechanism. There are a number of available options for deployments of containers. You can use your own custom mechanism, custom Kubernetes deployments, custom Docker Compose, custom Helm charts etc., and you should choose it based on your experience and expectations. @@ -208,8 +208,8 @@ and official constraint files - same that are used for installing Airflow from Py * The ``#user-troubleshooting`` channel on Airflow Slack for quick general troubleshooting questions. The `GitHub discussions `__ if you look for longer discussion and have more information to share. -* The ``#user-best-practices`` channel on slack can be used to ask for and share best - practices on using and deploying airflow. +* The ``#user-best-practices`` channel on Slack can be used to ask for and share best + practices on using and deploying Airflow. * If you can provide description of a reproducible problem with Airflow software, you can open issue at `GitHub issues `__ @@ -246,7 +246,7 @@ More details: :doc:`helm-chart:index` * You are responsible to manage your own customizations and extensions for your custom dependencies. With the Official Airflow Docker Images, upgrades of Airflow and Airflow Providers which are part of the reference image are handled by the community - you need to make sure to pick up - those changes when released by upgrading the base image. However, you are responsible in creating a + those changes when released by upgrading the base image. However, you are responsible for creating a pipeline of building your own custom images with your own added dependencies and Providers and need to repeat the customization step and building your own image when new version of Airflow image is released.
* You need to setup monitoring of your system allowing you to observe resources and react to problems. @@ -267,8 +267,8 @@ More details: :doc:`helm-chart:index` * The ``#user-troubleshooting`` channel on Airflow Slack for quick general troubleshooting questions. The `GitHub discussions `__ if you look for longer discussion and have more information to share. -* The ``#user-best-practices`` channel on slack can be used to ask for and share best - practices on using and deploying airflow. +* The ``#user-best-practices`` channel on Slack can be used to ask for and share best + practices on using and deploying Airflow. * If you can provide description of a reproducible problem with Airflow software, you can open issue at `GitHub issues `__ @@ -347,7 +347,7 @@ The requirements that Airflow might need depend on many factors, including (but the technology/cloud/integration of monitoring etc. * Technical details of database, hardware, network, etc. that your deployment is running on * The complexity of the code you add to your DAGS, configuration, plugins, settings etc. (note, that - Airflow runs the code that DAG author and Deployment Manager provide) + Airflow runs the code that the Dag author and Deployment Manager provide) * The number and choice of providers you install and use (Airflow has more than 80 providers) that can be installed by choice of the Deployment Manager and using them might require more resources. * The choice of parameters that you use when tuning Airflow. Airflow has many configuration parameters @@ -372,15 +372,15 @@ control theory - where there are two types of systems: 2. Complex systems with multiple variables, that are hard to predict and where you need to monitor the system and adjust the knobs continuously to make sure the system is running smoothly. -Airflow (and generally any modern system running usually on cloud services, with multiple layers responsible -for resources as well multiple parameters to control their behaviour) is a complex system and they fall +Airflow (and generally any modern system usually running on cloud services, with multiple layers responsible +for resources as well as multiple parameters to control their behaviour) is a complex system and it falls much more in the second category. If you decide to run Airflow in production on your own, you should be prepared for the monitor/observe/adjust feedback loop to make sure the system is running smoothly. Having a good monitoring system that will allow you to monitor the system and adjust the parameters is a must to put that in practice. -There are few guidelines that you can use for optimizing your resource usage as well. The +There are a few guidelines that you can use for optimizing your resource usage as well. The :ref:`fine-tuning-scheduler` is a good starting point to fine-tune your scheduler, you can also follow the :ref:`best_practice` guide to make sure you are using Airflow in the most efficient way. diff --git a/airflow-core/docs/installation/installing-from-pypi.rst b/airflow-core/docs/installation/installing-from-pypi.rst index 542c7463fded4..d2d9d6507d019 100644 --- a/airflow-core/docs/installation/installing-from-pypi.rst +++ b/airflow-core/docs/installation/installing-from-pypi.rst @@ -31,20 +31,16 @@ Only ``pip`` installation is currently officially supported. While there are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as ``pip`` - especially when it comes to constraint vs. requirements management.
- Installing via ``Poetry`` or ``pip-tools`` is not currently supported. If you wish to install airflow + Installing via ``Poetry`` or ``pip-tools`` is not currently supported. If you wish to install Airflow using those tools you should use the constraints and convert them to appropriate format and workflow that your tool requires. - There are known issues with ``bazel`` that might lead to circular dependencies when using it to install - Airflow. Please switch to ``pip`` if you encounter such problems. ``Bazel`` community works on fixing - the problem in `this PR `_ so it might be that - newer versions of ``bazel`` will handle it. -Typical command to install airflow from scratch in a reproducible way from PyPI looks like below: +A typical command to install Airflow from scratch in a reproducible way from PyPI looks like below: .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" Typically, you can add other dependencies and providers as separate command after the reproducible @@ -63,12 +59,12 @@ Those are just examples, see further for more explanation why those are the best .. note:: Generally speaking, Python community established practice is to perform application installation in a - virtualenv created with ``virtualenv`` or ``venv`` tools. You can also use ``uv`` or ``pipx`` to install + virtual environment created with ``virtualenv`` or ``venv`` tools. You can also use ``uv`` or ``pipx`` to install Airflow in application dedicated virtual environment created for you. There are also other tools that can be used - to manage your virtualenv installation and you are free to choose how you are managing the environments. - Airflow has no limitation regarding to the tool of your choice when it comes to virtual environment. + to manage your virtual environment installation and you are free to choose how you are managing the environments. + Airflow has no limitation regarding the tool of your choice when it comes to virtual environments. - The only exception where you might consider not using virtualenv is when you are building a container + The only exception where you might consider not using a virtual environment is when you are building a container image with only Airflow installed - this is for example how Airflow is installed in the official Container image. @@ -84,10 +80,15 @@ Airflow® installation can be tricky because Airflow is both a library and an ap Libraries usually keep their dependencies open and applications usually pin them, but we should do neither and both at the same time. We decided to keep our dependencies as open as possible -(in ``pyproject.toml``) so users can install different version of libraries if needed. This means that +(in ``pyproject.toml``) so users can install different versions of libraries if needed. This means that from time to time plain ``pip install apache-airflow`` will not work or will produce an unusable Airflow installation. +.. warning:: + + As of Airflow 3.1, constraint files do not contain development dependencies such as ``pytest``, ``moto`` and + other tools that are only used in tests.
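+
+For instance (a sketch; the package names are only illustrative, pick whatever development tools you actually use), such tools would be installed as a separate, unconstrained step after the constrained Airflow install:
+
+.. code-block:: bash
+
+    # Constraint files pin Airflow's runtime dependencies only (Airflow 3.1+),
+    # so install development-only tools separately, without the --constraint flag
+    pip install pytest moto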
+ Reproducible Airflow installation ================================= @@ -96,7 +97,7 @@ In order to have a reproducible installation, we also keep a set of constraint f for each released version e.g. :subst-code:`constraints-|version|`. This way, we keep a tested set of dependencies at the moment of release. This provides you with the ability -of having the exact same installation of airflow + providers + dependencies as was known to be working +of having the exact same installation of Airflow + providers + dependencies as was known to be working at the moment of release - frozen set of dependencies for that version of Airflow. There is a separate constraints file for each version of Python that Airflow supports. @@ -109,14 +110,14 @@ You can create the URL to the file substituting the variables in the template be where: - ``AIRFLOW_VERSION`` - Airflow version (e.g. :subst-code:`|version|`) or ``main``, ``2-0``, for latest development version -- ``PYTHON_VERSION`` Python version e.g. ``3.9``, ``3.10`` +- ``PYTHON_VERSION`` Python version e.g. ``3.10``, ``3.11`` -The examples below assume that you want to use install airflow in a reproducible way with the ``celery`` extra, +The examples below assume that you want to install Airflow in a reproducible way with the ``celery`` extra, but you can pick your own set of extras and providers to install. .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" .. note:: @@ -135,7 +136,7 @@ Upgrading and installing dependencies (including providers) providers and other dependencies to other versions** You can, for example, install new versions of providers and dependencies after the release to use the latest -version and up-to-date with latest security fixes - even if you do not want upgrade airflow core version. +version and stay up-to-date with the latest security fixes - even if you do not want to upgrade the Airflow core version. Or you can downgrade some dependencies or providers if you want to keep previous versions for compatibility reasons. Installing such dependencies should be done without constraints as a separate pip command. @@ -147,7 +148,7 @@ performing dependency resolution. .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" pip install "apache-airflow==|version|" apache-airflow-providers-google==10.1.1 You can also downgrade or upgrade other dependencies this way - even if they are not compatible with @@ -155,7 +156,7 @@ those dependencies that are stored in the original constraints file: .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" pip install "apache-airflow[celery]==|version|" dbt-core==0.20.0 .. warning:: @@ -184,7 +185,7 @@ consistent and not conflicting. No broken requirements found.
-When you see such message and the exit code from ``pip check`` is 0, you can be sure, that there are no +When you see such a message and the exit code from ``pip check`` is 0, you can be sure that there are no conflicting dependencies in your environment. @@ -198,7 +199,7 @@ one provided by the community. .. code-block:: bash - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt" pip install "apache-airflow==|version|" dbt-core==0.20.0 pip freeze > my-constraints.txt @@ -211,7 +212,7 @@ a local constraints file: pip install "apache-airflow[celery]==|version|" --constraint "my-constraints.txt" -Similarly as in case of Airflow original constraints, you can also host your constraints at your own +As in the case of the original Airflow constraints, you can also host your constraints at your own repository or server and use it remotely from there. Fixing Constraints at release time @@ -219,7 +220,7 @@ Fixing Constraints at release time The released "versioned" constraints are mostly ``fixed`` when we release Airflow version and we only update them in exceptional circumstances. For example when we find out that the released constraints might prevent -Airflow from being installed consistently from the scratch. +Airflow from being installed consistently from scratch. In normal circumstances, the constraint files are not going to change if new version of Airflow dependencies are released - not even when those versions contain critical security fixes. @@ -260,7 +261,7 @@ providers in case they were released afterwards. Upgrading Airflow together with providers ========================================= -You can upgrade airflow together with extras (providers available at the time of the release of Airflow +You can upgrade Airflow together with extras (providers available at the time of the release of Airflow being installed. This will bring ``apache-airflow`` and all providers to the versions that were released and tested together when the version of Airflow you are installing was released. @@ -291,7 +292,7 @@ Constraints are only effective during the ``pip install`` command they were used It is the best practice to install apache-airflow in the same version as the one that comes from the original image. This way you can be sure that ``pip`` will not try to downgrade or upgrade apache -airflow while installing other requirements, which might happen in case you try to add a dependency +Airflow while installing other requirements, which might happen in case you try to add a dependency that conflicts with the version of apache-airflow that you are using: .. code-block:: bash @@ -314,20 +315,20 @@ Managing just Airflow core without providers ============================================ If you don't want to install any providers you have, just install or upgrade Apache Airflow, you can simply -install airflow in the version you need.
You can use the special ``constraints-no-providers`` constraints file, which is smaller and limits the dependencies to the core of Airflow only, however this can lead to conflicts if your environment already has some of the dependencies installed in different versions and in case you have other providers installed. This command, however, gives you the latest versions of -dependencies compatible with just airflow core at the moment Airflow was released. +dependencies compatible with just Airflow core at the moment Airflow was released. .. code-block:: bash :substitutions: AIRFLOW_VERSION=|version| PYTHON_VERSION="$(python -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')" - # For example: 3.9 + # For example: 3.10 CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-no-providers-${PYTHON_VERSION}.txt" - # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.9.txt + # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.10.txt pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" Troubleshooting @@ -345,13 +346,13 @@ ensure that ``~/.local/bin`` is in your ``PATH`` environment variable, and add i PATH=$PATH:~/.local/bin -You can also start airflow with ``python -m airflow`` +You can also start Airflow with ``python -m airflow`` Symbol not found: ``_Py_GetArgcArgv`` ===================================== If you see ``Symbol not found: _Py_GetArgcArgv`` while starting or importing ``airflow``, this may mean that you are using an incompatible version of Python. -For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.9``: ``/usr/local/opt/python@3.9/Frameworks/Python.framework/Versions/3.9``). +For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.10``: ``/usr/local/opt/python@3.10/Frameworks/Python.framework/Versions/3.10``). The crux of the issue is that a library Airflow depends on, ``setproctitle``, uses a non-public Python API which is not available from the standard installation ``/usr/local/opt/`` (which symlinks to a path under ``/usr/local/Cellar``). @@ -360,9 +361,9 @@ An easy fix is just to ensure you use a version of Python that has a dylib of th .. 
code-block:: bash - # Note: these instructions are for python3.9 but can be loosely modified for other versions - brew install python@3.9 - virtualenv -p /usr/local/opt/python@3.9/Frameworks/Python.framework/Versions/3.9/bin/python3 .toy-venv + # Note: these instructions are for python3.10 but can be loosely modified for other versions + brew install python@3.10 + virtualenv -p /usr/local/opt/python@3.10/Frameworks/Python.framework/Versions/3.10/bin/python3 .toy-venv source .toy-venv/bin/activate pip install apache-airflow python diff --git a/airflow-core/docs/installation/installing-from-sources.rst b/airflow-core/docs/installation/installing-from-sources.rst index 949d949df2ac8..bafd3df9b338a 100644 --- a/airflow-core/docs/installation/installing-from-sources.rst +++ b/airflow-core/docs/installation/installing-from-sources.rst @@ -26,18 +26,18 @@ Released packages This page describes downloading and verifying Airflow® version ``{{ airflow_version }}`` using officially released packages. You can also install ``Apache Airflow`` - as most Python packages - via :doc:`PyPI `. - You can choose different version of Airflow by selecting different version from the drop-down at + You can choose a different version of Airflow by selecting it from the drop-down at the top-left of the page. The ``source``, ``sdist`` and ``whl`` packages released are the "official" sources of installation that you can use if you want to verify the origin of the packages and want to verify checksums and signatures of the packages. The packages are available via the -`Official Apache Software Foundations Downloads `_ +`Official Apache Software Foundation Downloads `_ As of version 2.8 Airflow follows PEP 517/518 and uses ``pyproject.toml`` file to define build dependencies -and build process and it requires relatively modern versions of packaging tools to get airflow built from +and build process and it requires relatively modern versions of packaging tools to get Airflow built from local sources or ``sdist`` packages, as PEP 517 compliant build hooks are used to determine dynamic build -dependencies. In case of ``pip`` it means that at least version 22.1.0 is needed (released at the beginning of +dependencies. In case of ``pip``, it means that at least version 22.1.0 is needed (released at the beginning of 2022) to build or install Airflow from sources. This does not affect the ability of installing Airflow from released wheel packages. @@ -116,7 +116,7 @@ Example: The "Good signature from ..." is indication that the signatures are correct. Do not worry about the "not certified with a trusted signature" warning. Most of the certificates used -by release managers are self signed, that's why you get this warning. By importing the server in the +by release managers are self-signed, that's why you get this warning. By importing the server in the previous step and importing it via ID from ``KEYS`` page, you know that this is a valid Key already. For SHA512 sum check, download the relevant ``sha512`` and run the following: @@ -159,5 +159,5 @@ and SHA sum files with the script below: ls -la "${airflow_download_dir}" echo -Once you verify the files following the instructions from previous chapter you can remove the temporary +Once you verify the files following the instructions from the previous section, you can remove the temporary folder created.
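+
+For instance, a complete verification session might look like the following (a sketch only; the file names are illustrative and vary per release):
+
+.. code-block:: bash
+
+    # Import the release manager keys published on the KEYS page
+    gpg --import KEYS
+
+    # Verify the detached signature of the downloaded package
+    gpg --verify apache-airflow-3.0.0-source.tar.gz.asc apache-airflow-3.0.0-source.tar.gz
+
+    # Compute the SHA512 digest locally and compare it with the published one
+    shasum -a 512 apache-airflow-3.0.0-source.tar.gz
+    cat apache-airflow-3.0.0-source.tar.gz.sha512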
diff --git a/airflow-core/docs/installation/prerequisites.rst b/airflow-core/docs/installation/prerequisites.rst index 3342b63bf1572..1997cb8f19d9a 100644 --- a/airflow-core/docs/installation/prerequisites.rst +++ b/airflow-core/docs/installation/prerequisites.rst @@ -20,18 +20,17 @@ Prerequisites Airflow® is tested with: -* Python: 3.9, 3.10, 3.11, 3.12 +* Python: 3.10, 3.11, 3.12, 3.13 * Databases: - * PostgreSQL: 12, 13, 14, 15, 16 + * PostgreSQL: 13, 14, 15, 16, 17 * MySQL: 8.0, `Innovation `_ * SQLite: 3.15.0+ -* Kubernetes: 1.26, 1.27, 1.28, 1.29, 1.30 +* Kubernetes: 1.30, 1.31, 1.32, 1.33 -The minimum memory required we recommend Airflow to run with is 4GB, but the actual requirements depend -wildly on the deployment options you have +While we recommend a minimum of 4GB of memory for Airflow, the actual requirements heavily depend on your chosen deployment. .. warning:: @@ -43,17 +42,17 @@ wildly on the deployment options you have because the number of users who tried to use MariaDB for Airflow is very small. .. warning:: - SQLite is used in Airflow tests. Do not use it in production. We recommend + SQLite is used in Airflow tests. DO NOT use it in production. We recommend using the latest stable version of SQLite for local development. .. warning:: Airflow® currently can be run on POSIX-compliant Operating Systems. For development it is regularly - tested on fairly modern Linux Distros that our contributors use and recent versions of MacOS. + tested on fairly modern Linux distributions that our contributors use and recent versions of macOS. On Windows you can run it via WSL2 (Windows Subsystem for Linux 2) or via Linux Containers. The work to add Windows support is tracked via `#10388 `__ - but it is not a high priority. You should only use Linux-based distros as "Production" execution environment - as this is the only environment that is supported. The only distro that is used in our CI tests and that + but it is not a high priority. You should only use Linux-based distributions as your production environment + as this is the only environment that is supported. The only distribution that is used in our CI tests and that is used in the `Community managed DockerHub image `__ is ``Debian Bookworm``. diff --git a/airflow-core/docs/installation/supported-versions.rst b/airflow-core/docs/installation/supported-versions.rst index 6c3f8952b9568..a8ba1db336805 100644 --- a/airflow-core/docs/installation/supported-versions.rst +++ b/airflow-core/docs/installation/supported-versions.rst @@ -26,41 +26,42 @@ Apache Airflow® version life cycle: .. This table is automatically updated by pre-commit scripts/ci/pre_commit/supported_versions.py ..
Beginning of auto-generated table -========= ===================== ========= =============== ================= ================ -Version Current Patch/Minor State First Release Limited Support EOL/Terminated -========= ===================== ========= =============== ================= ================ -2 2.10.5 Supported Dec 17, 2020 TBD TBD -1.10 1.10.15 EOL Aug 27, 2018 Dec 17, 2020 June 17, 2021 -1.9 1.9.0 EOL Jan 03, 2018 Aug 27, 2018 Aug 27, 2018 -1.8 1.8.2 EOL Mar 19, 2017 Jan 03, 2018 Jan 03, 2018 -1.7 1.7.1.2 EOL Mar 28, 2016 Mar 19, 2017 Mar 19, 2017 -========= ===================== ========= =============== ================= ================ +========= ===================== ========= =============== ===================== ================ +Version Current Patch/Minor State First Release Limited Maintenance EOL/Terminated +========= ===================== ========= =============== ===================== ================ +3 3.0.4 Supported Apr 22, 2025 TBD TBD +2 2.11.0 Supported Dec 17, 2020 Oct 22, 2025 Apr 22, 2026 +1.10 1.10.15 EOL Aug 27, 2018 Dec 17, 2020 June 17, 2021 +1.9 1.9.0 EOL Jan 03, 2018 Aug 27, 2018 Aug 27, 2018 +1.8 1.8.2 EOL Mar 19, 2017 Jan 03, 2018 Jan 03, 2018 +1.7 1.7.1.2 EOL Mar 28, 2016 Mar 19, 2017 Mar 19, 2017 +========= ===================== ========= =============== ===================== ================ .. End of auto-generated table -Limited support versions will be supported with security and critical bug fix only. -EOL versions will not get any fixes nor support. +Limited support versions will be supported with security and critical bug fixes only. +EOL versions will not get any fixes or support. We **highly** recommend installing the latest Airflow release which has richer features. Support for Python and Kubernetes versions `````````````````````````````````````````` -As of Airflow 2.0 we agreed to certain rules we follow for Python and Kubernetes support. +For Airflow 2.0+ versions, we agreed to certain rules we follow for Python and Kubernetes support. They are based on the official release schedule of Python and Kubernetes, nicely summarized in the `Python Developer's Guide `_ and `Kubernetes version skew policy `_. 1. We drop support for Python and Kubernetes versions when they reach EOL. We drop support for those - EOL versions in main right after EOL date, and it is effectively removed when we release the + EOL versions in main right after the EOL date, and it is effectively removed when we release the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow For example for Python 3.6 it means that we drop support in main right after 23.12.2021, and the first MAJOR or MINOR version of Airflow released after will not have it. 2. The "oldest" supported version of Python/Kubernetes is the default one. "Default" is only meaningful in terms of "smoke tests" in CI PRs which are run using this default version and default reference - image available in DockerHub. Currently the ``apache/airflow:latest`` and ``apache/airflow:2.10.2`` images + image available in Docker Hub. Currently the ``apache/airflow:latest`` and ``apache/airflow:2.10.2`` images are Python 3.8 images, however, in the first MINOR/MAJOR release of Airflow released after 2024-10-14, they will become Python 3.9 images. 
diff --git a/airflow-core/docs/installation/upgrading.rst b/airflow-core/docs/installation/upgrading.rst index 675896d147770..f2be39dd4275d 100644 --- a/airflow-core/docs/installation/upgrading.rst +++ b/airflow-core/docs/installation/upgrading.rst @@ -52,7 +52,7 @@ In some cases the upgrade happens automatically - it depends if in your deployme built-in as post-install action. For example when you are using :doc:`helm-chart:index` with post-upgrade hooks enabled, the database upgrade happens automatically right after the new software is installed. Similarly all Airflow-As-A-Service solutions perform the upgrade automatically for you, -when you choose to upgrade airflow via their UI. +when you choose to upgrade Airflow via their UI. How to upgrade ============== @@ -74,7 +74,7 @@ you access to Airflow ``CLI`` :doc:`/howto/usage-cli` and the database. Offline SQL migration scripts ============================= If you want to run the upgrade script offline, you can use the ``-s`` or ``--show-sql-only`` flag -to get the SQL statements that would be executed. You may also specify the starting airflow version with the ``--from-version`` flag and the ending airflow version with the ``-n`` or ``--to-version`` flag. This feature is supported in Postgres and MySQL +to get the SQL statements that would be executed. You may also specify the starting Airflow version with the ``--from-version`` flag and the ending Airflow version with the ``-n`` or ``--to-version`` flag. This feature is supported in Postgres and MySQL from Airflow 2.0.0 onward. Sample usage for Airflow version 2.7.0 or greater: diff --git a/airflow-core/docs/installation/upgrading_to_airflow3.rst b/airflow-core/docs/installation/upgrading_to_airflow3.rst index 355ae58a9d340..64d5a02ed9ad0 100644 --- a/airflow-core/docs/installation/upgrading_to_airflow3.rst +++ b/airflow-core/docs/installation/upgrading_to_airflow3.rst @@ -18,69 +18,134 @@ Upgrading to Airflow 3 ======================= -Apache Airflow 3 is a major release. This guide walks you through the steps required to upgrade from Airflow 2.x to Airflow 3.0. +Apache Airflow 3 is a major release and contains :ref:`breaking changes`. This guide walks you through the steps required to upgrade from Airflow 2.x to Airflow 3.0. + +Understanding Airflow 3.x Architecture Changes +----------------------------------------------- + +Airflow 3.x introduces significant architectural changes that improve security, scalability, and maintainability. Understanding these changes helps you prepare for the upgrade and adapt your workflows accordingly. + +Airflow 2.x Architecture +^^^^^^^^^^^^^^^^^^^^^^^^ +.. image:: ../img/airflow-2-arch.png + :alt: Airflow 2.x architecture diagram showing scheduler, metadata database, and worker + :align: center + +- All components communicate directly with the Airflow metadata database. +- Airflow 2 was designed to run all components within the same network space: task code and task execution code (airflow package code that runs user code) run in the same process. +- Workers communicate directly with the Airflow database and execute all user code. +- User code could import sessions and perform malicious actions on the Airflow metadata database. +- The number of connections to the database was excessive, leading to scaling challenges. + +Airflow 3.x Architecture +^^^^^^^^^^^^^^^^^^^^^^^^ +.. 
image:: ../img/airflow-3-arch.png + :alt: Airflow 3.x architecture diagram showing the decoupled Execution API Server and worker subprocesses + :align: center + +- The API server is currently the sole access point to the metadata DB for tasks and workers. +- It supports several applications: the Airflow REST API, an internal API for the Airflow UI that hosts static JS, and an API for workers to interact with when executing TIs via the task execution interface. +- Workers communicate with the API server instead of directly with the database. +- The dag processor and triggerer use the task execution mechanism for their tasks, especially when they require variables or connections. + +Database Access Restrictions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +In Airflow 3, direct metadata database access from task code is now restricted. This is a key security and architectural improvement that affects how Dag authors interact with Airflow resources: + +- **No Direct Database Access**: Task code can no longer directly import and use Airflow database sessions or models. +- **API-Based Resource Access**: All runtime interactions (state transitions, heartbeats, XComs, and resource fetching) are handled through a dedicated Task Execution API. +- **Enhanced Security**: This ensures isolation and security by preventing malicious task code from accessing or modifying the Airflow metadata database. +- **Stable Interface**: The Task SDK provides a stable, forward-compatible interface for accessing Airflow resources without direct database dependencies. Step 1: Take care of prerequisites ---------------------------------- -- Make sure that you are on Airflow 2.7 or later. +- Make sure that you are on Airflow 2.7 or later. It is recommended to upgrade to the latest 2.x release first and then to Airflow 3. - Make sure that your Python version is in the supported list. Airflow 3.0.0 supports the following Python versions: Python 3.9, 3.10, 3.11 and 3.12. -- Ensure that you are not using SubDAGs. These were deprecated in Airflow 2.0 and removed in Airflow 3. -- For a complete list of breaking changes, which you should note before the upgrade, please check the breaking changes section below. +- Ensure that you are not using any features or functionality that have been :ref:`removed in Airflow 3`. + Step 2: Clean and back up your existing Airflow Instance --------------------------------------------------------- -- It is highly recommended to make a backup of your Airflow instance specifically including your Airflow metadata DB before starting the migration process. -- If you do not have a "hot backup" capability for your DB, you should do it after shutting down your Airflow instances, so that the backup of your database will be consistent. -- If you did not make a backup and your migration fails, you might end-up in a half-migrated state and restoring DB from backup and repeating the migration - might be the only easy way out. This can for example be caused by a broken network connection between your CLI and the database while the migration happens, so taking a - backup is an important precaution to avoid problems like this. -- A long running Airflow instance can accumulate a certain amount of silt, in the form of old database entries, which are no longer - required. This is typically in the form of old XCom data which is no longer required, and so on. As part of the Airflow 3 upgrade - process, there will be schema changes. Based on the size of the Airflow meta-database this can be somewhat time - consuming.
For a faster, safer migration, we recommend that you clean up your Airflow meta-database before the upgrade. - You can use ``airflow db clean`` command for that. - -Step 3: DAG Authors - Check your Airflow DAGs for compatibility +- It is highly recommended that you make a backup of your Airflow instance, specifically your Airflow metadata database, before starting the migration process. + + - If you do not have a "hot backup" capability for your database, you should do it after shutting down your Airflow instances, so that the backup of your database will be consistent. For example, if you don't turn off your Airflow instance, the backup of the database will not include all TaskInstances or DagRuns. + + - If you did not make a backup and your migration fails, you might end up in a half-migrated state. This can be caused by, for example, a broken network connection between your Airflow CLI and the database during the migration. Having a backup is an important precaution to avoid problems like this. + +- A long-running Airflow instance can accumulate a substantial amount of data that is no longer required (for example, old XCom data). Schema changes will be a part of the Airflow 3 + upgrade process. These schema changes can take a long time if the database is large. For a faster, safer migration, we recommend that you clean up your Airflow meta-database before the upgrade. + You can use the ``airflow db clean`` :ref:`Airflow CLI command` to trim your Airflow database. + +- Ensure that there are no errors related to dag processing, such as ``AirflowDagDuplicatedIdException``. You should + be able to run ``airflow dags reserialize`` with no errors. If you have to resolve errors from dag processing, + ensure you deploy your changes to your old instance prior to the upgrade, and wait until your dags have all been reprocessed + (and all errors gone) before you proceed with the upgrade. + +Step 3: Dag authors - Check your Airflow dags for compatibility ---------------------------------------------------------------- -To minimize friction for users upgrading from prior versions of Airflow, we have created a DAG upgrade check utility using `Ruff `_. +To minimize friction for users upgrading from prior versions of Airflow, we have created a dag upgrade check utility using `Ruff `_ combined with `AIR `_ rules. +The rules AIR301 and AIR302 indicate breaking changes in Airflow 3, while AIR311 and AIR312 highlight changes that are not currently breaking but are strongly recommended for updates. -Use the latest available ``ruff`` version to get updates to the rules but at the very least use ``0.11.6``: +The latest available ``ruff`` version will have the most up-to-date rules, but be sure to use at least version ``0.11.13``. The example below demonstrates how to check +for dag incompatibilities that need to be fixed before your dags will work as expected on Airflow 3. .. code-block:: bash - ruff check dag/ --select AIR301 + ruff check dags/ --select AIR301 --preview -This command above shows you all the errors which need to be fixed before these DAGs can be used on Airflow 3. +To preview the recommended fixes, run the following command: -Some of these changes are automatically fixable and you can also rerun the command above with the auto-fix option as shown below. +.. code-block:: bash -To preview the changes: + ruff check dags/ --select AIR301 --show-fixes --preview + +Some changes can be automatically fixed. To do so, run the following command: ..
code-block:: bash - ruff check dag/ --select AIR301 --show-fixes + ruff check dags/ --select AIR301 --fix --preview + -To auto-fix: +Some of the fixes are marked as unsafe. Unsafe fixes usually do not break dag code. They're marked as unsafe because they may change some runtime behavior. For more information, see `Fix Safety `_. +To trigger these fixes, run the following command: .. code-block:: bash - ruff check dag/ --select AIR301 --fix + ruff check dags/ --select AIR301 --fix --unsafe-fixes --preview + +.. note:: + Ruff has a strict policy about when a rule becomes stable. Until a rule is stable, you must use the ``--preview`` flag. + The progress of the Airflow Ruff rules becoming stable can be tracked in https://github.com/astral-sh/ruff/issues/17749 + That said, from the Airflow side the rules are perfectly fine to use. + +.. note:: -Step 4: Install the Standard Providers + In AIR rules, unsafe fixes involve changing import paths while keeping the name of the imported member the same. For instance, changing the import from ``from airflow.sensors.base_sensor_operator import BaseSensorOperator`` to ``from airflow.sdk.bases.sensor import BaseSensorOperator`` requires ruff to remove the original import before adding the new one. In contrast, safe fixes include changes to both the member name and the import path, such as changing ``from airflow.datasets import Dataset`` to ``from airflow.sdk import Asset``. These adjustments do not require ruff to remove the old import. To remove unused legacy imports, it is necessary to enable the ``unused-import`` rule (``F401``). + +You can also configure these flags through configuration files. See `Configuring Ruff `_ for details. + +Step 4: Install the Standard Provider -------------------------------------- -- Some of the commonly used Operators which were bundled as part of the Core Airflow OSS package such as the - Bash and Python Operators have now been split out into a separate package: ``apache-airflow-providers-standard``. -- For user convenience, this package can also be installed on Airflow 2.x versions, so that DAGs can be modified to reference these Operators from the Standard Provider package instead of Airflow Core. +- Some of the commonly used Operators which were bundled as part of the ``airflow-core`` package (for example ``BashOperator`` and ``PythonOperator``) + have now been split out into a separate package: ``apache-airflow-providers-standard``. +- For convenience, this package can also be installed on Airflow 2.x versions, so that DAGs can be modified to reference these Operators from the standard provider + package instead of Airflow Core. + +Step 5: Review custom operators for direct db access +---------------------------------------------------- +- In Airflow 3, operators cannot access the Airflow metadata database directly using database sessions. + If you have custom operators, review the code to make sure there is no direct database access. + You can follow examples in https://github.com/apache/airflow/issues/49187 to see how to modify your code if needed. -Step 5: Deployment Managers - Upgrade your Airflow Instance +Step 6: Deployment Managers - Upgrade your Airflow Instance ------------------------------------------------------------ -For an easier and safer upgrade process, we have also created a utility to upgrade your Airflow instance configuration as a deployment manager. +For an easier and safer upgrade process, we have also created a utility to upgrade your Airflow instance configuration.
The first step is to run this configuration check utility as shown below: @@ -97,32 +162,36 @@ This configuration utility can also update your configuration to automatically b airflow config update --fix -The biggest part of an Airflow upgrade is the database upgrade. The database upgrade process for Airflow 3 is the same as for Airflow 2.7 or later. - +The biggest part of an Airflow upgrade is the database upgrade. The database upgrade process for Airflow 3 is the same as for Airflow 2.7 or later: .. code-block:: bash airflow db migrate -You should now be able to start up your Airflow 3 instance. - +If you have plugins that use Flask-AppBuilder views (``appbuilder_views``), Flask-AppBuilder menu items (``appbuilder_menu_items``), or Flask blueprints (``flask_blueprints``), you will either need to convert +them to FastAPI apps or ensure you install the FAB provider, which provides a backwards-compatibility layer for Airflow 3. +Ideally, you should convert your plugins to the Airflow 3 plugin interface, i.e. external views (``external_views``), FastAPI apps (``fastapi_apps``) +and FastAPI middlewares (``fastapi_root_middlewares``). -Step 6: Changes to your startup scripts +Step 7: Changes to your startup scripts --------------------------------------- -- In Airflow 3, the Webserver has now become a generic API-server. The api-server can be started up using the following command: +In Airflow 3, the Webserver has become a generic API server. The API server can be started up using the following command: .. code-block:: bash airflow api-server -- The DAG processor must now be started independently, even for local or development setups. +The dag processor must now be started independently, even for local or development setups: .. code-block:: bash airflow dag-processor +You should now be able to start up your Airflow 3 instance. + +.. _breaking-changes: Breaking Changes ================ @@ -130,12 +199,13 @@ Breaking Changes Some capabilities which were deprecated in Airflow 2.x are not available in Airflow 3. These include: -- **SubDAGs**: Replaced by TaskGroups, Datasets, and Data Aware Scheduling. +- **SubDAGs**: Replaced by TaskGroups, Assets, and Data Aware Scheduling. - **Sequential Executor**: Replaced by LocalExecutor, which can be used with SQLite for local development use cases. +- **CeleryKubernetesExecutor and LocalKubernetesExecutor**: Replaced by `Multiple Executor Configuration `_ - **SLAs**: Deprecated and removed; Will be replaced by forthcoming `Deadline Alerts `_. -- **Subdir**: Used as an argument on many CLI commands (``--subdir`` or ``-S`` has been superseded by DAG bundles. -- **Following keys are no longer available in task context. If not replaced, will cause DAG errors:**: - +- **Subdir**: The ``--subdir`` (or ``-S``) argument used on many CLI commands has been superseded by :doc:`DAG bundles `. +- **REST API** (``/api/v1``) replaced: Use the modern FastAPI-based stable ``/api/v2`` instead; see :doc:`Airflow API v2 ` for details. +- **Some Airflow context variables**: The following keys are no longer available in a :ref:`task instance's context `. If not replaced, they will cause dag errors: - ``tomorrow_ds`` - ``tomorrow_ds_nodash`` - ``yesterday_ds`` @@ -148,11 +218,11 @@ These include: - ``next_ds_nodash`` - ``next_ds`` - ``execution_date`` - -- ``catchup_by_default`` is now ``False`` by default. -- ``create_cron_data_intervals`` is now ``False``.
This means that the ``CronTriggerTimetable`` will be used by default instead of the ``CronDataIntervalTimetable``
-- **Simple Auth** is now default ``auth_manager``. To continue using FAB as the Auth Manager, please install the FAB provider and set ``auth_manager`` to
+- The ``catchup_by_default`` dag parameter is now ``False`` by default.
+- The ``create_cron_data_intervals`` configuration is now ``False`` by default. This means that the ``CronTriggerTimetable`` will be used by default instead of the ``CronDataIntervalTimetable``.
+- **Simple Auth** is now the default ``auth_manager``. To continue using FAB as the Auth Manager, please install the FAB provider and set ``auth_manager`` to
 ``FabAuthManager``:

 .. code-block:: ini

     airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager

+- **AUTH API**: API routes defined in the auth manager are prefixed with the ``/auth`` route. URLs consumed outside of the application, such as OAuth redirect URLs, will have to be updated accordingly. For example, an OAuth redirect URL that was ``https:///oauth-authorized/google`` in Airflow 2.x will be ``https:///auth/oauth-authorized/google`` in Airflow 3.x.
diff --git a/airflow-core/docs/migrations-ref.rst b/airflow-core/docs/migrations-ref.rst
index 2f35cf2deecf4..0856550d666de 100644
--- a/airflow-core/docs/migrations-ref.rst
+++ b/airflow-core/docs/migrations-ref.rst
@@ -39,7 +39,37 @@ Here's the list of all the Database Migrations that are executed via when you ru
 +-------------------------+------------------+-------------------+--------------------------------------------------------------+
 | Revision ID             | Revises ID       | Airflow Version   | Description                                                  |
 +=========================+==================+===================+==============================================================+
-| ``29ce7909c52b`` (head) | ``959e216a3abb`` | ``3.0.0``         | Change TI table to have unique UUID id/pk per attempt.       |
+| ``a169942745c2`` (head) | ``808787349f22`` | ``3.1.0``         | Remove dag_id from Deadline.                                 |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``808787349f22``        | ``3bda03debd04`` | ``3.1.0``         | Modify deadline's callback schema.                           |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``3bda03debd04``        | ``f56f68b9e02f`` | ``3.1.0``         | Add url template and template params to DagBundleModel.      |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``f56f68b9e02f``        | ``09fa89ba1710`` | ``3.1.0``         | Add callback_state to deadline.                              |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``09fa89ba1710``        | ``40f7c30a228b`` | ``3.1.0``         | Add trigger_id to deadline.                                  |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``40f7c30a228b``        | ``ffdb0566c7c0`` | ``3.1.0``         | Add Human In the Loop Detail table.                          |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``ffdb0566c7c0``        | ``66a7743fe20e`` | ``3.1.0``         | Add dag_favorite table.                                      |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``66a7743fe20e``        | ``583e80dfcef4`` | ``3.1.0``         | Add triggering user to dag_run.
| ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``583e80dfcef4`` | ``3ac9e5732b1f`` | ``3.1.0`` | Add task_inlet_asset_reference table. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``3ac9e5732b1f`` | ``0242ac120002`` | ``3.1.0`` | Change the on-delete behaviour of | +| | | | task_instance.dag_version_id foreign key constraint to | +| | | | RESTRICT. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``0242ac120002`` | ``dfee8bd5d574`` | ``3.1.0`` | Rename Deadline column in the Deadline table from deadline | +| | | | to deadline_time and change its type from DateTime to UTC | +| | | | DateTime. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``dfee8bd5d574`` | ``fe199e1abd77`` | ``3.1.0`` | Add Deadline to Dag. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``fe199e1abd77`` | ``29ce7909c52b`` | ``3.0.3`` | Delete import errors. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``29ce7909c52b`` | ``959e216a3abb`` | ``3.0.0`` | Change TI table to have unique UUID id/pk per attempt. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | ``959e216a3abb`` | ``0e9519b56710`` | ``3.0.0`` | Rename ``is_active`` to ``is_stale`` column in ``dag`` | | | | | table. | diff --git a/airflow-core/docs/operators-and-hooks-ref.rst b/airflow-core/docs/operators-and-hooks-ref.rst index 511775d117eb4..fad6b04724f6c 100644 --- a/airflow-core/docs/operators-and-hooks-ref.rst +++ b/airflow-core/docs/operators-and-hooks-ref.rst @@ -1,3 +1,4 @@ + .. Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information @@ -50,20 +51,26 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. * - Operators - Guides - * - :mod:`airflow.operators.branch` - - + * - :mod:`airflow.providers.standard.operators.bash` + - :doc:`How to use ` + + * - :mod:`airflow.providers.standard.operators.python` + - :doc:`How to use ` + + * - :mod:`airflow.providers.standard.operators.datetime` + - :doc:`How to use ` * - :mod:`airflow.providers.standard.operators.empty` - - * - :mod:`airflow.operators.generic_transfer` - - + * - :mod:`airflow.providers.common.sql.operators.generic_transfer.GenericTransfer` + - :doc:`How to use ` * - :mod:`airflow.providers.standard.operators.latest_only` - - + - :doc:`How to use ` * - :mod:`airflow.providers.standard.operators.trigger_dagrun` - - + - :doc:`How to use ` **Sensors:** @@ -73,8 +80,23 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. 
* - Sensors
     - Guides

-   * - :mod:`airflow.sensors.base`
-     -
+   * - :mod:`airflow.providers.standard.sensors.bash`
+     - :doc:`How to use `
+
+   * - :mod:`airflow.providers.standard.sensors.python`
+     - :doc:`How to use `
+
+   * - :mod:`airflow.providers.standard.sensors.filesystem`
+     - :doc:`How to use `
+
+   * - :mod:`airflow.providers.standard.sensors.date_time`
+     - :doc:`How to use `
+
+   * - :mod:`airflow.providers.standard.sensors.external_task`
+     - :doc:`How to use `
+
+
 **Hooks:**

diff --git a/airflow-core/docs/public-airflow-interface.rst b/airflow-core/docs/public-airflow-interface.rst
index 4b87bd200b09b..32fc89a4173c4 100644
--- a/airflow-core/docs/public-airflow-interface.rst
+++ b/airflow-core/docs/public-airflow-interface.rst
@@ -15,6 +15,18 @@ specific language governing permissions and limitations
 under the License.

+=====================================
+Public Interface for Airflow 3.0+
+=====================================
+
+.. warning::
+
+    This documentation covers the Public Interface for Airflow 3.0+.
+
+    If you are using Airflow 2.x, please refer to the
+    `Airflow 2.11 Public Interface Documentation `_
+    for the legacy interface.
+
 Public Interface of Airflow
 ...........................
@@ -25,9 +37,32 @@ and extending Airflow capabilities by writing new executors, plugins, operators

 Public Interface can be useful for building custom tools and integrations
 with other systems, and for automating certain aspects of the Airflow workflow.

+The Airflow Task SDK, exposed through the :doc:`airflow.sdk namespace `, is the primary
+public interface for Dag authors and for task execution. Direct access to the metadata database
+from task code is no longer allowed. Instead, use the :doc:`Stable REST API `,
+`Python Client `_, or Task Context methods.
+
+For comprehensive Task SDK documentation, see the `Task SDK Reference `_.
+
 Using Airflow Public Interfaces
 ===============================

+.. note::
+
+    As of **Airflow 3.0**, users should use the ``airflow.sdk`` namespace as the official **Public Interface**, as defined in `AIP-72 `_.
+
+    Direct interaction with internal modules or the metadata database is not possible.
+    For stable, production-safe integration, it is recommended to use:
+
+    - The official **REST API**
+    - The **Python Client SDK** (``airflow-client-python``)
+    - The new **Task SDK** (``airflow.sdk``)
+
+    Related docs:
+    - `Release Notes 3.0 `_
+    - `Task SDK Overview `_
+
 The following are some examples of the public interface of Airflow:

 * When you are writing your own operators or hooks. This is commonly done when no hook or operator exists for your use case, or perhaps when one exists but you need to customize the behavior.
@@ -46,29 +81,82 @@ MAJOR version of Airflow. On the other hand, classes and methods starting with ``
 as protected Python methods) and ``__`` (also known as private Python methods)
 are not part of the Public Airflow Interface and might change at any time.

-You can also use Airflow's Public Interface via the `Stable REST API `_ (based on the
+You can also use Airflow's Public Interface via the :doc:`Stable REST API ` (based on the
 OpenAPI specification).
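As a quick illustration, here is a minimal sketch of calling the stable REST API from Python with the ``requests`` library. The host, the token-request payload fields, and the ``access_token`` response key below are illustrative assumptions — see the API security documentation for the exact token flow in your deployment:

.. code-block:: python

    import requests

    ENDPOINT_URL = "http://localhost:8080"  # illustrative host

    # Obtain a JWT token from the auth endpoint (payload fields are assumed here;
    # check the security/api docs for your auth manager's actual token flow).
    token = requests.post(
        f"{ENDPOINT_URL}/auth/token",
        headers={"Content-Type": "application/json"},
        json={"username": "admin", "password": "admin"},
    ).json()["access_token"]

    # Call the stable /api/v2 endpoint to list dags.
    dags = requests.get(
        f"{ENDPOINT_URL}/api/v2/dags",
        headers={"Authorization": f"Bearer {token}"},
    ).json()
    print(dags)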
For specific needs you can also use the
-`Airflow Command Line Interface (CLI) `_ though its behaviour might change
+:doc:`Airflow Command Line Interface (CLI) ` though its behaviour might change
 in details (such as output format and available flags) so if you want to rely on those in a programmatic
 way, the Stable REST API is recommended.

-Using the Public Interface for DAG Authors
+Using the Public Interface for Dag authors
 ==========================================

+The primary interface for Dag authors is the :doc:`airflow.sdk namespace `.
+This provides a stable, well-defined interface for creating DAGs and tasks that is not subject to internal
+implementation changes. The goal of this change is to decouple Dag authoring from Airflow internals (Scheduler,
+API Server, etc.), providing a version-agnostic, stable interface for writing and maintaining DAGs across Airflow versions.
+
+**Key Imports from airflow.sdk:**
+
+**Classes:**
+
+* ``Asset``
+* ``BaseHook``
+* ``BaseNotifier``
+* ``BaseOperator``
+* ``BaseOperatorLink``
+* ``BaseSensorOperator``
+* ``Connection``
+* ``Context``
+* ``DAG``
+* ``EdgeModifier``
+* ``Label``
+* ``ObjectStoragePath``
+* ``Param``
+* ``TaskGroup``
+* ``Variable``
+
+**Decorators and Functions:**
+
+* ``@asset``
+* ``@dag``
+* ``@setup``
+* ``@task``
+* ``@task_group``
+* ``@teardown``
+* ``chain``
+* ``chain_linear``
+* ``cross_downstream``
+* ``get_current_context``
+* ``get_parsing_context``
+
+**Migration from Airflow 2.x:**
+
+For detailed migration instructions from Airflow 2.x to 3.x, including import changes and other breaking changes,
+see the :doc:`Migration Guide `.
+
+For an exhaustive list of available classes, decorators, and functions, check ``airflow.sdk.__all__``.
+
+All DAGs should update imports to use ``airflow.sdk`` instead of referencing internal Airflow modules directly.
+Legacy import paths (e.g., ``airflow.models.dag.DAG``, ``airflow.decorators.task``) are deprecated and will be
+removed in a future Airflow version.
+
 Dags
-----
+====

 The DAG is Airflow's core entity that represents a recurring workflow. You can create a DAG by
-instantiating the :class:`~airflow.models.dag.DAG` class in your DAG file. You can also instantiate
-them via :class:`~airflow.models.dagbag.DagBag` class that reads dags from a file or a folder. Dags
-can also have parameters specified via :class:`~airflow.sdk.definitions.param.Param` class.
+instantiating the :class:`~airflow.sdk.DAG` class in your DAG file. Dags can also have parameters
+specified via the :class:`~airflow.sdk.Param` class.
+
+The recommended way to create DAGs is using the :func:`~airflow.sdk.dag` decorator
+from the airflow.sdk namespace, as illustrated in the sketch below.

 Airflow has a set of example dags that you can use to learn how to write dags.

 .. toctree::
     :includehidden:
     :glob:
+    :hidden:
     :maxdepth: 1

     _api/airflow/example_dags/index

@@ -77,69 +165,86 @@ You can read more about dags in :doc:`Dags `.

 References for the modules used in dags are here:

-.. toctree::
-    :includehidden:
-    :glob:
-    :maxdepth: 1
+.. note::
+    The airflow.sdk namespace provides the primary interface for Dag authors.
+    For detailed API documentation, see the `Task SDK Reference `_.

-    _api/airflow/models/dag/index
-    _api/airflow/models/dagbag/index
-    _api/airflow/models/param/index
+.. note::
+    The :class:`~airflow.models.dagbag.DagBag` class is used internally by Airflow for loading DAGs
+    from files and folders. Dag authors should use the :class:`~airflow.sdk.DAG` class from the
+    airflow.sdk namespace instead.
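For instance, here is a minimal sketch of a dag defined entirely through the ``airflow.sdk`` decorators; the dag id, schedule, and task payloads are illustrative only:

.. code-block:: python

    from datetime import datetime

    from airflow.sdk import dag, task


    @dag(dag_id="sdk_example", start_date=datetime(2025, 1, 1), schedule="@daily", catchup=False)
    def sdk_example():

        @task
        def extract() -> list[int]:
            # Illustrative payload; return values flow between tasks via XCom.
            return [1, 2, 3]

        @task
        def total(values: list[int]) -> int:
            return sum(values)

        # TaskFlow wiring: the return value of extract() feeds total().
        total(extract())


    sdk_example()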
-Properties of a :class:`~airflow.models.dagrun.DagRun` can also be referenced in things like :ref:`Templates `.
-
-.. toctree::
-    :includehidden:
-    :glob:
-    :maxdepth: 1
-
-    _api/airflow/models/dagrun/index
+.. note::
+    The :class:`~airflow.models.dagrun.DagRun` class is used internally by Airflow for DAG run
+    management. Dag authors should access DAG run information through the Task Context via
+    :func:`~airflow.sdk.get_current_context` or use the :class:`~airflow.sdk.types.DagRunProtocol`
+    interface.

 .. _pythonapi:operators:

 Operators
----------
+=========
+
+The base classes :class:`~airflow.sdk.BaseOperator` and :class:`~airflow.sdk.BaseSensorOperator` are public and may be extended to make new operators; both are importable
+from the airflow.sdk namespace.

-The base classes :class:`~airflow.models.baseoperator.BaseOperator` and :class:`~airflow.sensors.base.BaseSensorOperator` are public and may be extended to make new operators.

 Subclasses of BaseOperator which are published in Apache Airflow are public in *behavior* but not in *structure*. That is to say, the Operator's parameters and behavior are governed by semver but the methods are subject to change at any time.

 Task Instances
---------------
-
-Task instances are the individual runs of a single task in a DAG (in a DAG Run). They are available in the context
-passed to the execute method of the operators via the :class:`~airflow.models.taskinstance.TaskInstance` class.
-
-.. toctree::
-    :includehidden:
-    :glob:
-    :maxdepth: 1
+==============

-    _api/airflow/models/taskinstance/index
+Task instances are the individual runs of a single task in a DAG (in a DAG Run). Task instances are accessed through
+the Task Context via :func:`~airflow.sdk.get_current_context`. Direct database access is not possible.

+.. note::
+    Task Context is part of the airflow.sdk namespace.
+    For detailed API documentation, see the `Task SDK Reference `_.

 Task Instance Keys
-------------------
+==================

 Task instance keys are unique identifiers of task instances in a DAG (in a DAG Run). A key is a tuple that consists of
-``dag_id``, ``task_id``, ``run_id``, ``try_number``, and ``map_index``. The key of a task instance can be retrieved via
-:meth:`~airflow.models.taskinstance.TaskInstance.key`.
+``dag_id``, ``task_id``, ``run_id``, ``try_number``, and ``map_index``.

-.. toctree::
-    :includehidden:
-    :glob:
-    :maxdepth: 1
+Direct access to task instance keys via the :class:`~airflow.models.taskinstance.TaskInstance`
+model is no longer allowed from task code. Instead, use the Task Context via :func:`~airflow.sdk.get_current_context`
+to access task instance information.
+
+Example of accessing task instance information through Task Context:
+
+.. code-block:: python
+
+    from airflow.sdk import get_current_context
+
+
+    def my_task():
+        context = get_current_context()
+        ti = context["ti"]
+
+        dag_id = ti.dag_id
+        task_id = ti.task_id
+        run_id = ti.run_id
+        try_number = ti.try_number
+        map_index = ti.map_index
+
+        print(f"Task: {dag_id}.{task_id}, Run: {run_id}, Try: {try_number}, Map Index: {map_index}")
+
+.. note::
+    The :class:`~airflow.models.taskinstancekey.TaskInstanceKey` class is used internally by Airflow
+    for identifying task instances. Dag authors should access task instance information through the
+    Task Context via :func:`~airflow.sdk.get_current_context` instead.

-    _api/airflow/models/taskinstancekey/index

..
_pythonapi:hooks:

 Hooks
------
+=====

 Hooks are interfaces to external platforms and databases, implementing a common
 interface when possible and acting as building blocks for operators. All hooks
-are derived from :class:`~airflow.hooks.base.BaseHook`.
+are derived from :class:`~airflow.sdk.bases.hook.BaseHook`.

 Airflow has a set of Hooks that are considered public. You are free to extend their functionality
 by extending them:

@@ -147,19 +252,50 @@ by extending them:

 .. toctree::
     :includehidden:
     :glob:
+    :hidden:
     :maxdepth: 1

     _api/airflow/hooks/index

 Public Airflow utilities
-------------------------
+========================

-When writing or extending Hooks and Operators, DAG authors and developers can
+When writing or extending Hooks and Operators, Dag authors and developers can
 use the following classes:

-* The :class:`~airflow.models.connection.Connection`, which provides access to external service credentials and configuration.
-* The :class:`~airflow.models.variable.Variable`, which provides access to Airflow configuration variables.
-* The :class:`~airflow.models.xcom.XCom` which are used to access to inter-task communication data.
+* The :class:`~airflow.sdk.Connection`, which provides access to external service credentials and configuration.
+* The :class:`~airflow.sdk.Variable`, which provides access to Airflow configuration variables.
+* The :class:`~airflow.sdk.execution_time.xcom.XCom` class, which is used to access inter-task communication data.
+
+Connection and Variable operations should be performed through the Task Context using
+:func:`~airflow.sdk.get_current_context` and the task instance's methods, or through the airflow.sdk namespace.
+Direct database access to :class:`~airflow.models.connection.Connection` and :class:`~airflow.models.variable.Variable`
+models is no longer allowed from task code.
+
+Example of accessing Connections and Variables through Task Context:
+
+.. code-block:: python
+
+    from airflow.sdk import get_current_context
+
+
+    def my_task():
+        context = get_current_context()
+
+        conn = context["conn"]
+        my_connection = conn.get("my_connection_id")
+
+        var = context["var"]
+        my_variable = var.value.get("my_variable_name")
+
+Example of using airflow.sdk namespace directly:
+
+.. code-block:: python
+
+    from airflow.sdk import Connection, Variable
+
+    conn = Connection.get("my_connection_id")
+    var = Variable.get("my_variable_name")

 You can read more about the public Airflow utilities in :doc:`howto/connection`,
 :doc:`core-concepts/variables`, :doc:`core-concepts/xcoms`
@@ -167,18 +303,13 @@ You can read more about the public Airflow utilities in :doc:`howto/connection`,

 References for the classes used for the utilities are here:

-.. toctree::
-    :includehidden:
-    :glob:
-    :maxdepth: 1
-
-    _api/airflow/models/connection/index
-    _api/airflow/models/variable/index
-    _api/airflow/models/xcom/index
+.. note::
+    Connection, Variable, and XCom classes are now part of the airflow.sdk namespace.
+    For detailed API documentation, see the `Task SDK Reference `_.

 Public Exceptions
------------------
+=================

 When writing custom Operators and Hooks, you can handle and raise public Exceptions that Airflow
 exposes:

@@ -186,16 +317,18 @@ exposes:

 .. toctree::
     :includehidden:
     :glob:
+    :hidden:
     :maxdepth: 1

     _api/airflow/exceptions/index

 Public Utility classes
-----------------------
+======================

..
toctree::
     :includehidden:
     :glob:
+    :hidden:
     :maxdepth: 1

     _api/airflow/utils/state/index

@@ -215,7 +348,7 @@ that do not require plugins - you can read more about them in :doc:`howto/custom

 Here are the ways Plugins can be used to extend Airflow:

 Triggers
---------
+========

 Airflow uses Triggers to implement ``asyncio`` compatible Deferrable Operators.
 All Triggers derive from :class:`~airflow.triggers.base.BaseTrigger`.

 Airflow has a set of Triggers that are considered public. You are free to extend their functionality
 by extending them:

 .. toctree::
     :includehidden:
     :glob:
+    :hidden:
     :maxdepth: 1

     _api/airflow/triggers/index

@@ -233,7 +367,7 @@ by extending them:
 You can read more about Triggers in :doc:`authoring-and-scheduling/deferring`.

 Timetables
-----------
+==========

 Custom timetable implementations provide Airflow's scheduler additional logic to
 schedule DAG runs in ways not possible with built-in schedule expressions.

 Airflow has a set of Timetables that are considered public. You are free to extend their functionality
 by extending them:

 .. toctree::
     :includehidden:
+    :hidden:
     :maxdepth: 1

     _api/airflow/timetables/index

@@ -251,7 +386,7 @@ by extending them:
 You can read more about Timetables in :doc:`howto/timetable`.

 Listeners
----------
+=========

 Listeners enable you to respond to DAG/Task lifecycle events.

@@ -264,11 +399,8 @@ can be implemented to respond to DAG/Task lifecycle events.

 You can read more about Listeners in :doc:`administration-and-deployment/listeners`.

-..
-    TODO AIP-72: This class has been moved to task sdk but we cannot add a doc reference for it yet because task sdk doesn't have rendered docs yet.
-
 Extra Links
------------
+===========

 Extra links are dynamic links that can be added to Airflow independently of custom Operators. Normally they
 can be defined by the Operators, but plugins allow you to override the links on a global level.

@@ -285,7 +417,7 @@ You can read more about providers :doc:`providers `.

 Executors
----------
+=========

 Executors are the mechanism by which task instances get run. All executors are
 derived from :class:`~airflow.executors.base_executor.BaseExecutor`. There are several
@@ -305,10 +437,10 @@ You can read more about executors and how to write your own in :doc:`core-concep
    executors, and custom executors could not provide full functionality that built-in executors had.

 Secrets Backends
-----------------
+================

 Airflow can be configured to rely on secrets backends to retrieve
-:class:`~airflow.models.connection.Connection` and :class:`~airflow.models.variable.Variable`.
+:class:`~airflow.sdk.Connection` and :class:`~airflow.sdk.Variable`.
 All secrets backends derive from :class:`~airflow.secrets.base_secrets.BaseSecretsBackend`.

 All Secrets Backend implementations are public. You can extend their functionality:

@@ -316,6 +448,7 @@ All Secrets Backend implementations are public. You can extend their functionali
 .. toctree::
     :includehidden:
     :glob:
+    :hidden:
     :maxdepth: 1

     _api/airflow/secrets/index

@@ -325,7 +458,7 @@ You can also find all the available Secrets Backends implemented in community pr
 in :doc:`apache-airflow-providers:core-extensions/secrets-backends`.

 Auth managers
--------------
+=============

 Auth managers are responsible for user authentication and user authorization in Airflow. All auth managers are
 derived from :class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager`.
@@ -336,21 +469,21 @@ public, but the different implementations of auth managers are not (i.e. FabAuth

 You can read more about auth managers and how to write your own in :doc:`core-concepts/auth-manager/index`.
Connections
------------
+===========

 When creating Hooks, you can add custom Connections. You can read more about connections in
 :doc:`apache-airflow-providers:core-extensions/connections` for available
 Connections implemented in the community providers.

 Extra Links
------------
+===========

 When creating Hooks, you can add custom Extra Links that are displayed when the tasks are run.
 You can find out more about extra links in :doc:`apache-airflow-providers:core-extensions/extra-links`
 that also shows available extra links implemented in the community providers.

 Logging and Monitoring
-----------------------
+======================

 You can extend the way logs are written by Airflow. You can find out more about log writing in
 :doc:`administration-and-deployment/logging-monitoring/index`.

@@ -359,40 +492,44 @@ The :doc:`apache-airflow-providers:core-extensions/logging` that also shows avai
 implemented in the community providers.

 Decorators
-----------
-DAG authors can use decorators to author dags using the :doc:`TaskFlow ` concept.
-All Decorators derive from :class:`~airflow.decorators.base.TaskDecorator`.
+==========
+Dag authors can use decorators to author dags using the :doc:`TaskFlow ` concept.
+All Decorators derive from :class:`~airflow.sdk.bases.decorator.TaskDecorator`.
+
+The primary decorators for Dag authors are now in the airflow.sdk namespace:
+:func:`~airflow.sdk.dag`, :func:`~airflow.sdk.task`, :func:`~airflow.sdk.asset`,
+:func:`~airflow.sdk.setup`, :func:`~airflow.sdk.task_group`, :func:`~airflow.sdk.teardown`,
+:func:`~airflow.sdk.chain`, :func:`~airflow.sdk.chain_linear`, :func:`~airflow.sdk.cross_downstream`,
+:func:`~airflow.sdk.get_current_context` and :func:`~airflow.sdk.get_parsing_context`.

 Airflow has a set of Decorators that are considered public. You are free to extend their functionality
 by extending them:

-.. toctree::
-    :includehidden:
-    :maxdepth: 1
-
-    _api/airflow/decorators/index
+.. note::
+    Decorators are now part of the airflow.sdk namespace.
+    For detailed API documentation, see the `Task SDK Reference `_.

 You can read more about creating custom Decorators in :doc:`howto/create-custom-decorator`.

 Email notifications
--------------------
+===================

 Airflow has a built-in way of sending email notifications and it allows you to extend it by adding custom
 email notification classes. You can read more about email notifications in :doc:`howto/email-config`.

 Notifications
--------------
+=============

 Airflow has a built-in extensible way of sending notifications using the various ``on_*_callback``. You can read more
 about notifications in :doc:`howto/notifications`.

 Cluster Policies
-----------------
+================

 Cluster Policies are the way to dynamically apply cluster-wide policies to the dags being parsed or tasks
 being executed. You can read more about Cluster Policies in :doc:`administration-and-deployment/cluster-policies`.

 Lineage
--------
+=======

 Airflow can help track origins of data, what happens to it and where it moves over time. You can read more about
 lineage in :doc:`administration-and-deployment/lineage`.
@@ -408,12 +545,59 @@ Everything not mentioned in this document should be considered as non-Public Int

 Sometimes in other applications those components could be relied on to keep backwards compatibility,
 but in Airflow they are not part of the Public Interface and might change at any time:

-* `Database structure `_ is considered to be an internal implementation
+* :doc:`Database structure ` is considered to be an internal implementation
   detail and you should not assume the structure is going to be maintained in a
   backwards-compatible way.

-* `Web UI `_ is continuously evolving and there are no backwards compatibility guarantees on HTML elements.
+* :doc:`Web UI ` is continuously evolving and there are no backwards
+  compatibility guarantees on HTML elements.

 * Python classes, except those explicitly mentioned in this document, are considered an
   internal implementation detail and you should not assume they will be maintained
   in a backwards-compatible way.
+
+**Direct metadata database access from task code is no longer allowed**.
+Task code cannot directly access the metadata database to query DAG state, task history,
+or DAG runs. Instead, use one of the following alternatives:
+
+* **Task Context**: Use :func:`~airflow.sdk.get_current_context` to access task instance
+  information and methods like :meth:`~airflow.sdk.types.RuntimeTaskInstanceProtocol.get_dr_count`,
+  :meth:`~airflow.sdk.types.RuntimeTaskInstanceProtocol.get_dagrun_state`, and
+  :meth:`~airflow.sdk.types.RuntimeTaskInstanceProtocol.get_task_states`.
+
+* **REST API**: Use the :doc:`Stable REST API ` for programmatic
+  access to Airflow metadata.
+
+* **Python Client**: Use the `Python Client `_ for Python-based
+  interactions with Airflow.
+
+This change improves architectural separation and enables remote execution capabilities.
+
+Example of using Task Context instead of direct database access:
+
+.. code-block:: python
+
+    from datetime import datetime
+
+    from airflow.sdk import dag, get_current_context, task
+
+
+    @dag(dag_id="example_dag", start_date=datetime(2025, 1, 1), schedule="@hourly", tags=["misc"], catchup=False)
+    def example_dag():
+
+        @task(task_id="check_dagrun_state")
+        def check_state():
+            context = get_current_context()
+            ti = context["ti"]
+            dag_run = context["dag_run"]
+
+            # Use Task Context methods instead of direct DB access
+            dr_count = ti.get_dr_count(dag_id="example_dag")
+            dagrun_state = ti.get_dagrun_state(dag_id="example_dag", run_id=dag_run.run_id)
+
+            return f"DAG run count: {dr_count}, current state: {dagrun_state}"
+
+        check_state()
+
+
+    example_dag()
diff --git a/airflow-core/docs/redirects.txt b/airflow-core/docs/redirects.txt
index 9580d883a7a12..4904b7c38bfa2 100644
--- a/airflow-core/docs/redirects.txt
+++ b/airflow-core/docs/redirects.txt
@@ -26,12 +26,12 @@ administration-and-deployment/security/kerberos.rst security/kerberos.rst
 ## It's okay to include ``/stable/``, because there's no relationship between a version of
 ## Airflow and the version of any provider package.
-administration-and-deployment/security/access-control.rst ../apache-airflow-providers-fab/stable/auth-manager/access-control.rst -administration-and-deployment/security/access-control/index.rst ../apache-airflow-providers-fab/stable/auth-manager/access-control.rst +administration-and-deployment/security/access-control.rst ../../apache-airflow-providers-fab/stable/auth-manager/access-control.rst +administration-and-deployment/security/access-control/index.rst ../../apache-airflow-providers-fab/stable/auth-manager/access-control.rst administration-and-deployment/security/api.rst security/api.rst administration-and-deployment/security/audit_logs.rst security/audit_logs.rst administration-and-deployment/security/flower.rst security/flower.rst -administration-and-deployment/security/webserver.rst ../apache-airflow-providers-fab/stable/auth-manager/security.rst +administration-and-deployment/security/webserver.rst ../../apache-airflow-providers-fab/stable/auth-manager/security.rst administration-and-deployment/security/workload.rst security/workload.rst administration-and-deployment/security/secrets/secrets-backends/index.rst security/secrets/secrets-backends/index.rst administration-and-deployment/security/secrets/secrets-backends/local-filesystem-secrets-backend.rst security/secrets/secrets-backends/local-filesystem-secrets-backend.rst @@ -44,7 +44,7 @@ howto/use-alternative-secrets-backend.rst security/secrets/secrets-backend/index security.rst security/index.rst # Move the documentation from core to FAB provider -security/access-control.rst ../apache-airflow-providers-fab/stable/auth-manager/access-control.rst +security/access-control.rst ../../apache-airflow-providers-fab/stable/auth-manager/access-control.rst # Operators guides howto/operator/external.rst howto/operator/external_task_sensor.rst @@ -54,7 +54,7 @@ howto/customize-dag-ui-page-instance-name.rst howto/customize-ui.rst#customizing howto/customize-state-colors-ui.rst howto/customize-ui.rst#customizing-state-colours # Web UI -howto/add-new-role.rst ../apache-airflow-providers-fab/stable/auth-manager/access-control.rst +howto/add-new-role.rst ../../apache-airflow-providers-fab/stable/auth-manager/access-control.rst # Set up a database howto/initialize-database.rst howto/set-up-database.rst @@ -75,7 +75,7 @@ start/index.rst start.rst # References cli-ref.rst cli-and-env-variables-ref.rst _api/index.rst public-airflow-interface.rst -rest-api-ref.rst deprecated-rest-api-ref.rst +deprecated-rest-api-ref.rst rest-api-ref.rst macros-ref.rst templates-ref.rst # Concepts @@ -85,7 +85,6 @@ scheduler.rst administration-and-deployment/scheduler.rst # Installation installation.rst installation/index.rst upgrade-check.rst installation/upgrade-check.rst -upgrading-to-2.rst howto/upgrading-from-1-10/index.rst # Release Notes changelog.rst release_notes.rst @@ -137,7 +136,7 @@ concepts/scheduler.rst administration-and-deployment/scheduler.rst concepts/pools.rst administration-and-deployment/pools.rst concepts/priority-weight.rst administration-and-deployment/priority-weight.rst concepts/deferring.rst authoring-and-scheduling/deferring.rst -concepts/datasets.rst authoring-and-scheduling/datasets.rst +concepts/datasets.rst authoring-and-scheduling/asset-scheduling.rst concepts/cluster-policies.rst administration-and-deployment/cluster-policies.rst concepts/dags.rst core-concepts/dags.rst executor/local_kubernetes.rst core-concepts/executor/local_kubernetes.rst @@ -151,6 +150,7 @@ core-concepts/dag-run.rst authoring-and-scheduling/cron.rst 
core-concepts/executor/debug.rst core-concepts/debug.rst concepts/dagfile-processing.rst administration-and-deployment/dagfile-processing.rst authoring-and-scheduling/dagfile-processing.rst administration-and-deployment/dagfile-processing.rst +authoring-and-scheduling/datasets.rst authoring-and-scheduling/asset-scheduling.rst # Moving provider executor docs to providers ## The ``../`` indicates that it will move to the root of the docs directory, unlike the rest of @@ -158,12 +158,12 @@ authoring-and-scheduling/dagfile-processing.rst administration-and-deployment/da ## It's okay to include ``/stable/``, because there's no relationship between a version of ## Airflow and the version of any provider package. ### Kubernetes Executors -core-concepts/executor/kubernetes.rst ../apache-airflow-providers-cncf-kubernetes/stable/kubernetes_executor.html -core-concepts/executor/local_kubernetes.rst ../apache-airflow-providers-cncf-kubernetes/stable/local_kubernetes_executor.html +core-concepts/executor/kubernetes.rst ../../apache-airflow-providers-cncf-kubernetes/stable/kubernetes_executor.html +core-concepts/executor/local_kubernetes.rst ../../apache-airflow-providers-cncf-kubernetes/stable/local_kubernetes_executor.html ### Celery Executors -core-concepts/executor/celery_kubernetes.rst ../apache-airflow-providers-celery/stable/celery_kubernetes_executor.html -core-concepts/executor/celery.rst ../apache-airflow-providers-celery/stable/celery_executor.html +core-concepts/executor/celery_kubernetes.rst ../../apache-airflow-providers-celery/stable/celery_kubernetes_executor.html +core-concepts/executor/celery.rst ../../apache-airflow-providers-celery/stable/celery_executor.html # Python API python-api-ref.rst public-airflow-interface.rst @@ -173,3 +173,24 @@ howto/define_extra_link.rst howto/define-extra-link.rst # Use test config (it's not a howto for users but a howto for developers so we redirect it back to index) howto/use-test-config.rst index.rst + +# Operators/Sensors moved to standard providers + +howto/operator/bash.rst ../../apache-airflow-providers-standard/stable/operators/bash.rst +howto/operator/datetime.rst ../../apache-airflow-providers-standard/stable/operators/datetime.rst +howto/operator/external_task_sensor.rst ../../apache-airflow-providers-standard/stable/sensors/external_task_sensor.rst +howto/operator/file.rst ../../apache-airflow-providers-standard/stable/sensors/file.rst +howto/operator/python.rst ../../apache-airflow-providers-standard/stable/operators/python.rst +howto/operator/time.rst ../../apache-airflow-providers-standard/stable/sensors/datetime.rst +howto/operator/weekday.rst ../../apache-airflow-providers-standard/stable/operators/datetime.rst#branchdayofweekoperator + +_api/airflow/operators/datetime/index.html ../../apache-airflow-providers-standard/stable/_api/airflow/providers/standard/operators/datetime/index.html +_api/airflow/operators/empty/index.html ../../apache-airflow-providers-standard/stable/_api/airflow/providers/standard/operators/empty/index.html +_api/airflow/operators/bash/index.html ../../apache-airflow-providers-standard/stable/_api/airflow/providers/standard/operators/bash/index.html +_api/airflow/operators/branch/index.html ../../apache-airflow-providers-standard/stable/_api/airflow/providers/standard/operators/branch/index.html +_api/airflow/operators/latest_only/index.html ../../apache-airflow-providers-standard/stable/_api/airflow/providers/standard/operators/latest_only/index.html +_api/airflow/operators/python/index.html 
../../apache-airflow-providers-standard/stable/_api/airflow/providers/standard/operators/python/index.html
+_api/airflow/operators/trigger_dagrun/index.html ../../apache-airflow-providers-standard/stable/_api/airflow/providers/standard/operators/trigger_dagrun/index.html
+
+_api/airflow/operators/email/index.html ../../apache-airflow-providers-smtp/stable/_api/airflow/providers/smtp/operators/smtp/index.html
+_api/airflow/operators/generic_transfer/index.html ../../apache-airflow-providers-common-sql/stable/_api/airflow/providers/common/sql/operators/generic_transfer/index.html
diff --git a/airflow-core/docs/security/api.rst b/airflow-core/docs/security/api.rst
index 115cfec443aef..65469c880f9a8 100644
--- a/airflow-core/docs/security/api.rst
+++ b/airflow-core/docs/security/api.rst
@@ -43,7 +43,7 @@ Request

 .. code-block:: bash

-    ENDPOINT_URL="http://localhost:8080/"
+    ENDPOINT_URL="http://localhost:8080"
     curl -X POST ${ENDPOINT_URL}/auth/token \
     -H "Content-Type: application/json" \
     -d '{
@@ -63,7 +63,7 @@ Use the JWT token to call Airflow public API

 .. code-block:: bash

-    ENDPOINT_URL="http://localhost:8080/"
+    ENDPOINT_URL="http://localhost:8080"
     curl -X GET ${ENDPOINT_URL}/api/v2/dags \
     -H "Authorization: Bearer "

@@ -93,3 +93,10 @@ Page size limit

 To protect against requests that may lead to application instability, the stable API has a limit of items in response.
 The default is 100 items, but you can change it using ``maximum_page_limit`` option in ``[api]`` section in the ``airflow.cfg`` file.
+
+Request Payload Considerations
+------------------------------
+
+When using REST APIs that accept data payloads (such as the Variables API), be mindful of the payload size.
+Unusually large payloads (for example, a million bytes or more) can impact the performance of the Airflow API server.
+It's recommended to implement appropriate size limits at the proxy layer for your deployment.
diff --git a/airflow-core/docs/security/deprecated_permissions.rst b/airflow-core/docs/security/deprecated_permissions.rst
new file mode 100644
index 0000000000000..69deb91e58e44
--- /dev/null
+++ b/airflow-core/docs/security/deprecated_permissions.rst
@@ -0,0 +1,60 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Deprecation Notice for airflow.security.permissions
+===================================================
+
+Since the release of Airflow 3, the Flask AppBuilder (AKA "FAB") provider is
+`no longer a core Airflow dependency `_.
+However, some modules specifically designed for :doc:`apache-airflow-providers-fab:auth-manager/index` remain in the core Airflow distribution as a
+backwards-compatible convenience for Airflow users.
One such module which remains in the core distribution for backwards compatibility purposes is ``airflow.security.permissions``.
+
+If your deployment depends on ``airflow.security.permissions`` for any custom role definitions, or for any custom Auth Manager logic --
+regardless of whether you use the FAB Auth Manager or some other Auth Manager -- you should transition
+to the new authorization standard definitions for resources and actions.
+The deprecated ``airflow.security.permissions`` will be removed in Airflow 4.
+
+Does this Deprecation Affect My Airflow System?
+-----------------------------------------------
+
+Generally speaking, this deprecation warning applies to any Airflow system in which **either** of the following is true:
+
+* The Airflow deployment relies on ``airflow.security.permissions`` to define custom RBAC roles.
+* The Airflow deployment has other custom logic which relies on ``airflow.security.permissions``, including any custom :doc:`/core-concepts/auth-manager/index` dependencies.
+
+However, if you rely on the **unmodified** :doc:`apache-airflow-providers-fab:auth-manager/index` and you **do not** use any custom role definitions, then the rest of this doc does not apply to you.
+Similarly, if you rely on the :doc:`/core-concepts/auth-manager/simple/index` or any of the other provider Auth Managers, and have no custom code using ``airflow.security.permissions``, then the rest of this doc does not apply to you.
+
+.. note::
+    Each customized Airflow RBAC setup differs on a case-by-case basis. As such, this doc can only provide general
+    guidance for the transition to the new Airflow authorization standards, without being overly prescriptive.
+
+How to Migrate to the New Authorization Standard Definitions
+------------------------------------------------------------
+
+Refer to the list below for the deprecated permissions module components, and the corresponding supported
+replacement available from Airflow core:
+
+* ``airflow.security.permissions.ACTION_*`` --> ``airflow.api_fastapi.auth.managers.base_auth_manager.ResourceMethod``
+* ``airflow.security.permissions.RESOURCE_*`` --> ``airflow.api_fastapi.auth.managers.models.resource_details``
+* ``DAG.access_control`` --> DAG-level permissions should be handled by the chosen Auth Manager's ``filter_authorized_dag_ids`` method.
+
+If you maintain a custom :doc:`/core-concepts/auth-manager/index` which relies on the deprecated module, it is
+recommended you refer to the ``SimpleAuthManager``'s `source code `_
+as an example for how you might use the ``ResourceMethod`` and ``resource_details`` components.
+
+If you rely on custom role definitions based on the deprecated module, you should refer to the documentation of the auth manager your system uses.
diff --git a/airflow-core/docs/security/kerberos.rst b/airflow-core/docs/security/kerberos.rst
index b38ebb6146782..0dea38c60bbe0 100644
--- a/airflow-core/docs/security/kerberos.rst
+++ b/airflow-core/docs/security/kerberos.rst
@@ -45,10 +45,10 @@ To enable Kerberos you will need to generate a (service) key tab.

     # in the kadmin.local or kadmin shell, create the airflow principal
     kadmin: addprinc -randkey airflow/fully.qualified.domain.name@YOUR-REALM.COM

-    # Create the airflow keytab file that will contain the airflow principal
+    # Create the Airflow keytab file that will contain the Airflow principal
     kadmin: xst -norandkey -k airflow.keytab airflow/fully.qualified.domain.name

-Now store this file in a location where the airflow user can read it (chmod 600).
And then add the following to
+Now store this file in a location where the Airflow user can read it (chmod 600). And then add the following to
 your ``airflow.cfg``

 .. code-block:: ini
@@ -103,9 +103,9 @@ Launch the ticket renewer by

 To support more advanced deployment models for using kerberos in standard or one-time fashion,
 you can specify the mode while running the ``airflow kerberos`` by using the ``--one-time`` flag.

-a) standard: The airflow kerberos command will run endlessly. The ticket renewer process runs continuously every few seconds
+a) standard: The ``airflow kerberos`` command will run endlessly. The ticket renewer process runs continuously every few seconds
 and refreshes the ticket if it has expired.
-b) one-time: The airflow kerberos will run once and exit. In case of failure the main task won't spin up.
+b) one-time: The ``airflow kerberos`` command will run once and exit. In case of failure the main task won't spin up.

 The default mode is standard.
diff --git a/airflow-core/docs/security/sbom.rst b/airflow-core/docs/security/sbom.rst
index 273c16c79cecd..0f1d699368499 100644
--- a/airflow-core/docs/security/sbom.rst
+++ b/airflow-core/docs/security/sbom.rst
@@ -25,7 +25,7 @@ of the software dependencies.

 The general use case for such files is to help assess and manage risks. For instance a quick lookup against your SBOM
 files can help identify if a CVE (Common Vulnerabilities and Exposures) in a library is affecting you.

-By default, Apache Airflow SBOM files are generated for airflow core with all providers. In the near future we aim at generating SBOM files per provider and also provide them for docker standard images.
+By default, Apache Airflow SBOM files are generated for Airflow core with all providers. In the near future we aim to generate SBOM files per provider and also provide them for standard Docker images.

-Each airflow version has its own SBOM files, one for each supported python version.
-You can find them `here `_.
+Each Airflow version has its own SBOM files, one for each supported Python version.
+You can find them `here <../sbom/>`_.
diff --git a/airflow-core/docs/security/security_model.rst b/airflow-core/docs/security/security_model.rst
index cf19d0a276e20..1915b3117abae 100644
--- a/airflow-core/docs/security/security_model.rst
+++ b/airflow-core/docs/security/security_model.rst
@@ -39,7 +39,7 @@ This is why Airflow has the following user types:

 * Deployment Managers - overall responsible for the Airflow installation, security and configuration
 * Authenticated UI users - users that can access Airflow UI and API and interact with it
-* DAG Authors - responsible for creating dags and submitting them to Airflow
+* Dag authors - responsible for creating dags and submitting them to Airflow

 You can see more on how the user types influence Airflow's architecture in :doc:`/core-concepts/overview`,
 including diagrams of less and more complex deployments.

@@ -58,14 +58,14 @@ can also decide to keep audits, backups and copies of information outside of
 Airflow, which are not covered by Airflow's security model.

-DAG Authors
+Dag authors
 ...........

 They can create, modify, and delete DAG files. The code in DAG files
 is executed on workers and in the DAG Processor.
-Therefore, DAG authors can create and change code executed on workers
+Therefore, Dag authors can create and change code executed on workers
 and the DAG Processor and potentially access the credentials that the DAG
-code uses to access external systems.
DAG Authors have full access
+code uses to access external systems. Dag authors have full access
 to the metadata database.

 Authenticated UI users
@@ -115,9 +115,9 @@ Connection configuration users
 ..............................

 They configure connections and potentially execute code on workers during DAG execution. Trust is
-required to prevent misuse of these privileges. They have full access
-to sensitive credentials stored in connections and can modify them.
-Access to sensitive information through connection configuration
+required to prevent misuse of these privileges. They have write-only access
+to sensitive credentials stored in connections: they can modify them, but cannot view them.
+Users with the ability to write sensitive information through connection configuration
 should be trusted not to abuse it. They also have the ability to configure connections wrongly, which
 might create API Server Denial of Service situations, and to specify insecure connection options
 which might create situations where executing dags will lead to arbitrary Remote Code Execution
@@ -125,6 +134,15 @@ for some providers - either community released or custom ones.

 Those users should be highly trusted not to misuse this capability.

+.. note::
+
+    Before Airflow 3, the **Connection configuration users** role also had access to view the sensitive
+    information; this was changed in Airflow 3 to protect against accidental spilling of credentials by
+    connection configuration users. Previously - in Airflow 2 - **Connection configuration users** deliberately
+    had access to view the sensitive information and could reveal it using the Inspect capabilities of the
+    browser, and sensitive credentials stored in connection extras were plainly visible. Airflow 3 and later
+    versions include a security improvement that masks those sensitive credentials at the API level.
+
 Audit log users
 ...............
@@ -146,12 +155,12 @@ Viewers also do not have permission to access audit logs.

 For more information on the capabilities of authenticated UI users, see :doc:`apache-airflow-providers-fab:auth-manager/access-control`.

-Capabilities of DAG Authors
+Capabilities of Dag authors
 ---------------------------

-DAG authors are able to create or edit code - via Python files placed in a dag bundle - that will be executed
+Dag authors are able to create or edit code - via Python files placed in a dag bundle - that will be executed
 in a number of circumstances. The code to execute is neither verified, checked nor sand-boxed by Airflow
-(that would be very difficult if not impossible to do), so effectively DAG authors can execute arbitrary
+(that would be very difficult if not impossible to do), so effectively Dag authors can execute arbitrary
 code on the workers (part of Celery Workers for Celery Executor, local processes run by scheduler in case
 of Local Executor, Task Kubernetes POD in case of Kubernetes Executor), in the DAG Processor and in the Triggerer.

@@ -161,86 +170,86 @@ There are several consequences of this model chosen by Airflow, that deployment

 Local executor
 ..............

-In case of Local Executor, DAG authors can execute arbitrary code on the machine where scheduler is running.
+In case of Local Executor, Dag authors can execute arbitrary code on the machine where scheduler is running.
 This means that they can affect the scheduler process itself, and potentially affect the whole Airflow
 installation - including modifying cluster-wide policies and changing Airflow configuration.
If you are running
-Airflow with Local Executor, the Deployment Manager must trust the DAG authors not to abuse this capability.
+Airflow with Local Executor, the Deployment Manager must trust the Dag authors not to abuse this capability.

 Celery Executor
 ...............

-In case of Celery Executor, DAG authors can execute arbitrary code on the Celery Workers. This means that
+In case of Celery Executor, Dag authors can execute arbitrary code on the Celery Workers. This means that
 they can potentially influence all the tasks executed on the same worker. If you are running Airflow with
-Celery Executor, the Deployment Manager must trust the DAG authors not to abuse this capability and unless
+Celery Executor, the Deployment Manager must trust the Dag authors not to abuse this capability, and unless
 the Deployment Manager separates task execution by queues via Cluster Policies, they should assume there
 is no isolation between tasks.

 Kubernetes Executor
 ...................

-In case of Kubernetes Executor, DAG authors can execute arbitrary code on the Kubernetes POD they run. Each
+In case of Kubernetes Executor, Dag authors can execute arbitrary code on the Kubernetes POD they run. Each
 task is executed in a separate POD, so there is already isolation between tasks as generally speaking
 Kubernetes provides isolation between PODs.

 Triggerer
 .........

-In case of Triggerer, DAG authors can execute arbitrary code in Triggerer. Currently there are no
+In case of Triggerer, Dag authors can execute arbitrary code in Triggerer. Currently there are no
 enforcement mechanisms that would allow isolating tasks that use deferrable functionality from
 each other, and arbitrary code from various tasks can be executed in the same process/machine. Deployment
-Manager must trust that DAG authors will not abuse this capability.
+Manager must trust that Dag authors will not abuse this capability.

 DAG files not needed for Scheduler and API Server
 .................................................

-The Deployment Manager might isolate the code execution provided by DAG authors - particularly in
+The Deployment Manager might isolate the code execution provided by Dag authors - particularly in
 Scheduler and API Server by making sure that the Scheduler and API Server don't even
-have access to the DAG Files. Generally speaking - no DAG author provided code should ever be
+have access to the DAG Files. Generally speaking - no Dag author provided code should ever be
 executed in the Scheduler or API Server process. This means the deployment manager can exclude
 credentials needed for dag bundles on the Scheduler and API Server - but the bundles must still be
 configured on those components.

-Allowing DAG authors to execute selected code in Scheduler and API Server
+Allowing Dag authors to execute selected code in Scheduler and API Server
 .........................................................................

-There are a number of functionalities that allow the DAG author to use pre-registered custom code to be
+There are a number of functionalities that allow the Dag author to use pre-registered custom code to be
 executed in the Scheduler or API Server process - for example they can choose custom Timetables, UI plugins,
 Connection UI Fields, Operator extra links, macros, listeners - all of those functionalities allow the
-DAG author to choose the code that will be executed in the Scheduler or API Server process. However this
-should not be arbitrary code that DAG author can add dag bundles.
All those functionalities are
+Dag author to choose the code that will be executed in the Scheduler or API Server process. However, this
+should not be arbitrary code that the Dag author can add to dag bundles. All those functionalities are
 only available via ``plugins`` and ``providers`` mechanisms where the code that is executed can only be
 provided by installed packages (or in case of plugins it can also be added to the PLUGINS folder, to which
 Dag authors should not have write access). PLUGINS_FOLDER is a legacy mechanism coming
 from Airflow 1.10 - but we recommend using the entrypoint mechanism that allows the Deployment Manager to - effectively -
-choose and register the code that will be executed in those contexts. DAG Author has no access to
+choose and register the code that will be executed in those contexts. The Dag author has no access to
 install or modify packages installed in the Scheduler and API Server, and this is the way to prevent
-the DAG Author to execute arbitrary code in those processes.
+the Dag author from executing arbitrary code in those processes.

 Additionally, if you decide to utilize and configure the PLUGINS_FOLDER, it is essential for the Deployment
-Manager to ensure that the DAG author does not have write access to this folder.
+Manager to ensure that the Dag author does not have write access to this folder.

-The Deployment Manager might decide to introduce additional control mechanisms to prevent DAG authors from
+The Deployment Manager might decide to introduce additional control mechanisms to prevent Dag authors from
 executing arbitrary code. This is all fully in the hands of the Deployment Manager and it is discussed in the
 following chapter.

 Access to all dags
 ........................................................................

-All dag authors have access to all dags in the Airflow deployment. This means that they can view, modify,
+All Dag authors have access to all dags in the Airflow deployment. This means that they can view, modify,
 and update any dag without restrictions at any time.

 Responsibilities of Deployment Managers
 ---------------------------------------

-As a Deployment Manager, you should be aware of the capabilities of DAG authors and make sure that
+As a Deployment Manager, you should be aware of the capabilities of Dag authors and make sure that
 you trust them not to abuse the capabilities they have. You should also make sure that you have
-properly configured the Airflow installation to prevent DAG authors from executing arbitrary code
+properly configured the Airflow installation to prevent Dag authors from executing arbitrary code
 in the Scheduler and API Server processes.

 Deploying and protecting Airflow installation
 .............................................

-Deployment Managers are also responsible for deploying airflow and make it accessible to the users
+Deployment Managers are also responsible for deploying Airflow and making it accessible to the users
 in a way that follows best practices of secure deployment applicable to the organization where
 Airflow is deployed. This includes but is not limited to:

@@ -252,10 +261,10 @@ Airflow is deployed. This includes but is not limited to:
 * any kind of detection of unusual activity and protection against it
 * choosing the right session backend and configuring it properly including timeouts for the session

-Limiting DAG Author capabilities
+Limiting Dag author capabilities
 .................................
-The Deployment Manager might also use additional mechanisms to prevent DAG authors from
+The Deployment Manager might also use additional mechanisms to prevent Dag authors from
executing arbitrary code - for example they might introduce tooling around DAG submission
that would allow to review the code before it is deployed, statically-check it and add other ways
to prevent malicious code to be submitted. The way submitting code to a DAG bundle is done and protected is completely
diff --git a/airflow-core/docs/security/workload.rst b/airflow-core/docs/security/workload.rst
index 9f6bfecad94e8..31714aa21fbb2 100644
--- a/airflow-core/docs/security/workload.rst
+++ b/airflow-core/docs/security/workload.rst
@@ -29,8 +29,8 @@ instances based on the task's ``run_as_user`` parameter, which takes a user's na
**NOTE:** For impersonations to work, Airflow requires ``sudo`` as subtasks are run
with ``sudo -u`` and permissions of files are changed. Furthermore, the unix user needs to
exist on the worker. Here is what a simple sudoers file entry could look
-like to achieve this, assuming airflow is running as the ``airflow`` user. This means
-the airflow user must be trusted and treated the same way as the root user.
+like to achieve this, assuming Airflow is running as the ``airflow`` user. This means
+the Airflow user must be trusted and treated the same way as the root user.

.. code-block:: none
diff --git a/airflow-core/docs/start.rst b/airflow-core/docs/start.rst
index 9986b575a27a0..d40165001619c 100644
--- a/airflow-core/docs/start.rst
+++ b/airflow-core/docs/start.rst
@@ -24,9 +24,11 @@ This quick start guide will help you bootstrap an Airflow standalone instance on
.. note::

-    Successful installation requires a Python 3 environment. Starting with Airflow 2.7.0, Airflow supports Python 3.9, 3.10, 3.11, and 3.12.
+    Successful installation requires a Python 3 environment. Starting with Airflow 3.1.0, Airflow supports Python 3.10, 3.11, 3.12, and 3.13.

-    Officially supported installation methods include ``pip`` and ``uv``. Both tools provide a streamlined workflow for installing Airflow and managing dependencies.
+    The officially supported installation method is ``pip``.
+
+    Run ``pip install apache-airflow[EXTRAS]==AIRFLOW_VERSION --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-AIRFLOW_VERSION/constraints-PYTHON_VERSION.txt"``, for example ``pip install "apache-airflow[celery]==3.0.0" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.0/constraints-3.10.txt"`` to install Airflow in a reproducible way.
@@ -35,11 +37,6 @@ This quick start guide will help you bootstrap an Airflow standalone instance on
``pip`` or ``uv`` - especially when it comes to constraint vs. requirements management.
Installing via ``Poetry`` or ``pip-tools`` is not currently supported.

-    There are known issues with ``bazel`` that might lead to circular dependencies when using it to install
-    Airflow. Please switch to ``pip`` or ``uv`` if you encounter such problems. ``Bazel`` community works on fixing
-    the problem in `this PR `_ so it might be that
-    newer versions of ``bazel`` will handle it.
-
If you wish to install Airflow using those tools you should use the constraint files and convert
them to appropriate format and workflow that your tool requires.
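The constraint pattern above is easy to script when you need to compute the URL for the interpreter you are actually running. Below is a minimal Python sketch that assembles the same constraint URL the quick start builds in shell; the pinned ``AIRFLOW_VERSION`` value here is an assumption for illustration:

.. code-block:: python

    import sys

    # Assumption: pin this to the Airflow release you intend to install.
    AIRFLOW_VERSION = "3.0.0"

    # Constraints are published per Airflow release and per Python minor version,
    # following the URL pattern shown in the quick start above.
    PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"
    CONSTRAINT_URL = (
        "https://raw.githubusercontent.com/apache/airflow/"
        f"constraints-{AIRFLOW_VERSION}/constraints-{PYTHON_VERSION}.txt"
    )
    print(f'pip install "apache-airflow=={AIRFLOW_VERSION}" --constraint "{CONSTRAINT_URL}"')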
@@ -61,8 +58,8 @@ This quick start guide will help you bootstrap an Airflow standalone instance on Install uv: `uv Installation Guide `_ - For creating Virtualenv with uv, refer to the documentation here: - `Creating and Maintaining Local Virtualenv with uv `_ + For creating virtual environment with ``uv``, refer to the documentation here: + `Creating and Maintaining Local virtual environment with uv `_ 3. Install Airflow using the constraints file, which is determined based on the URL we pass: @@ -71,14 +68,14 @@ This quick start guide will help you bootstrap an Airflow standalone instance on :substitutions: - AIRFLOW_VERSION=2.10.5 + AIRFLOW_VERSION=3.0.0 # Extract the version of Python you have installed. If you're currently using a Python version that is not supported by Airflow, you may want to set this manually. # See above for supported versions. PYTHON_VERSION="$(python -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')" CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt" - # For example this would install 2.10.5 with python 3.9: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.9.txt + # For example this would install 3.0.0 with python 3.10: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.10.txt uv pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" @@ -99,7 +96,7 @@ and create the "airflow.cfg" file with defaults that will get you going fast. You can override defaults using environment variables, see :doc:`/configurations-ref`. You can inspect the file either in ``$AIRFLOW_HOME/airflow.cfg``, or through the UI in the ``Admin->Configuration`` menu. The PID file for the webserver will be stored -in ``$AIRFLOW_HOME/airflow-webserver.pid`` or in ``/run/airflow/webserver.pid`` +in ``$AIRFLOW_HOME/airflow-api-server.pid`` or in ``/run/airflow/airflow-webserver.pid`` if started by systemd. As you grow and deploy Airflow to production, you will also want to move away @@ -116,8 +113,8 @@ run the commands below. airflow tasks test example_bash_operator runme_0 2015-01-01 # run a backfill over 2 days airflow backfill create --dag-id example_bash_operator \ - --start-date 2015-01-01 \ - --end-date 2015-01-02 + --from-date 2015-01-01 \ + --to-date 2015-01-02 If you want to run the individual parts of Airflow manually rather than using the all-in-one ``standalone`` command, you can instead run: diff --git a/airflow-core/docs/templates-ref.rst b/airflow-core/docs/templates-ref.rst index ccd2d08c88937..1d226b1ec12fb 100644 --- a/airflow-core/docs/templates-ref.rst +++ b/airflow-core/docs/templates-ref.rst @@ -54,10 +54,10 @@ Variable Type Description | ``None`` ``{{ start_date }}`` `pendulum.DateTime`_ Datetime of when current task has been started. ``{{ inlets }}`` list List of inlets declared on the task. -``{{ inlet_events }}`` dict[str, ...] Access past events of inlet assets. See :doc:`Assets `. Added in version 2.10. +``{{ inlet_events }}`` dict[str, ...] Access past events of inlet assets. See :doc:`Assets `. Added in version 2.10. ``{{ outlets }}`` list List of outlets declared on the task. ``{{ outlet_events }}`` dict[str, ...] | Accessors to attach information to asset events that will be emitted by the current task. - | See :doc:`Assets `. Added in version 2.10. + | See :doc:`Assets `. Added in version 2.10. ``{{ dag }}`` DAG The currently running :class:`~airflow.models.dag.DAG`. 
You can read more about dags in :doc:`Dags `. ``{{ task }}`` BaseOperator | The currently running :class:`~airflow.models.baseoperator.BaseOperator`. You can read more about Tasks in :doc:`core-concepts/operators` ``{{ task_reschedule_count }}`` int How many times current task has been rescheduled. Relevant to ``mode="reschedule"`` sensors. @@ -81,7 +81,7 @@ Variable Type Description | Added in version 2.5. ``{{ triggering_asset_events }}`` dict[str, | If in an Asset Scheduled DAG, a map of Asset URI to a list of triggering :class:`~airflow.models.asset.AssetEvent` list[AssetEvent]] | (there may be more than one, if there are multiple Assets with different frequencies). - | Read more here :doc:`Assets `. + | Read more here :doc:`Assets `. | Added in version 2.4. =========================================== ===================== =================================================================== @@ -186,7 +186,7 @@ Variable Description ``macros.random`` The standard lib's :class:`random.random` ================================= ============================================== -Some airflow specific macros are also defined: +Some Airflow specific macros are also defined: .. automodule:: airflow.macros :members: diff --git a/airflow-core/docs/troubleshooting.rst b/airflow-core/docs/troubleshooting.rst index f636b87a42c47..f354ea1a2ff6e 100644 --- a/airflow-core/docs/troubleshooting.rst +++ b/airflow-core/docs/troubleshooting.rst @@ -46,3 +46,13 @@ Here are some examples that could cause such an event: - A DAG run timeout, specified by ``dagrun_timeout`` in the DAG's definition. - An Airflow worker running out of memory - Usually, Airflow workers that run out of memory receive a SIGKILL, and the scheduler will fail the corresponding task instance for not having a heartbeat. However, in some scenarios, Airflow kills the task before that happens. + +Lingering task supervisor processes +----------------------------------- + +Under very high concurrency the socket handlers inside the task supervisor may +miss the final EOF events from the task process. When this occurs the supervisor +believes sockets are still open and will not exit. The +:ref:`workers.socket_cleanup_timeout ` option controls how long the supervisor +waits after the task finishes before force-closing any remaining sockets. If you +observe leftover ``supervisor`` processes, consider increasing this delay. diff --git a/airflow-core/docs/tutorial/fundamentals.rst b/airflow-core/docs/tutorial/fundamentals.rst index e1fa0f3684b56..2f4cee031dc59 100644 --- a/airflow-core/docs/tutorial/fundamentals.rst +++ b/airflow-core/docs/tutorial/fundamentals.rst @@ -43,7 +43,7 @@ each line in detail. Understanding the DAG Definition File ------------------------------------- Think of the Airflow Python script as a configuration file that lays out the structure of your DAG in code. The actual -tasks you define here run in a different environment, which means this script isn't meant for data processing. It's main +tasks you define here run in a different environment, which means this script isn't meant for data processing. Its main job is to define the DAG object, and it needs to evaluate quickly since the DAG File Processor checks it regularly for any changes. @@ -90,7 +90,7 @@ Next, we'll need to create a DAG object to house our tasks. We'll provide a uniq Understanding Operators ----------------------- An operator represents a unit of work in Airflow. 
They are the building blocks of your workflows, allowing you to -define what tasks will be executed. While we can use operators for many tasks, Airflow also offers the :doc:`Taskflow API ` +define what tasks will be executed. While we can use operators for many tasks, Airflow also offers the :doc:`TaskFlow API ` for a more Pythonic way to define workflows, which we'll touch on later. All operators derive from the ``BaseOperator``, which includes the essential arguments needed to run tasks in Airflow. @@ -173,7 +173,7 @@ documentation at the start of your DAG file. Setting up Dependencies ----------------------- -In Airflow, tasks can depend on one another. For instance, if you have tasks ``t1``, ``t2``, and ``t3``, you can defined +In Airflow, tasks can depend on one another. For instance, if you have tasks ``t1``, ``t2``, and ``t3``, you can define their dependencies in several ways: .. code-block:: python @@ -210,8 +210,8 @@ times. Working with Time Zones ----------------------- -Creating a time zone aware DAG straightforward. Just ensure you use time zone aware dates -with ``pendulum``. Avoid using the standard library +Creating a time zone aware DAG is straightforward. Just ensure you use time zone aware dates +with `pendulum `_. Avoid using the standard library `timezone `_ as they have known limitations. Recap @@ -252,8 +252,8 @@ Let's validate your script further by running a few commands: # prints the list of tasks in the "tutorial" DAG airflow tasks list tutorial - # prints the hierarchy of tasks in the "tutorial" DAG - airflow tasks list tutorial --tree + # prints the graphviz representation of "tutorial" DAG + airflow dags show tutorial Testing Task Instances and DAG Runs diff --git a/airflow-core/docs/tutorial/hitl.rst b/airflow-core/docs/tutorial/hitl.rst new file mode 100644 index 0000000000000..c16decd039976 --- /dev/null +++ b/airflow-core/docs/tutorial/hitl.rst @@ -0,0 +1,102 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +HITLOperator (Human-in-the-loop) +================================ + +.. versionadded:: 3.1 + +Human-in-the-Loop (HITL) functionality allows you to incorporate human decision-making directly into your workflows. +This powerful feature enables workflows to pause and wait for human input, making it perfect for approval processes, manual quality checks, and scenarios where human judgment is essential. + +In this tutorial, we will explore how to use the HITL operators in workflows. + +An HITL Example Dag +------------------- + +Here is what HITL looks like in a Dag. We'll break it down and dive into it. + +.. 
exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py
+   :language: python
+   :start-after: [START hitl_tutorial]
+   :end-before: [END hitl_tutorial]
+
+
+Input Provision
+---------------
+
+Users can provide input using params; this input is then used by subsequent tasks.
+This is useful for human guidance within large language model (LLM) workflows.
+
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py
+   :language: python
+   :dedent: 4
+   :start-after: [START howto_hitl_entry_operator]
+   :end-before: [END howto_hitl_entry_operator]
+
+
+Option Selection
+----------------
+
+Input can be provided in the form of options.
+Users can select one of the available options, which can be used to direct the workflow.
+
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py
+   :language: python
+   :dedent: 4
+   :start-after: [START howto_hitl_operator]
+   :end-before: [END howto_hitl_operator]
+
+Approval or Rejection
+---------------------
+
+A specialized form of option selection, which has only 'Approval' and 'Rejection' as options.
+
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py
+   :language: python
+   :dedent: 4
+   :start-after: [START howto_hitl_approval_operator]
+   :end-before: [END howto_hitl_approval_operator]
+
+As you can see in the body of this code snippet, you can use XComs to get information provided by the user.
+
+Branch Selection
+----------------
+
+Users can choose which branches to follow within the Dag.
+This is commonly applied in scenarios such as content moderation, where human judgment is sometimes required.
+
+This is similar to option selection, except that each option needs to be a task.
+Remember to specify their relationship in the workflow.
+
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py
+   :language: python
+   :dedent: 4
+   :start-after: [START howto_hitl_branch_operator]
+   :end-before: [END howto_hitl_branch_operator]
+
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py
+   :language: python
+   :dedent: 4
+   :start-after: [START howto_hitl_workflow]
+   :end-before: [END howto_hitl_workflow]
+
+Benefits and Common Use Cases
+-----------------------------
+
+HITL functionality is valuable in large language model (LLM) workflows, where human-provided guidance can be essential for achieving better results.
+It is also highly beneficial in enterprise data pipelines, where human validation can complement and enhance automated processes.
diff --git a/airflow-core/docs/tutorial/index.rst b/airflow-core/docs/tutorial/index.rst
index 2c0a4492c0717..9699967e1eca1 100644
--- a/airflow-core/docs/tutorial/index.rst
+++ b/airflow-core/docs/tutorial/index.rst
@@ -27,3 +27,4 @@ Once you have Airflow up and running with the :doc:`/start`, these tutorials are
   taskflow
   pipeline
   objectstorage
+   hitl
diff --git a/airflow-core/docs/tutorial/objectstorage.rst b/airflow-core/docs/tutorial/objectstorage.rst
index 59c4142f7ce85..de80e8c973c16 100644
--- a/airflow-core/docs/tutorial/objectstorage.rst
+++ b/airflow-core/docs/tutorial/objectstorage.rst
@@ -23,7 +23,7 @@ Cloud-Native Workflows with Object Storage

.. versionadded:: 2.8

-Welcome to the final tutorial in our Airflow series!
By now, you've built DAGs with Python and the Taskflow API, passed +Welcome to the final tutorial in our Airflow series! By now, you've built DAGs with Python and the TaskFlow API, passed data with XComs, and chained tasks together into clear, reusable workflows. In this tutorial we'll take it a step further by introducing the **Object Storage API**. This API makes it easier to @@ -108,7 +108,7 @@ Here's what's happening: - We generate a filename based on the task's logical date - Using ``ObjectStoragePath``, we write the data directly to cloud storage as Parquet -This is a classic Taskflow pattern. The object key changes each day, allowing us to run this daily and build a dataset +This is a classic TaskFlow pattern. The object key changes each day, allowing us to run this daily and build a dataset over time. We return the final object path to be used in the next task. Why this is cool: No boto3, no GCS client setup, no credentials juggling. Just simple file semantics that work across diff --git a/airflow-core/docs/tutorial/pipeline.rst b/airflow-core/docs/tutorial/pipeline.rst index 4f25b695f4423..4928064401f9f 100644 --- a/airflow-core/docs/tutorial/pipeline.rst +++ b/airflow-core/docs/tutorial/pipeline.rst @@ -34,7 +34,7 @@ By the end of this tutorial, you'll have a working pipeline that: - Loads the data into a staging table - Cleans the data and upserts it into a target table -Along the way, you'll gain hands-on experience with Airflow's UI, connection system, SQL execution, and DAG authoring +Along the way, you'll gain hands-on experience with Airflow's UI, connection system, SQL execution, and Dag authoring patterns. Want to go deeper as you go? Here are two helpful references: @@ -163,7 +163,7 @@ Next, we'll download a CSV file, save it locally, and load it into ``employees_t @task def get_data(): - # NOTE: configure this as appropriate for your airflow environment + # NOTE: configure this as appropriate for your Airflow environment data_path = "/opt/airflow/dags/files/employees.csv" os.makedirs(os.path.dirname(data_path), exist_ok=True) @@ -280,7 +280,7 @@ Now that we've defined all our tasks, it's time to put them together into a DAG. @task def get_data(): - # NOTE: configure this as appropriate for your airflow environment + # NOTE: configure this as appropriate for your Airflow environment data_path = "/opt/airflow/dags/files/employees.csv" os.makedirs(os.path.dirname(data_path), exist_ok=True) diff --git a/airflow-core/docs/tutorial/taskflow.rst b/airflow-core/docs/tutorial/taskflow.rst index e535e297d3d39..8cef080ab6eaf 100644 --- a/airflow-core/docs/tutorial/taskflow.rst +++ b/airflow-core/docs/tutorial/taskflow.rst @@ -19,7 +19,7 @@ Pythonic DAGs with the TaskFlow API =================================== -In the first tutorial, you built your first Airflow DAG using traditional Operators like ``PythonOperator``. +In the first tutorial, you built your first Airflow DAG using traditional Operators like ``BashOperator``. Now let's look at a more modern and Pythonic way to write workflows using the **TaskFlow API** — introduced in Airflow 2.0. @@ -73,7 +73,7 @@ connected. Step 2: Write Your Tasks with ``@task`` --------------------------------------- -With Taskflow, each task is just a regular Python function. You can use the ``@task`` decorator to turn it into a task +With TaskFlow, each task is just a regular Python function. You can use the ``@task`` decorator to turn it into a task that Airflow can schedule and run. Here's the ``extract`` task: .. 
exampleinclude:: /../src/airflow/example_dags/tutorial_taskflow_api.py
@@ -135,7 +135,8 @@ Here's what the same DAG might have looked like using the traditional approach:
    import json

    import pendulum

-    from airflow.sdk import DAG, PythonOperator
+    from airflow.sdk import DAG
+    from airflow.providers.standard.operators.python import PythonOperator


    def extract():
@@ -159,7 +160,7 @@ Here's what the same DAG might have looked like using the traditional approach:

    with DAG(
        dag_id="legacy_etl_pipeline",
-        schedule_interval=None,
+        schedule=None,
        start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
        catchup=False,
        tags=["example"],
@@ -173,7 +174,7 @@ Here's what the same DAG might have looked like using the traditional approach:

.. note:: This version produces the same result as the TaskFlow API example, but requires explicit management of ``XComs`` and task dependencies.

-The Taskflow Way
+The TaskFlow Way
''''''''''''''''

Using TaskFlow, all of this is handled automatically.
@@ -239,7 +240,7 @@ Nice work! You've now written your first pipeline using the TaskFlow API. Curiou

.. _advanced-taskflow-patterns:

-Advanced Taskflow Patterns
+Advanced TaskFlow Patterns
--------------------------

Once you're comfortable with the basics, here are a few powerful techniques you can try.
@@ -269,7 +270,7 @@ system-level packages. TaskFlow supports multiple execution environments to isol
Creates a temporary virtualenv at task runtime. Great for experimental or dynamic tasks, but may have cold start
overhead.

-.. exampleinclude:: /../src/airflow/example_dags/example_python_decorator.py
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_python_decorator.py
   :language: python
   :dedent: 4
   :start-after: [START howto_operator_python_venv]
@@ -283,7 +284,7 @@ overhead.

Executes the task using a pre-installed Python interpreter — ideal for consistent environments or shared
virtualenvs.

-.. exampleinclude:: /../src/airflow/example_dags/example_python_decorator.py
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_python_decorator.py
   :language: python
   :dedent: 4
   :start-after: [START howto_operator_external_python]
@@ -333,7 +334,7 @@ Using Sensors

Use ``@task.sensor`` to build lightweight, reusable sensors using Python functions. These support both poke and
reschedule modes.

-.. exampleinclude:: /../src/airflow/example_dags/example_sensor_decorator.py
+.. exampleinclude:: /../../providers/standard/src/airflow/providers/standard/example_dags/example_sensor_decorator.py
   :language: python
   :start-after: [START tutorial]
   :end-before: [END tutorial]
@@ -344,7 +345,7 @@ Mixing with Traditional Tasks

You can combine decorated tasks with classic Operators. This is helpful when using community providers or when
migrating incrementally to TaskFlow.

-You can chain Taskflow and traditional tasks using ``>>`` or pass data using the ``.output`` attribute.
+You can chain TaskFlow and traditional tasks using ``>>`` or pass data using the ``.output`` attribute.

.. _taskflow/accessing_context_variables:
@@ -388,7 +389,7 @@ method.

..
code-block:: python

-    from airflow.providers.standard.operators.python import get_current_context
+    from airflow.sdk import get_current_context


    def some_function_in_your_library():
@@ -423,6 +424,6 @@ What's Next

Now that you've seen how to build clean, maintainable DAGs using the TaskFlow API, here are some good next steps:

-- Explore asset-aware workflows in :doc:`/authoring-and-scheduling/datasets`
+- Explore asset-aware workflows in :doc:`/authoring-and-scheduling/asset-scheduling`
- Dive into scheduling patterns in :ref:`Scheduling Options `
- Move to the next tutorial: :doc:`/tutorial/pipeline`
diff --git a/airflow-core/docs/ui.rst b/airflow-core/docs/ui.rst
index cfdb210b4998e..20a91cd49220b 100644
--- a/airflow-core/docs/ui.rst
+++ b/airflow-core/docs/ui.rst
@@ -17,221 +17,523 @@

-UI / Screenshots
-=================
-The Airflow UI makes it easy to monitor and troubleshoot your data pipelines.
-Here's a quick overview of some of the features and visualizations you
-can find in the Airflow UI.
+UI Overview
+===========

+The Airflow UI provides a powerful way to monitor, manage, and troubleshoot your data pipelines and data assets. As of
+Airflow 3, the UI has been refreshed with a modern look, support for dark and light themes, and a redesigned navigation
+experience.

+This guide offers a reference-style walkthrough of key UI components, with annotated screenshots to help new and
+experienced users alike.

-Dags View
-.........
+.. note::
+   Screenshots in this guide use **dark theme** by default. Select views are also shown in **light theme** for comparison. You can toggle themes from user settings located at the bottom corner of the Airflow UI.

-List of the dags in your environment, and a set of shortcuts to useful pages.
-You can see exactly how many tasks succeeded, failed, or are currently
-running at a glance.
+.. _ui-home:

-In order to filter dags (e.g. by team), you can add tags in each DAG.
-The filter is saved in a cookie and can be reset by the reset button.
-For example:
+Home Page
+---------

+The Home Page provides a high-level overview of the system state and recent activity. It is the default landing page in
+Airflow 3 and includes:

-.. code-block:: python
+- **Health indicators** for system components such as the MetaDatabase, Scheduler, Triggerer, and Dag Processor
+- **Quick links** to DAGs filtered by status (e.g., Failed DAGs, Running DAGs, Active DAGs)
+- **DAG and Task Instance history**, showing counts and success/failure rates over a selectable time range
+- **Recent asset events**, including materializations and triggered DAGs

-    dag = DAG("dag", tags=["team1", "sql"])
+This page is useful for quickly assessing the health of your environment and identifying recent issues or
+asset-triggered events.

+.. image:: img/ui-dark/home_dark.png
+   :alt: Airflow Home Page showing system health, DAG/task stats, and asset events (Dark Mode)

-------------
+|

-.. image:: img/dags.png
+.. image:: img/ui-light/home_light.png
+   :alt: Airflow Home Page showing system health, DAG/task stats, and asset events (Light Mode)

-------------
+.. _ui-dag-list:

+DAG List View
+-------------

-.. _ui:cluster-activity-view:
+The DAG List View appears when you click the **DAGs** tab in the main navigation bar. It displays all DAGs available in
+your environment, with a clear summary of their status, recent runs, and configuration.

-Cluster Activity View
-.....................
+Each row includes: -Native Airflow dashboard page into the UI to collect several useful metrics for monitoring your Airflow cluster. +- **DAG ID** +- **Schedule** and next run time +- **Status of the latest DAG run** +- **Bar chart of recent runs** +- **Tags**, which can be used for grouping or filtering DAGs (e.g., ``example``, ``produces``) +- **Pause/resume toggle** +- Links to access DAG-level views ------------- +At the top of the view, you can: -.. image:: img/cluster_activity.png +- Use **filters** for DAG status, schedule state, and tags +- Use **search** or **advanced search (⌘+K)** to find specific DAGs +- Sort the list using the dropdown (e.g., Latest Run Start Date) ------------- +.. image:: img/ui-dark/dag_list.png + :alt: DAG List View in dark mode showing search, filters, and DAG-level controls +| -.. _ui:assets-view: +.. image:: img/ui-light/dag_list.png + :alt: DAG List View in light mode showing the same DAGs and actions for comparison -Asset View -............. +| -A combined listing of the current assets and a graph illustrating how they are produced and consumed by dags. +Some DAGs in this list may interact with data assets. For example, DAGs that are triggered by asset conditions may +display popups showing upstream asset inputs. -Clicking on any dataset in either the list or the graph will highlight it and its relationships, and filter the list to show the recent history of task instances that have updated that dataset and whether it has triggered further DAG runs. +.. image:: img/ui-dark/dag_list_asset_condition_popup.png + :alt: DAG List View showing asset condition popup (Dark Mode) ------------- +| -.. image:: img/assets.png +.. image:: img/ui-light/dag_list_asset_condition_popup.png + :alt: DAG List View showing asset condition popup (Light Mode) ------------- +.. _ui-dag-details: + +DAG Details Page +---------------- + +Clicking a DAG from the list opens the DAG Details Page. This view offers centralized access to a DAG's metadata, recent +activity, and task-level diagnostics. +Key elements include: + +- **DAG metadata**, including ID, owner, tags, schedule, and latest DAG version +- **Action buttons** to trigger the DAG, reparse it, or pause/resume +- **Tabbed interface**: Overview (recent failures, run counts, task logs); Grid View (status heatmap); Graph View (task dependencies); Runs (full run history); Tasks (aggregated stats); Events (system- or asset-triggered); Code (DAG source); and Details (extended metadata) + +This page also includes a visual **timeline of recent DAG runs** and a **log preview for failures**, helping users quickly identify issues across runs. + +.. image:: img/ui-dark/dag_overview_dashboard.png + :alt: DAG Details Page in dark mode showing overview dashboard and failure diagnostics + +| + +.. image:: img/ui-light/dag_overview_dashboard.png + :alt: DAG Details Page in light mode showing overview dashboard and failure diagnostics + +.. _ui-grid-view: Grid View -......... +''''''''' + +The Grid View is the primary interface for inspecting DAG runs and task states. It offers an interactive way to debug, +retry, or monitor workflows over time. 
+ +Use Grid View to: + +- **Understand the status of recent DAG runs** at a glance +- **Identify failed or retried tasks** by color and tooltip +- **Take action** by clicking a task cell to view logs or mark task instances as successful, failed, or cleared +- **Filter tasks** by name or partial ID +- **Select a run range**, like "last 25 runs" using the dropdown above the grid + +Each row represents a task, and each column represents a DAG run. You can hover over any task instance for more detail, +or click to drill down into logs and metadata. + +.. image:: img/ui-dark/dag_overview_grid.png + :alt: Grid View showing DAG run status matrix with varied task states (Dark Mode) + +| + +.. image:: img/ui-light/dag_overview_grid.png + :alt: Grid View showing DAG run status matrix with varied task states (Light Mode) + +.. _ui-graph-view: + +Graph View +'''''''''' + +The Graph View shows the logical structure of your DAG—how tasks are connected, what order they run in, and how +branching or retries are configured. + +This view is helpful when: + +- **Debugging why a task didn't run** (e.g., skipped due to a trigger rule) +- **Understanding task dependencies** across complex pipelines +- **Inspecting run-specific task states** (e.g., success, failed, upstream failed) + +Each node represents a task. Edges show the dependencies between them. You can click any task to view its metadata and +recent run history. + +Use the dropdown at the top to switch between DAG runs and see how task states changed across executions. + +.. image:: img/ui-dark/dag_overview_graph.png + :alt: Graph View showing DAG structure with no DAG run selected (Dark Mode) + +| + +.. image:: img/ui-light/dag_overview_graph.png + :alt: Graph View showing DAG structure with no DAG run selected (Light Mode) + +.. _ui-dag-tabs: + +DAG Tabs +-------- +In addition to the interactive views like Grid and Graph, the DAG Details page includes several other tabs that provide +deeper insights and metadata: + +Runs Tab +'''''''' +The **Runs** tab displays a sortable table of all DAG runs, along with their status, execution duration, run type, and DAG version. + +.. image:: img/ui-dark/dag_overview_runs.png + :alt: DAG Runs Tab (Dark Mode) + +| + +.. image:: img/ui-light/dag_overview_runs.png + :alt: DAG Runs Tab (Light Mode) + +| + +Tasks Tab +''''''''' + +The **Tasks** tab shows metadata for each task in the DAG, including operator type, trigger rule, most recent run status, and run history. + +.. image:: img/ui-dark/dag_overview_tasks.png + :alt: DAG Tasks Tab (Dark Mode) + +| + +.. image:: img/ui-light/dag_overview_tasks.png + :alt: DAG Tasks Tab (Light Mode) + +| + +Events Tab +'''''''''' + +The **Events** tab surfaces structured events related to the DAG, such as DAG triggers and version patches. This tab is especially useful for DAG versioning and troubleshooting changes. + +.. image:: img/ui-dark/dag_overview_events.png + :alt: DAG Events Tab (Dark Mode) + +| + +.. image:: img/ui-light/dag_overview_events.png + :alt: DAG Events Tab (Light Mode) + +Code Tab +'''''''' + +The **Code** tab displays the current version of the DAG definition, including the timestamp of the last parse. Users can view the code for any specific DAG version. + +.. image:: img/ui-dark/dag_overview_code.png + :alt: DAG Code Tab (Dark Mode) + +| + +.. 
image:: img/ui-light/dag_overview_code.png + :alt: DAG Code Tab (Light Mode) + +| + +Details Tab +''''''''''' + +The **Details** tab provides configuration details and metadata for the DAG, including schedule, file location, concurrency limits, and version identifiers. -A bar chart and grid representation of the DAG that spans across time. -The top row is a chart of DAG Runs by duration, -and below, task instances. If a pipeline is late, -you can quickly see where the different steps are and identify -the blocking ones. +.. image:: img/ui-dark/dag_overview_details.png + :alt: DAG Details Tab (Dark Mode) +| + +.. image:: img/ui-light/dag_overview_details.png + :alt: DAG Details Tab (Light Mode) + +.. _ui-dag-runs: + +DAG Run View ------------ +Each DAG Run has its own view, accessible by selecting a specific row in the DAG's **Runs** tab. The DAG Run view +displays metadata about the selected run, as well as task-level details, rendered code, and more. + +.. image:: img/ui-dark/dag_run_task_instances.png + :alt: DAG Run - Task Instances tab (dark mode) -.. image:: img/grid.png +| + +Key elements include: + +- **DAG Run metadata**, including logical date, run type, duration, DAG version, and parsed time +- **Action buttons** to clear or mark the run, or add a note +- A persistent **Grid View sidebar**, which shows task durations and states across recent DAG runs. This helps spot recurring issues or performance trends at a glance. + +DAG Run Tabs ------------ -The details panel will update when selecting a DAG Run by clicking on a duration bar: +Task Instances +'''''''''''''' -.. image:: img/grid_run_details.png +Displays the status and metadata for each task instance within the DAG Run. Columns include: -Or selecting a Task Instance by clicking on a status box: +- Task ID +- State +- Start and End Dates +- Try Number +- Operator Type +- Duration +- DAG Version -.. image:: img/grid_instance_details.png +Each row also includes a mini Gantt-style timeline that visually represents the task's duration. -Or selecting a Task across all runs by click on the task_id: +.. image:: img/ui-light/dag_run_task_instances.png + :alt: DAG Run - Task Instances (light mode) -.. image:: img/grid_task_details.png +Events +'''''' -Manual runs are indicated by a play icon (just like the Trigger DAG button). -Asset triggered runs are indicated by a database icon: +If available, this tab lists system-level or asset-triggered events that contributed to this DAG Run's execution. -.. image:: img/run_types.png +Code +'''' -Task groups are indicated by a caret and can be opened or closed: +Displays the DAG source code as it was at the time this run was parsed. This view is helpful for debugging version drift +or comparing behavior across DAG Runs that used different code. -.. image:: img/grid_task_group.png +DAG Run code for ``hello >> airflow()``: -Mapped Tasks are indicated by square brackets and will show a table of each mapped task instance in the Mapped Tasks panel: +.. image:: img/ui-dark/dag_run_code_hello_airflow.png + :alt: DAG Run Code Snapshot - airflow() (dark mode) -.. image:: img/grid_mapped_task.png +| ------------- +.. image:: img/ui-light/dag_run_code_hello_airflow.png + :alt: DAG Run Code Snapshot - airflow() (light mode) + +| + +DAG Run code for ``hello >> world()``: + +.. image:: img/ui-dark/dag_run_code_hello_world.png + :alt: DAG Run Code Snapshot - world() (dark mode) + +| + +.. 
image:: img/ui-light/dag_run_code_hello_world.png + :alt: DAG Run Code Snapshot - world() (light mode) + +Details +''''''' + +Provides extended metadata for the DAG Run, including: +- Run ID and Trigger Type +- Queued At, Start and End Time, and Duration +- Data Interval boundaries +- Trigger Source and Run Config +- DAG Version ID and Bundle Name -.. _ui:graph-view: +.. image:: img/ui-dark/dag_run_details.png + :alt: DAG Run - Details tab (dark mode) + +| + +.. image:: img/ui-light/dag_run_details.png + :alt: DAG Run - Details tab (light mode) Graph View -.......... +'''''''''' -The graph view is perhaps the most comprehensive. Visualize your DAG's -dependencies and their current status for a specific run. +Shows the DAG's task dependency structure overlaid with the status of each task in this specific run. This is useful for visual debugging of task failure paths or identifying downstream blockers. ------------- +Each node includes a visual indicator of task duration. -.. image:: img/graph.png +.. image:: img/ui-dark/dag_run_graph.png + :alt: DAG Run - Graph View (dark mode) ------------- +| -Calendar View -............. +.. image:: img/ui-light/dag_run_graph.png + :alt: DAG Run - Graph View (light mode) -The calendar view gives you an overview of your entire DAG's history over months or even years. -Letting you quickly see trends of the overall success/failure rate of runs over time. +.. _ui-ti-view: ------------- +Task Instance View +------------------ -.. image:: img/calendar.png +When you click on a specific task from the DAG Run view, you're brought to the **Task Instance View**, which shows +detailed logs and metadata for that individual task execution. ------------- +.. image:: img/ui-dark/dag_task_instance_logs.png + :alt: Task Logs (dark mode) -Variable View -............. +.. _ui-ti-tabs: -The variable view allows you to list, create, edit or delete the key-value pair -of a variable used during jobs. The value of a variable will be hidden if the key contains -any words in ('password', 'secret', 'passwd', 'authorization', 'api_key', 'apikey', 'access_token') -by default, but can be configured to show in cleartext. See :ref:`security:mask-sensitive-values`. +Task Instance Tabs +------------------ ------------- +Each task instance has a tabbed view providing access to logs, rendered templates, XComs, and execution metadata. -.. image:: img/ui-dark/variable_hidden.png +Logs +'''' +The default tab shows the task logs, which include system output, error messages, and traceback information. This is the first place to look when a task fails. ------------- +.. image:: img/ui-light/dag_task_instance_logs.png + :alt: Task Logs (light mode) -Gantt Chart -........... +Rendered Templates +'''''''''''''''''' +Displays the rendered version of templated fields in your task. Useful for debugging context variables or verifying +dynamic content. -The Gantt chart lets you analyse task duration and overlap. You can quickly -identify bottlenecks and where the bulk of the time is spent for specific -DAG runs. +XCom +'''' +Shows any values pushed via ``XCom.push()`` or returned from Python functions when using TaskFlow. ------------- +.. image:: img/ui-dark/dag_run_task_instance_xcom.png + :alt: Task Instance - XCom tab (dark mode) -.. image:: img/gantt.png +| ------------- +.. image:: img/ui-light/dag_run_task_instance_xcom.png + :alt: Task Instance - XCom tab (light mode) -.. _ui:task-duration: +Events +'''''' +If present, displays relevant events related to this specific task instance execution. 
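To see where the values in the XCom tab described above come from, here is a minimal sketch of a Dag whose task instances populate that tab, assuming the Airflow 3 ``airflow.sdk`` decorators; the dag and task names are illustrative only:

.. code-block:: python

    import pendulum

    from airflow.sdk import dag, task


    @dag(schedule=None, start_date=pendulum.datetime(2025, 1, 1, tz="UTC"))
    def xcom_tab_demo():
        @task
        def produce() -> dict:
            # The return value is pushed to XCom under the "return_value" key
            # and appears in this task instance's XCom tab.
            return {"rows": 42}

        @task
        def consume(payload: dict) -> None:
            # TaskFlow pulls the upstream value from XCom automatically.
            print(payload["rows"])

        consume(produce())


    xcom_tab_demo()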
-Task Duration -............. +Code +'''' +Shows the DAG source code parsed at the time of execution. This helps verify what version of the DAG the task ran with. -The duration of your different tasks over the past N runs. This view lets -you find outliers and quickly understand where the time is spent in your -DAG over many runs. +Details +''''''' +Displays runtime metadata about the task instance, including: +- Task ID and State +- DAG Run ID, DAG Version, and Bundle Name +- Operator used and runtime duration +- Pool and slot usage +- Executor and configuration ------------- +.. image:: img/ui-dark/dag_task_instance_details.png + :alt: Task Instance - Details tab (dark mode) -.. image:: img/duration.png +| ------------- +.. image:: img/ui-light/dag_task_instance_details.png + :alt: Task Instance - Details tab (light mode) -.. _ui:landing-times: +.. _ui-asset-views: -Landing Times -............. +Asset Views +----------- -The landing time for a task instance is the delta between the dag run's data interval end -(typically this means when the dag "should" run) and the dag run completion time. +The **Assets** section provides a dedicated interface to monitor and debug asset-centric workflows. Assets represent +logical data units—such as files, tables, or models—that tasks can produce or consume. Airflow tracks these dependencies +and provides visualizations to better understand their orchestration. ------------- +Asset List +'''''''''' -.. image:: img/landing_times.png +The Asset List shows all known assets, grouped by name. For each asset, you can see: ------------- +- The group the asset belongs to (if any) +- The DAGs that consume the asset +- The tasks that produce the asset -Code View -......... +Hovering over a count of DAGs or tasks shows a tooltip with the full list of producers or consumers. -Transparency is everything. While the code for your pipeline is in source -control, this is a quick way to get to the code that generates the DAG and -provide yet more context. +.. image:: img/ui-dark/asset_list_consuming_dags.png + :alt: Asset Graph View (dark mode) ------------- +| -.. image:: img/code.png +.. image:: img/ui-light/asset_list_consuming_dags.png + :alt: Asset Graph View (light mode) -Trigger Form -............ +Clicking on the link takes you to the Asset Graph View. -If you trigger a manual DAG run with the arrow-button, a form is displayed. -The form display is based on the DAG Parameters as described in :doc:`core-concepts/params`. +Asset Graph View +'''''''''''''''' ------------- +The Asset Graph View shows the asset in context, including upstream producers and downstream consumers. You can use this view to: + +- Understand asset lineage and the DAGs involved +- Trigger asset events manually +- View recent asset events and the DAG runs they triggered + +.. image:: img/ui-dark/asset_view.png + :alt: Asset Graph View (dark mode) + +| + +.. image:: img/ui-light/asset_view.png + :alt: Asset Graph View (light mode) + + +Graph Overlays in DAG View +'''''''''''''''''''''''''' + +When a DAG contains asset-producing or asset-consuming tasks, you can enable asset overlays on the DAG Graph view. Toggle the switches next to each asset to: + +- See how assets flow between DAGs +- Inspect asset-triggered dependencies + +Two graph modes are available: + +- **All DAG Dependencies**: Shows all DAG-to-DAG and task-level connections + + .. image:: img/ui-dark/dag_graph_all_dependencies.png + :alt: DAG Graph View - All Dependencies (dark mode) + + | -.. 
image:: img/trigger-dag-tutorial-form-1.png + .. image:: img/ui-light/dag_graph_all_dependencies.png + :alt: DAG Graph View - All Dependencies (light mode) -Audit Log -............. + | -See all events related to a DAG. Filter events by changing the Task and DAG Run -selection and by including/excluding different event names. +- **External Conditions**: Shows only DAGs triggered via asset events + + .. image:: img/ui-dark/dag_graph_external_conditions.png + :alt: DAG Graph View - External Conditions Only (dark mode) + + | + + .. image:: img/ui-light/dag_graph_external_conditions.png + :alt: DAG Graph View - External Conditions Only (light mode) + +.. _ui-admin-views: + +Admin Views +----------- + +The **Admin** tab provides system-level tools for configuring and extending Airflow. These views are primarily intended for administrators and platform operators responsible for deployment, integration, and performance tuning. + +Key pages include: + +- **Variables** – Store key-value pairs accessible from DAGs. Variables can be used to manage environment-specific parameters or secrets. +- **Connections** – Define connection URIs to external systems such as databases, cloud services, or APIs. These are consumed by Airflow operators and hooks. +- **Pools** – Control resource allocation by limiting the number of concurrently running tasks assigned to a named pool. Useful for managing contention or quota-constrained systems. +- **Providers** – View installed provider packages (e.g., ``apache-airflow-providers-google``), including available hooks, sensors, and operators. This is helpful for verifying provider versions or troubleshooting import errors. +- **Plugins** – Inspect registered Airflow plugins that extend the platform via custom operators, macros, or UI elements. +- **Config** – View the full effective Airflow configuration as parsed from ``airflow.cfg``, environment variables, or overridden defaults. This can help debug issues related to scheduler behavior, secrets backends, and more. + +.. note:: + The Admin tab is only visible to users with appropriate RBAC permissions. ------------ -.. image:: img/audit_log.png +.. image:: img/ui-dark/variable_hidden.png ------------ + +.. image:: img/ui-dark/admin_connections.png + +------------ + +.. image:: img/ui-dark/admin_connections_add.png diff --git a/airflow-core/hatch_build.py b/airflow-core/hatch_build.py index 438767bcd9d5d..d691e6c25cd1d 100644 --- a/airflow-core/hatch_build.py +++ b/airflow-core/hatch_build.py @@ -19,10 +19,10 @@ import logging import os import shutil -from collections.abc import Iterable +from collections.abc import Callable, Iterable from pathlib import Path from subprocess import run -from typing import Any, Callable +from typing import Any from hatchling.builders.config import BuilderConfig from hatchling.builders.plugin.interface import BuilderInterface diff --git a/airflow-core/newsfragments/24842.significant.rst b/airflow-core/newsfragments/24842.significant.rst deleted file mode 100644 index f1b5e57cba6d5..0000000000000 --- a/airflow-core/newsfragments/24842.significant.rst +++ /dev/null @@ -1,17 +0,0 @@ -Default DAG schedule changed to *None* - -When a *schedule* parameter is not passed to the ``DAG`` constructor, Airflow -now defaults to never automatically schedule the DAG at all. The created DAG -can still be manually triggered, either by the user directly, or from another -DAG with ``TriggerDagRunOperator``. 
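For context on the behaviour this removed newsfragment describes, here is a minimal sketch of a never-scheduled Dag being started from a scheduled one; the import path assumes the ``standard`` provider layout, and the dag ids are illustrative only:

.. code-block:: python

    import pendulum

    from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
    from airflow.sdk import DAG

    # No schedule argument: this DAG is never scheduled automatically.
    with DAG(dag_id="unscheduled_target", start_date=pendulum.datetime(2025, 1, 1, tz="UTC")):
        ...

    # A scheduled controller DAG can still kick it off on demand.
    with DAG(
        dag_id="controller",
        schedule="@daily",
        start_date=pendulum.datetime(2025, 1, 1, tz="UTC"),
    ):
        TriggerDagRunOperator(task_id="run_target", trigger_dag_id="unscheduled_target")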
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/40029.significant.rst b/airflow-core/newsfragments/40029.significant.rst deleted file mode 100644 index 1d9bc26ef858a..0000000000000 --- a/airflow-core/newsfragments/40029.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -Removed deprecated airflow configuration ``webserver.allow_raw_html_descriptions`` from UI Trigger forms. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``webserver.allow_raw_html_descriptions`` diff --git a/airflow-core/newsfragments/40931.significant.rst b/airflow-core/newsfragments/40931.significant.rst deleted file mode 100644 index 7893e6dc0a014..0000000000000 --- a/airflow-core/newsfragments/40931.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Removed dagbag deprecated ``store_serialized_dags`` parameter. Please use ``read_dags_from_db`` parameter. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/40936.bugfix.rst b/airflow-core/newsfragments/40936.bugfix.rst deleted file mode 100644 index 207aeb97521af..0000000000000 --- a/airflow-core/newsfragments/40936.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix: ``on_success_callback`` will no longer execute if a task is skipped. Previously, this callback was triggered even when the task was skipped, which could lead to unintended behavior or inconsistencies in downstream processes. This is a breaking change because workflows that rely on ``on_success_callback`` running for skipped tasks will need to be updated. Consider updating your DAGs to handle cases where the callback is not invoked due to task skipping. diff --git a/airflow-core/newsfragments/41096.significant.rst b/airflow-core/newsfragments/41096.significant.rst deleted file mode 100644 index e08520b14255d..0000000000000 --- a/airflow-core/newsfragments/41096.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -Removed deprecated ``processor_poll_interval`` configuration parameter from ``scheduler`` section. Please use ``scheduler_idle_sleep_time`` configuration parameter. 
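For the option rename this removed newsfragment refers to, one quick way to confirm the value in effect is the configuration API; a minimal sketch, assuming Airflow is importable in the current environment:

.. code-block:: python

    from airflow.configuration import conf

    # The replacement option lives in the [scheduler] section.
    idle_sleep = conf.getfloat("scheduler", "scheduler_idle_sleep_time")
    print(f"Scheduler sleeps {idle_sleep}s between scheduling loops when idle")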
- -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``scheduler.processor_poll_interval`` → ``scheduler.scheduler_idle_sleep_time`` diff --git a/airflow-core/newsfragments/41348.significant.rst b/airflow-core/newsfragments/41348.significant.rst deleted file mode 100644 index b212a5f18e35f..0000000000000 --- a/airflow-core/newsfragments/41348.significant.rst +++ /dev/null @@ -1,324 +0,0 @@ -Rename ``Dataset`` as ``Asset`` - -* list of renamed objects - - * Rename module ``airflow.api_connexion.schemas.dataset_schema`` as ``airflow.api_connexion.schemas.asset_schema`` - - * Rename variable ``create_dataset_event_schema`` as ``create_asset_event_schema`` - * Rename variable ``dataset_collection_schema`` as ``asset_collection_schema`` - * Rename variable ``dataset_event_collection_schema`` as ``asset_event_collection_schema`` - * Rename variable ``dataset_event_schema`` as ``asset_event_schema`` - * Rename variable ``dataset_schema`` as ``asset_schema`` - * Rename class ``TaskOutletDatasetReferenceSchema`` as ``TaskOutletAssetReferenceSchema`` - * Rename class ``DagScheduleDatasetReferenceSchema`` as ``DagScheduleAssetReferenceSchema`` - * Rename class ``DatasetAliasSchema`` as ``AssetAliasSchema`` - * Rename class ``DatasetSchema`` as ``AssetSchema`` - * Rename class ``DatasetCollection`` as ``AssetCollection`` - * Rename class ``DatasetEventSchema`` as ``AssetEventSchema`` - * Rename class ``DatasetEventCollection`` as ``AssetEventCollection`` - * Rename class ``DatasetEventCollectionSchema`` as ``AssetEventCollectionSchema`` - * Rename class ``CreateDatasetEventSchema`` as ``CreateAssetEventSchema`` - - * Move module ``airflow.datasets`` to ``airflow.sdk.definitions.asset`` - - * Rename class ``DatasetAlias`` as ``AssetAlias`` - * Rename class ``DatasetAll`` as ``AssetAll`` - * Rename class ``DatasetAny`` as ``AssetAny`` - * Rename function ``expand_alias_to_datasets`` as ``expand_alias_to_assets`` - * Rename class ``DatasetAliasEvent`` as ``AssetAliasEvent`` - - * Rename attribute ``dest_dataset_uri`` as ``dest_asset_uri`` - - * Rename class ``BaseDataset`` as ``BaseAsset`` - - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - - * Rename class ``Dataset`` as ``Asset`` - - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - - * Rename class ``_DatasetBooleanCondition`` as ``_AssetBooleanCondition`` - - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - - * Rename module ``airflow.datasets.manager`` as ``airflow.assets.manager`` - - * Rename variable ``dataset_manager`` as ``asset_manager`` - * Rename function ``resolve_dataset_manager`` as ``resolve_asset_manager`` - * Rename class ``DatasetManager`` as ``AssetManager`` - - * Rename method ``register_dataset_change`` as ``register_asset_change`` - * Rename method ``create_datasets`` as ``create_assets`` - * Rename method ``register_dataset_change`` as ``notify_asset_created`` - * Rename method ``notify_dataset_changed`` as ``notify_asset_changed`` - * Rename method ``notify_dataset_alias_created`` as ``notify_asset_alias_created`` - - * Rename module ``airflow.models.dataset`` as 
``airflow.models.asset`` - - * Rename class ``DatasetDagRunQueue`` as ``AssetDagRunQueue`` - * Rename class ``DatasetEvent`` as ``AssetEvent`` - * Rename class ``DatasetModel`` as ``AssetModel`` - * Rename class ``DatasetAliasModel`` as ``AssetAliasModel`` - * Rename class ``DagScheduleDatasetReference`` as ``DagScheduleAssetReference`` - * Rename class ``TaskOutletDatasetReference`` as ``TaskOutletAssetReference`` - * Rename class ``DagScheduleDatasetAliasReference`` as ``DagScheduleAssetAliasReference`` - - * Rename module ``airflow.api_ui.views.datasets`` as ``airflow.api_ui.views.assets`` - - * Rename variable ``dataset_router`` as ``asset_rounter`` - - * Rename module ``airflow.listeners.spec.dataset`` as ``airflow.listeners.spec.asset`` - - * Rename function ``on_dataset_created`` as ``on_asset_created`` - * Rename function ``on_dataset_changed`` as ``on_asset_changed`` - - * Rename module ``airflow.timetables.datasets`` as ``airflow.timetables.assets`` - - * Rename class ``DatasetOrTimeSchedule`` as ``AssetOrTimeSchedule`` - - * Rename module ``airflow.serialization.pydantic.dataset`` as ``airflow.serialization.pydantic.asset`` - - * Rename class ``DagScheduleDatasetReferencePydantic`` as ``DagScheduleAssetReferencePydantic`` - * Rename class ``TaskOutletDatasetReferencePydantic`` as ``TaskOutletAssetReferencePydantic`` - * Rename class ``DatasetPydantic`` as ``AssetPydantic`` - * Rename class ``DatasetEventPydantic`` as ``AssetEventPydantic`` - - * Rename module ``airflow.datasets.metadata`` as ``airflow.sdk.definitions.asset.metadata`` - - * In module ``airflow.jobs.scheduler_job_runner`` - - * and its class ``SchedulerJobRunner`` - - * Rename method ``_create_dag_runs_dataset_triggered`` as ``_create_dag_runs_asset_triggered`` - * Rename method ``_orphan_unreferenced_datasets`` as ``_orphan_unreferenced_datasets`` - - * In module ``airflow.api_connexion.security`` - - * Rename decorator ``requires_access_dataset`` as ``requires_access_asset`` - - * In module ``airflow.api_fastapi.auth.managers.models.resource_details`` - - * Rename class ``DatasetDetails`` as ``AssetDetails`` - - * In module ``airflow.api_fastapi.auth.managers.base_auth_manager`` - - * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` - - * In module ``airflow.timetables.simple`` - - * Rename class ``DatasetTriggeredTimetable`` as ``AssetTriggeredTimetable`` - - * In module ``airflow.lineage.hook`` - - * Rename class ``DatasetLineageInfo`` as ``AssetLineageInfo`` - - * Rename attribute ``dataset`` as ``asset`` - - * In its class ``HookLineageCollector`` - - * Rename method ``create_dataset`` as ``create_asset`` - * Rename method ``add_input_dataset`` as ``add_input_asset`` - * Rename method ``add_output_dataset`` as ``add_output_asset`` - * Rename method ``collected_datasets`` as ``collected_assets`` - - * In module ``airflow.models.dag`` - - * Rename function ``get_dataset_triggered_next_run_info`` as ``get_asset_triggered_next_run_info`` - - * In its class ``DagModel`` - - * Rename method ``get_dataset_triggered_next_run_info`` as ``get_asset_triggered_next_run_info`` - - * In module ``airflow.models.taskinstance`` - - * and its class ``TaskInstance`` - - * Rename method ``_register_dataset_changes`` as ``_register_asset_changes`` - - * In module ``airflow.providers_manager`` - - * and its class ``ProvidersManager`` - - * Rename method ``initialize_providers_dataset_uri_resources`` as ``initialize_providers_asset_uri_resources`` - * Rename attribute ``_discover_dataset_uri_resources`` as 
``_discover_asset_uri_resources`` - * Rename property ``dataset_factories`` as ``asset_factories`` - * Rename property ``dataset_uri_handlers`` as ``asset_uri_handlers`` - * Rename property ``dataset_to_openlineage_converters`` as ``asset_to_openlineage_converters`` - - * In module ``airflow.security.permissions`` - - * Rename constant ``RESOURCE_DATASET`` as ``RESOURCE_ASSET`` - - * In module ``airflow.serialization.enums`` - - * and its class ``DagAttributeTypes`` - - * Rename attribute ``DATASET_EVENT_ACCESSORS`` as ``ASSET_EVENT_ACCESSORS`` - * Rename attribute ``DATASET_EVENT_ACCESSOR`` as ``ASSET_EVENT_ACCESSOR`` - * Rename attribute ``DATASET`` as ``ASSET`` - * Rename attribute ``DATASET_ALIAS`` as ``ASSET_ALIAS`` - * Rename attribute ``DATASET_ANY`` as ``ASSET_ANY`` - * Rename attribute ``DATASET_ALL`` as ``ASSET_ALL`` - - * In module ``airflow.serialization.pydantic.taskinstance`` - - * and its class ``TaskInstancePydantic`` - - * Rename method ``_register_dataset_changes`` as ``_register_asset_changes`` - - * In module ``airflow.serialization.serialized_objects`` - - * Rename function ``encode_dataset_condition`` as ``encode_asset_condition`` - * Rename function ``decode_dataset_condition`` as ``decode_asset_condition`` - - * In module ``airflow.timetables.base`` - - * Rename class ``_NullDataset`` as ``_NullAsset`` - - * Rename method ``iter_datasets`` as ``iter_assets`` - * Rename method ``iter_dataset_aliases`` as ``iter_asset_aliases`` - - * In module ``airflow.utils.context`` - - * Rename class ``LazyDatasetEventSelectSequence`` as ``LazyAssetEventSelectSequence`` - - * In module ``airflow.www.auth`` - - * Rename function ``has_access_dataset`` as ``has_access_asset`` - - * Rename configuration ``core.dataset_manager_class`` as ``core.asset_manager_class`` and ``core.dataset_manager_kwargs`` as ``core.asset_manager_kwargs`` - * Rename example dags ``example_dataset_alias.py``, ``example_dataset_alias_with_no_taskflow.py``, ``example_datasets.py`` as ``example_asset_alias.py``, ``example_asset_alias_with_no_taskflow.py``, ``example_assets.py`` - * Rename DagDependency name ``dataset-alias``, ``dataset`` as ``asset-alias``, ``asset`` - * Rename context key ``triggering_dataset_events`` as ``triggering_asset_events`` - * Rename resource key ``dataset-uris`` as ``asset-uris`` for providers amazon, common.io, mysql, fab, postgres, trino - - * In provider ``airflow.providers.amazon.aws`` - - * Rename package ``datasets`` as ``assets`` - - * In its module ``s3`` - - * Rename method ``create_dataset`` as ``create_asset`` - * Rename method ``convert_dataset_to_openlineage`` as ``convert_asset_to_openlineage`` - - * and its module ``auth_manager.avp.entities`` - - * Rename attribute ``AvpEntities.DATASET`` as ``AvpEntities.ASSET`` - - * and its module ``auth_manager.auth_manager.aws_auth_manager`` - - * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` - - * In provider ``airflow.providers.common.io`` - - * Rename package ``datasets`` as ``assets`` - - * in its module ``file`` - - * Rename method ``create_dataset`` as ``create_asset`` - * Rename method ``convert_dataset_to_openlineage`` as ``convert_asset_to_openlineage`` - - * In provider ``airflow.providers.fab`` - - * in its module ``auth_manager.fab_auth_manager`` - - * Rename function ``is_authorized_dataset`` as ``is_authorized_asset`` - - * In provider ``airflow.providers.openlineage`` - - * in its module ``utils.utils`` - - * Rename class ``DatasetInfo`` as ``AssetInfo`` - * Rename function
``translate_airflow_dataset`` as ``translate_airflow_asset`` - - * Rename package ``airflow.providers.postgres.datasets`` as ``airflow.providers.postgres.assets`` - * Rename package ``airflow.providers.mysql.datasets`` as ``airflow.providers.mysql.assets`` - * Rename package ``airflow.providers.trino.datasets`` as ``airflow.providers.trino.assets`` - * Add module ``airflow.providers.common.compat.assets`` - * Add module ``airflow.providers.common.compat.openlineage.utils.utils`` - * Add module ``airflow.providers.common.compat.security.permissions`` - -* Types of change - - * [x] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``core.dataset_manager_class`` → ``core.asset_manager_class`` - * [x] ``core.dataset_manager_kwargs`` → ``core.asset_manager_kwargs`` - - * ruff - - * AIR302 - - * [ ] context key ``triggering_dataset_events`` → ``triggering_asset_events`` - * [x] ``airflow.api_connexion.security.requires_access_dataset`` → ``airflow.api_connexion.security.requires_access_asset`` - * [x] ``airflow.auth.managers.base_auth_manager.is_authorized_dataset`` → ``airflow.api_fastapi.auth.managers.base_auth_manager.is_authorized_asset`` - * [x] ``airflow.auth.managers.models.resource_details.DatasetDetails`` → ``airflow.api_fastapi.auth.managers.models.resource_details.AssetDetails`` - * [x] ``airflow.lineage.hook.DatasetLineageInfo`` → ``airflow.lineage.hook.AssetLineageInfo`` - * [x] ``airflow.security.permissions.RESOURCE_DATASET`` → ``airflow.security.permissions.RESOURCE_ASSET`` - * [x] ``airflow.www.auth.has_access_dataset`` → ``airflow.www.auth.has_access_asset`` - * [x] ``airflow.datasets.DatasetAliasEvent`` - * [x] ``airflow.datasets.Dataset`` → ``airflow.sdk.definitions.asset.Asset`` - * [x] ``airflow.Dataset`` → ``airflow.sdk.definitions.asset.Asset`` - * [x] ``airflow.datasets.DatasetAlias`` → ``airflow.sdk.definitions.asset.AssetAlias`` - * [x] ``airflow.datasets.DatasetAll`` → ``airflow.sdk.definitions.asset.AssetAll`` - * [x] ``airflow.datasets.DatasetAny`` → ``airflow.sdk.definitions.asset.AssetAny`` - * [x] ``airflow.datasets.metadata`` → ``airflow.sdk.definitions.asset.metadata`` - * [x] ``airflow.datasets.expand_alias_to_datasets`` → ``airflow.sdk.definitions.asset.expand_alias_to_assets`` - * [x] ``airflow.datasets.manager.dataset_manager`` → ``airflow.assets.manager`` - * [x] ``airflow.datasets.manager.resolve_dataset_manager`` → ``airflow.assets.resolve_asset_manager`` - * [x] ``airflow.datasets.manager.DatasetManager`` → ``airflow.assets.AssetManager`` - * [x] ``airflow.listeners.spec.dataset.on_dataset_created`` → ``airflow.listeners.spec.asset.on_asset_created`` - * [x] ``airflow.listeners.spec.dataset.on_dataset_changed`` → ``airflow.listeners.spec.asset.on_asset_changed`` - * [x] ``airflow.timetables.simple.DatasetTriggeredTimetable`` → ``airflow.timetables.simple.AssetTriggeredTimetable`` - * [x] ``airflow.timetables.datasets.DatasetOrTimeSchedule`` → ``airflow.timetables.assets.AssetOrTimeSchedule`` - * [x] ``airflow.providers.amazon.auth_manager.avp.entities.AvpEntities.DATASET`` → ``airflow.providers.amazon.auth_manager.avp.entities.AvpEntities.ASSET`` - * [x] ``airflow.providers.amazon.aws.datasets.s3.create_dataset`` → ``airflow.providers.amazon.aws.assets.s3.create_asset`` - * [x]
``airflow.providers.amazon.aws.datasets.s3.convert_dataset_to_openlineage`` → ``airflow.providers.amazon.aws.assets.s3.convert_asset_to_openlineage`` - * [x] ``airflow.providers.amazon.aws.datasets.s3.sanitize_uri`` → ``airflow.providers.amazon.aws.assets.s3.sanitize_uri`` - * [x] ``airflow.providers.common.io.datasets.file.convert_dataset_to_openlineage`` → ``airflow.providers.common.io.assets.file.convert_asset_to_openlineage`` - * [x] ``airflow.providers.common.io.datasets.file.sanitize_uri`` → ``airflow.providers.common.io.assets.file.sanitize_uri`` - * [x] ``airflow.providers.common.io.datasets.file.create_dataset`` → ``airflow.providers.common.io.assets.file.create_asset`` - * [x] ``airflow.providers.google.datasets.bigquery.sanitize_uri`` → ``airflow.providers.google.assets.bigquery.sanitize_uri`` - * [x] ``airflow.providers.google.datasets.gcs.create_dataset`` → ``airflow.providers.google.assets.gcs.create_asset`` - * [x] ``airflow.providers.google.datasets.gcs.sanitize_uri`` → ``airflow.providers.google.assets.gcs.sanitize_uri`` - * [x] ``airflow.providers.google.datasets.gcs.convert_dataset_to_openlineage`` → ``airflow.providers.google.assets.gcs.convert_asset_to_openlineage`` - * [x] ``airflow.providers.fab.auth_manager.fab_auth_manager.is_authorized_dataset`` → ``airflow.providers.fab.auth_manager.fab_auth_manager.is_authorized_asset`` - * [x] ``airflow.providers.openlineage.utils.utils.DatasetInfo`` → ``airflow.providers.openlineage.utils.utils.AssetInfo`` - * [x] ``airflow.providers.openlineage.utils.utils.translate_airflow_dataset`` → ``airflow.providers.openlineage.utils.utils.translate_airflow_asset`` - * [x] ``airflow.providers.postgres.datasets.postgres.sanitize_uri`` → ``airflow.providers.postgres.assets.postgres.sanitize_uri`` - * [x] ``airflow.providers.mysql.datasets.mysql.sanitize_uri`` → ``airflow.providers.mysql.assets.mysql.sanitize_uri`` - * [x] ``airflow.providers.trino.datasets.trino.sanitize_uri`` → ``airflow.providers.trino.assets.trino.sanitize_uri`` - * [x] property ``airflow.providers_manager.ProvidersManager.dataset_factories`` → ``airflow.providers_manager.ProvidersManager.asset_factories`` - * [x] property ``airflow.providers_manager.ProvidersManager.dataset_uri_handlers`` → ``airflow.providers_manager.ProvidersManager.asset_uri_handlers`` - * [x] property ``airflow.providers_manager.ProvidersManager.dataset_to_openlineage_converters`` → ``airflow.providers_manager.ProvidersManager.asset_to_openlineage_converters`` - * [x] class attribute ``airflow.lineage.hook.DatasetLineageInfo.dataset`` → ``airflow.lineage.hook.AssetLineageInfo.asset`` - * [x] method ``airflow.datasets.manager.DatasetManager.register_dataset_change`` → ``airflow.assets.manager.AssetManager.register_asset_change`` - * [x] method ``airflow.datasets.manager.DatasetManager.create_datasets`` → ``airflow.assets.manager.AssetManager.create_assets`` - * [x] method ``airflow.datasets.manager.DatasetManager.notify_dataset_created`` → ``airflow.assets.manager.AssetManager.notify_asset_created`` - * [x] method ``airflow.datasets.manager.DatasetManager.notify_dataset_changed`` → ``airflow.assets.manager.AssetManager.notify_asset_changed`` - * [x] method ``airflow.datasets.manager.DatasetManager.notify_dataset_alias_created`` → ``airflow.assets.manager.AssetManager.notify_asset_alias_created`` - * [x] method ``airflow.providers.amazon.auth_manager.aws_auth_manager.AwsAuthManager.is_authorized_dataset`` →
``airflow.providers.amazon.auth_manager.aws_auth_manager.AwsAuthManager.is_authorized_asset`` - * [x] method ``airflow.lineage.hook.HookLineageCollector.create_dataset`` → ``airflow.lineage.hook.HookLineageCollector.create_asset`` - * [x] method ``airflow.lineage.hook.HookLineageCollector.add_input_dataset`` → ``airflow.lineage.hook.HookLineageCollector.add_input_asset`` - * [x] method ``airflow.lineage.hook.HookLineageCollector.add_output_dataset`` → ``airflow.lineage.hook.HookLineageCollector.add_output_asset`` - * [x] method ``airflow.lineage.hook.HookLineageCollector.collected_datasets`` → ``airflow.lineage.hook.HookLineageCollector.collected_assets`` - * [x] method ``airflow.providers_manager.ProvidersManager.initialize_providers_dataset_uri_resources`` → ``airflow.providers_manager.ProvidersManager.initialize_providers_asset_uri_resources`` - * [x] method ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_uri`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_value`` - * [x] method ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connections`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connection`` - * [x] method ``airflow.hooks.base.BaseHook.get_connections`` → ``airflow.hooks.base.BaseHook.get_connection`` - * [x] method ``airflow.datasets.BaseDataset.iter_datasets`` → ``airflow.sdk.definitions.asset.BaseAsset.iter_assets`` - * [x] method ``airflow.datasets.BaseDataset.iter_dataset_aliases`` → ``airflow.sdk.definitions.asset.BaseAsset.iter_asset_aliases`` diff --git a/airflow-core/newsfragments/41366.significant.rst b/airflow-core/newsfragments/41366.significant.rst deleted file mode 100644 index edd588d90bd75..0000000000000 --- a/airflow-core/newsfragments/41366.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -``airflow.contrib`` modules have been removed - -All modules from ``airflow.contrib``, which were deprecated in Airflow 2, have been removed. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.contrib.*`` diff --git a/airflow-core/newsfragments/41367.significant.rst b/airflow-core/newsfragments/41367.significant.rst deleted file mode 100644 index 1e7405bd8e2ee..0000000000000 --- a/airflow-core/newsfragments/41367.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -Deprecated ``ImportError`` removed from ``airflow.models`` - -The deprecated ``ImportError`` class can no longer be imported from ``airflow.models``. -It has been moved to ``airflow.models.errors.ParseImportError``.
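- -Example update (a minimal sketch of the new import path stated above): - -.. code-block:: python - - from airflow.models.errors import ParseImportError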
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41368.significant.rst b/airflow-core/newsfragments/41368.significant.rst deleted file mode 100644 index 641696a4b3ffb..0000000000000 --- a/airflow-core/newsfragments/41368.significant.rst +++ /dev/null @@ -1,171 +0,0 @@ -Support for deprecated core imports removed - -Support for importing classes etc. from the following locations was deprecated at various times during Airflow 2's lifecycle, and has been removed: - -- ``airflow.executors`` -- ``airflow.hooks`` -- ``airflow.macros`` -- ``airflow.operators`` -- ``airflow.sensors`` - -Import from the appropriate provider or a more specific module instead. -For example, instead of ``from airflow.sensors import TimeDeltaSensor``, use ``from airflow.sensors.time_delta import TimeDeltaSensor``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.sensors.base_sensor_operator.BaseSensorOperator`` → ``airflow.sdk.bases.sensor.BaseSensorOperator`` - * [x] ``airflow.hooks.base_hook.BaseHook`` → ``airflow.hooks.base.BaseHook`` - - * AIR303 - - * [x] ``airflow.sensors.external_task_sensor.ExternalTaskMarker`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskMarker`` - * [x] ``airflow.sensors.external_task_sensor.ExternalTaskSensor`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskSensor`` - * [x] ``airflow.sensors.external_task_sensor.ExternalTaskSensorLink`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskSensorLink`` - * [x] ``airflow.sensors.time_delta_sensor.TimeDeltaSensor`` → ``airflow.providers.standard.sensors.time_delta.TimeDeltaSensor`` - * [x] ``airflow.operators.dagrun_operator.TriggerDagRunLink`` → ``airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink`` - * [x] ``airflow.operators.dagrun_operator.TriggerDagRunOperator`` → ``airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator`` - * [x] ``airflow.operators.python_operator.BranchPythonOperator`` → ``airflow.providers.standard.operators.python.BranchPythonOperator`` - * [x] ``airflow.operators.python_operator.PythonOperator`` → ``airflow.providers.standard.operators.python.PythonOperator`` - * [x] ``airflow.operators.python_operator.PythonVirtualenvOperator`` → ``airflow.providers.standard.operators.python.PythonVirtualenvOperator`` - * [x] ``airflow.operators.python_operator.ShortCircuitOperator`` → ``airflow.providers.standard.operators.python.ShortCircuitOperator`` - * [x] ``airflow.operators.latest_only_operator.LatestOnlyOperator`` → ``airflow.providers.standard.operators.latest_only.LatestOnlyOperator`` - * [x] ``airflow.operators.bash_operator.BashOperator`` → ``airflow.providers.standard.operators.bash.BashOperator`` - * [x] ``airflow.operators.branch_operator.BaseBranchOperator`` → ``airflow.providers.standard.operators.branch.BaseBranchOperator`` - * [x] ``airflow.sensors.date_time_sensor.DateTimeSensor`` → ``airflow.providers.standard.sensors.date_time.DateTimeSensor`` - * [x] ``airflow.operators.dummy.EmptyOperator`` → ``airflow.providers.standard.operators.empty.EmptyOperator`` - * [x] ``airflow.operators.dummy.DummyOperator``
→ ``airflow.providers.standard.operators.empty.EmptyOperator`` - * [x] ``airflow.operators.dummy_operator.EmptyOperator`` → ``airflow.providers.standard.operators.empty.EmptyOperator`` - * [x] ``airflow.operators.dummy_operator.DummyOperator`` → ``airflow.providers.standard.operators.empty.EmptyOperator`` - * [x] ``airflow.operators.email_operator.EmailOperator`` → ``airflow.providers.smtp.operators.smtp.EmailOperator`` - * [x] ``airflow.executors.celery_executor.CeleryExecutor`` → ``airflow.providers.celery.executors.celery_executor.CeleryExecutor`` - * [x] ``airflow.executors.celery_kubernetes_executor.CeleryKubernetesExecutor`` → ``airflow.providers.celery.executors.celery_kubernetes_executor.CeleryKubernetesExecutor`` - * [x] ``airflow.executors.dask_executor.DaskExecutor`` → ``airflow.providers.daskexecutor.executors.dask_executor.DaskExecutor`` - * [x] ``airflow.executors.kubernetes_executor.KubernetesExecutor`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor`` - * [x] ``airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler`` - * [x] ``airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher`` - * [x] ``airflow.executors.kubernetes_executor_utils.ResourceVersion`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.ResourceVersion`` - * [x] ``airflow.executors.local_kubernetes_executor.LocalKubernetesExecutor`` → ``airflow.providers.cncf.kubernetes.executors.LocalKubernetesExecutor`` - * [x] ``airflow.hooks.S3_hook.S3Hook`` → ``airflow.providers.amazon.aws.hooks.s3.S3Hook`` - * [x] ``airflow.hooks.S3_hook.provide_bucket_name`` → ``airflow.providers.amazon.aws.hooks.s3.provide_bucket_name`` - * [x] ``airflow.hooks.base_hook.BaseHook`` → ``airflow.hooks.base.BaseHook`` - * [x] ``airflow.hooks.dbapi_hook.DbApiHook`` → ``airflow.providers.common.sql.hooks.sql.DbApiHook`` - * [x] ``airflow.hooks.docker_hook.DockerHook`` → ``airflow.providers.docker.hooks.docker.DockerHook`` - * [x] ``airflow.hooks.druid_hook.DruidDbApiHook`` → ``airflow.providers.apache.druid.hooks.druid.DruidDbApiHook`` - * [x] ``airflow.hooks.druid_hook.DruidHook`` → ``airflow.providers.apache.druid.hooks.druid.DruidHook`` - * [x] ``airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES`` → ``airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES`` - * [x] ``airflow.hooks.hive_hooks.HiveCliHook`` → ``airflow.providers.apache.hive.hooks.hive.HiveCliHook`` - * [x] ``airflow.hooks.hive_hooks.HiveMetastoreHook`` → ``airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook`` - * [x] ``airflow.hooks.hive_hooks.HiveServer2Hook`` → ``airflow.providers.apache.hive.hooks.hive.HiveServer2Hook`` - * [x] ``airflow.hooks.http_hook.HttpHook`` → ``airflow.providers.http.hooks.http.HttpHook`` - * [x] ``airflow.hooks.jdbc_hook.JdbcHook`` → ``airflow.providers.jdbc.hooks.jdbc.JdbcHook`` - * [x] ``airflow.hooks.jdbc_hook.jaydebeapi`` → ``airflow.providers.jdbc.hooks.jdbc.jaydebeapi`` - * [x] ``airflow.hooks.mssql_hook.MsSqlHook`` → ``airflow.providers.microsoft.mssql.hooks.mssql.MsSqlHook`` - * [x] ``airflow.hooks.mysql_hook.MySqlHook`` → ``airflow.providers.mysql.hooks.mysql.MySqlHook`` - * [x] ``airflow.hooks.oracle_hook.OracleHook`` → ``airflow.providers.oracle.hooks.oracle.OracleHook`` - * [x] ``airflow.hooks.pig_hook.PigCliHook`` → 
``airflow.providers.apache.pig.hooks.pig.PigCliHook`` - * [x] ``airflow.hooks.postgres_hook.PostgresHook`` → ``airflow.providers.postgres.hooks.postgres.PostgresHook`` - * [x] ``airflow.hooks.presto_hook.PrestoHook`` → ``airflow.providers.presto.hooks.presto.PrestoHook`` - * [x] ``airflow.hooks.samba_hook.SambaHook`` → ``airflow.providers.samba.hooks.samba.SambaHook`` - * [x] ``airflow.hooks.slack_hook.SlackHook`` → ``airflow.providers.slack.hooks.slack.SlackHook`` - * [x] ``airflow.hooks.sqlite_hook.SqliteHook`` → ``airflow.providers.sqlite.hooks.sqlite.SqliteHook`` - * [x] ``airflow.hooks.webhdfs_hook.WebHDFSHook`` → ``airflow.providers.apache.hdfs.hooks.webhdfs.WebHDFSHook`` - * [x] ``airflow.hooks.zendesk_hook.ZendeskHook`` → ``airflow.providers.zendesk.hooks.zendesk.ZendeskHook`` - * [x] ``airflow.operators.check_operator.SQLCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` - * [x] ``airflow.operators.check_operator.SQLIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` - * [x] ``airflow.operators.check_operator.SQLThresholdCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator`` - * [x] ``airflow.operators.check_operator.SQLValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` - * [x] ``airflow.operators.check_operator.CheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` - * [x] ``airflow.operators.check_operator.IntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` - * [x] ``airflow.operators.check_operator.ThresholdCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator`` - * [x] ``airflow.operators.check_operator.ValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` - * [x] ``airflow.operators.dagrun_operator.TriggerDagRunLink`` → ``airflow.operators.trigger_dagrun.TriggerDagRunLink`` - * [x] ``airflow.operators.dagrun_operator.TriggerDagRunOperator`` → ``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` - * [x] ``airflow.operators.docker_operator.DockerOperator`` → ``airflow.providers.docker.operators.docker.DockerOperator`` - * [x] ``airflow.operators.druid_check_operator.DruidCheckOperator`` → ``airflow.providers.apache.druid.operators.druid_check.DruidCheckOperator`` - * [x] ``airflow.operators.gcs_to_s3.GCSToS3Operator`` → ``airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`` - * [x] ``airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Operator`` → ``airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator`` - * [x] ``airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer`` → ``airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator`` - * [x] ``airflow.operators.hive_operator.HiveOperator`` → ``airflow.providers.apache.hive.operators.hive.HiveOperator`` - * [x] ``airflow.operators.hive_stats_operator.HiveStatsCollectionOperator`` → ``airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator`` - * [x] ``airflow.operators.hive_to_druid.HiveToDruidOperator`` → ``airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator`` - * [x] ``airflow.operators.hive_to_druid.HiveToDruidTransfer`` → ``airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator`` - * [x] ``airflow.operators.hive_to_mysql.HiveToMySqlOperator`` → 
``airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`` - * [x] ``airflow.operators.hive_to_mysql.HiveToMySqlTransfer`` → ``airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`` - * [x] ``airflow.operators.hive_to_samba_operator.HiveToSambaOperator`` → ``airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator`` - * [x] ``airflow.operators.http_operator.SimpleHttpOperator`` → ``airflow.providers.http.operators.http.SimpleHttpOperator`` - * [x] ``airflow.operators.jdbc_operator.JdbcOperator`` → ``airflow.providers.jdbc.operators.jdbc.JdbcOperator`` - * [x] ``airflow.operators.latest_only_operator.LatestOnlyOperator`` → ``airflow.operators.latest_only.LatestOnlyOperator`` - * [x] ``airflow.operators.mssql_operator.MsSqlOperator`` → ``airflow.providers.microsoft.mssql.operators.mssql.MsSqlOperator`` - * [x] ``airflow.operators.mssql_to_hive.MsSqlToHiveOperator`` → ``airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`` - * [x] ``airflow.operators.mssql_to_hive.MsSqlToHiveTransfer`` → ``airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`` - * [x] ``airflow.operators.mysql_operator.MySqlOperator`` → ``airflow.providers.mysql.operators.mysql.MySqlOperator`` - * [x] ``airflow.operators.mysql_to_hive.MySqlToHiveOperator`` → ``airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`` - * [x] ``airflow.operators.mysql_to_hive.MySqlToHiveTransfer`` → ``airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`` - * [x] ``airflow.operators.oracle_operator.OracleOperator`` → ``airflow.providers.oracle.operators.oracle.OracleOperator`` - * [x] ``airflow.operators.papermill_operator.PapermillOperator`` → ``airflow.providers.papermill.operators.papermill.PapermillOperator`` - * [x] ``airflow.operators.pig_operator.PigOperator`` → ``airflow.providers.apache.pig.operators.pig.PigOperator`` - * [x] ``airflow.operators.postgres_operator.Mapping`` → ``airflow.providers.postgres.operators.postgres.Mapping`` - * [x] ``airflow.operators.postgres_operator.PostgresOperator`` → ``airflow.providers.postgres.operators.postgres.PostgresOperator`` - * [x] ``airflow.operators.presto_check_operator.SQLCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` - * [x] ``airflow.operators.presto_check_operator.SQLIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` - * [x] ``airflow.operators.presto_check_operator.SQLValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` - * [x] ``airflow.operators.presto_check_operator.PrestoCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` - * [x] ``airflow.operators.presto_check_operator.PrestoIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` - * [x] ``airflow.operators.presto_check_operator.PrestoValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` - * [x] ``airflow.operators.presto_to_mysql.PrestoToMySqlOperator`` → ``airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`` - * [x] ``airflow.operators.presto_to_mysql.PrestoToMySqlTransfer`` → ``airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`` - * [x] ``airflow.operators.python_operator.BranchPythonOperator`` → ``airflow.operators.python.BranchPythonOperator`` - * [x] ``airflow.operators.python_operator.PythonOperator`` →
``airflow.operators.python.PythonOperator`` - * [x] ``airflow.operators.python_operator.PythonVirtualenvOperator`` → ``airflow.operators.python.PythonVirtualenvOperator`` - * [x] ``airflow.operators.python_operator.ShortCircuitOperator`` → ``airflow.operators.python.ShortCircuitOperator`` - * [x] ``airflow.operators.redshift_to_s3_operator.RedshiftToS3Operator`` → ``airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`` - * [x] ``airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer`` → ``airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`` - * [x] ``airflow.operators.s3_file_transform_operator.S3FileTransformOperator`` → ``airflow.providers.amazon.aws.operators.s3_file_transform.S3FileTransformOperator`` - * [x] ``airflow.operators.s3_to_hive_operator.S3ToHiveOperator`` → ``airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`` - * [x] ``airflow.operators.s3_to_hive_operator.S3ToHiveTransfer`` → ``airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`` - * [x] ``airflow.operators.s3_to_redshift_operator.S3ToRedshiftOperator`` → ``airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`` - * [x] ``airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer`` → ``airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`` - * [x] ``airflow.operators.slack_operator.SlackAPIOperator`` → ``airflow.providers.slack.operators.slack.SlackAPIOperator`` - * [x] ``airflow.operators.slack_operator.SlackAPIPostOperator`` → ``airflow.providers.slack.operators.slack.SlackAPIPostOperator`` - * [x] ``airflow.operators.sql.BaseSQLOperator`` → ``airflow.providers.common.sql.operators.sql.BaseSQLOperator`` - * [x] ``airflow.operators.sql.BranchSQLOperator`` → ``airflow.providers.common.sql.operators.sql.BranchSQLOperator`` - * [x] ``airflow.operators.sql.SQLCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` - * [x] ``airflow.operators.sql.SQLColumnCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLColumnCheckOperator`` - * [x] ``airflow.operators.sql.SQLIntervalCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLIntervalCheckOperator`` - * [x] ``airflow.operators.sql.SQLTableCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLTableCheckOperator`` - * [x] ``airflow.operators.sql.SQLThresholdCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLThresholdCheckOperator`` - * [x] ``airflow.operators.sql.SQLValueCheckOperator`` → ``airflow.providers.common.sql.operators.sql.SQLValueCheckOperator`` - * [x] ``airflow.operators.sql._convert_to_float_if_possible`` → ``airflow.providers.common.sql.operators.sql._convert_to_float_if_possible`` - * [x] ``airflow.operators.sql.parse_boolean`` → ``airflow.providers.common.sql.operators.sql.parse_boolean`` - * [x] ``airflow.operators.sql_branch_operator.BranchSQLOperator`` → ``airflow.providers.common.sql.operators.sql.BranchSQLOperator`` - * [x] ``airflow.operators.sql_branch_operator.BranchSqlOperator`` → ``airflow.providers.common.sql.operators.sql.BranchSQLOperator`` - * [x] ``airflow.operators.sqlite_operator.SqliteOperator`` → ``airflow.providers.sqlite.operators.sqlite.SqliteOperator`` - * [x] ``airflow.sensors.hive_partition_sensor.HivePartitionSensor`` → ``airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor`` - * [x] ``airflow.sensors.http_sensor.HttpSensor`` → ``airflow.providers.http.sensors.http.HttpSensor`` - * [x] 
``airflow.sensors.metastore_partition_sensor.MetastorePartitionSensor`` → ``airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor`` - * [x] ``airflow.sensors.named_hive_partition_sensor.NamedHivePartitionSensor`` → ``airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor`` - * [x] ``airflow.sensors.s3_key_sensor.S3KeySensor`` → ``airflow.providers.amazon.aws.sensors.s3.S3KeySensor`` - * [x] ``airflow.sensors.sql.SqlSensor`` → ``airflow.providers.common.sql.sensors.sql.SqlSensor`` - * [x] ``airflow.sensors.sql_sensor.SqlSensor`` → ``airflow.providers.common.sql.sensors.sql.SqlSensor`` - * [x] ``airflow.sensors.web_hdfs_sensor.WebHdfsSensor`` → ``airflow.providers.apache.hdfs.sensors.web_hdfs.WebHdfsSensor`` - * [x] ``airflow.executors.kubernetes_executor_types.ALL_NAMESPACES`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.ALL_NAMESPACES`` - * [x] ``airflow.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY`` → ``airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY`` - * [x] ``airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES`` → ``airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES`` - * [x] ``airflow.executors.celery_executor.app`` → ``airflow.providers.celery.executors.celery_executor_utils.app`` - * [x] ``airflow.macros.hive.closest_ds_partition`` → ``airflow.providers.apache.hive.macros.hive.closest_ds_partition`` - * [x] ``airflow.macros.hive.max_partition`` → ``airflow.providers.apache.hive.macros.hive.max_partition`` diff --git a/airflow-core/newsfragments/41390.significant.rst b/airflow-core/newsfragments/41390.significant.rst deleted file mode 100644 index 5a88c547048fe..0000000000000 --- a/airflow-core/newsfragments/41390.significant.rst +++ /dev/null @@ -1,31 +0,0 @@ -Support for SubDags is removed - -SubDags have been removed from the following locations: - -- CLI -- API -- ``SubDagOperator`` - -This removal marks the end of SubDag support across all interfaces. Users -should transition to using TaskGroups as a more efficient and maintainable -alternative. Please ensure your DAGs are updated to -remove any usage of SubDags to maintain compatibility with future Airflow releases. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [x] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.operators.subdag.*`` diff --git a/airflow-core/newsfragments/41391.significant.rst b/airflow-core/newsfragments/41391.significant.rst deleted file mode 100644 index de169be0e4835..0000000000000 --- a/airflow-core/newsfragments/41391.significant.rst +++ /dev/null @@ -1,23 +0,0 @@ -The ``airflow.providers.standard.sensors.external_task.ExternalTaskSensorLink`` class has been removed. - -This class was deprecated and is no longer available. Users should now use -the ``airflow.providers.standard.sensors.external_task.ExternalDagLink`` class directly.
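- -Example update (a minimal sketch of the replacement import named above): - -.. code-block:: python - - from airflow.providers.standard.sensors.external_task import ExternalDagLink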
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.sensors.external_task.ExternalTaskSensorLink`` → ``airflow.sensors.external_task.ExternalDagLink`` diff --git a/airflow-core/newsfragments/41393.significant.rst b/airflow-core/newsfragments/41393.significant.rst deleted file mode 100644 index 26b3724ca4aa0..0000000000000 --- a/airflow-core/newsfragments/41393.significant.rst +++ /dev/null @@ -1,34 +0,0 @@ -The ``use_task_execution_day`` parameter has been removed from the ``DayOfWeekSensor`` class. This parameter was previously deprecated in favor of ``use_task_logical_date``. - -If your code still uses ``use_task_execution_day``, you should update it to use ``use_task_logical_date`` -instead to ensure compatibility with future Airflow versions. - -Example update: - -.. code-block:: python - - sensor = DayOfWeekSensor( - task_id="example", - week_day="Tuesday", - use_task_logical_date=True, - dag=dag, - ) - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] arguments ``use_task_execution_day`` → ``use_task_logical_date`` in ``airflow.operators.weekday.DayOfWeekSensor`` diff --git a/airflow-core/newsfragments/41394.significant.rst b/airflow-core/newsfragments/41394.significant.rst deleted file mode 100644 index 49414a50b8e31..0000000000000 --- a/airflow-core/newsfragments/41394.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -The ``airflow.models.taskmixin.TaskMixin`` class has been removed. It was previously deprecated in favor of the ``airflow.models.taskmixin.DependencyMixin`` class. - -If your code relies on ``TaskMixin``, please update it to use ``DependencyMixin`` instead -to ensure compatibility with Airflow 3.0 and beyond. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41395.significant.rst b/airflow-core/newsfragments/41395.significant.rst deleted file mode 100644 index be9427b8b9d6f..0000000000000 --- a/airflow-core/newsfragments/41395.significant.rst +++ /dev/null @@ -1,33 +0,0 @@ -**Breaking Change** - -The following deprecated functions, constants, and classes have been removed as part of Airflow 3.0 (the ``tempfile`` and ``pathlib`` replacements are sketched after this list): - -- ``airflow.executors.executor_loader.UNPICKLEABLE_EXECUTORS``: No direct replacement; this constant is no longer needed. -- ``airflow.utils.dag_cycle_tester.test_cycle`` function: Use ``airflow.utils.dag_cycle_tester.check_cycle`` instead. -- ``airflow.utils.file.TemporaryDirectory`` function: Use ``tempfile.TemporaryDirectory`` instead. -- ``airflow.utils.file.mkdirs`` function: Use ``pathlib.Path.mkdir`` instead. -- ``airflow.utils.state.SHUTDOWN`` state: No action needed; this state is no longer used. -- ``airflow.utils.state.terminating_states`` constant: No action needed; this constant is no longer used.
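- -A combined sketch of the stdlib replacements named above (assuming only standard-library behaviour of ``tempfile`` and ``pathlib``): - -.. code-block:: python - - import tempfile - from pathlib import Path - - # create a scratch directory, then a nested subdirectory inside it - with tempfile.TemporaryDirectory() as tmp_dir: - Path(tmp_dir, "nested", "dir").mkdir(parents=True, exist_ok=True)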
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.file.TemporaryDirectory`` → ``tempfile.TemporaryDirectory`` - * [x] ``airflow.utils.file.mkdirs`` → ``pathlib.Path({path}).mkdir`` - * [x] ``airflow.utils.dag_cycle_tester.test_cycle`` - * [x] ``airflow.utils.state.SHUTDOWN`` - * [x] ``airflow.utils.state.terminating_states`` diff --git a/airflow-core/newsfragments/41420.significant.rst b/airflow-core/newsfragments/41420.significant.rst deleted file mode 100644 index b2d05c97c60b4..0000000000000 --- a/airflow-core/newsfragments/41420.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Replaced Python's ``list`` with ``MutableSet`` for the property ``DAG.tags``. - -In the constructor you can still pass a list; in fact, any data structure that implements the ``Collection`` interface is accepted. - -The ``tags`` property of the ``DAG`` model is now of type ``MutableSet`` instead of ``list``, since tags never contain actual duplicates. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41429.improvement.rst b/airflow-core/newsfragments/41429.improvement.rst deleted file mode 100644 index 6d04d5dfe61af..0000000000000 --- a/airflow-core/newsfragments/41429.improvement.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``run_with_db_retries`` when the scheduler updates a deferred task as failed, to tolerate database deadlock issues. diff --git a/airflow-core/newsfragments/41434.significant.rst b/airflow-core/newsfragments/41434.significant.rst deleted file mode 100644 index 7a4eed657d11d..0000000000000 --- a/airflow-core/newsfragments/41434.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -Experimental API is removed - -The experimental API is no longer available in Airflow. Users should transition to the REST API as an alternative.
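- -A hypothetical sketch of calling the stable REST API instead (the base URL and credentials are illustrative; adjust them to your deployment and auth setup): - -.. code-block:: python - - import requests - - # list DAGs through the stable REST API rather than the removed experimental endpoints - response = requests.get("http://localhost:8080/api/v1/dags", auth=("admin", "admin")) - response.raise_for_status() - print(response.json())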
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41440.significant.rst b/airflow-core/newsfragments/41440.significant.rst deleted file mode 100644 index c23b6edfb5efc..0000000000000 --- a/airflow-core/newsfragments/41440.significant.rst +++ /dev/null @@ -1,28 +0,0 @@ -Removed unused methods / properties in ``airflow/models/dag.py`` - -* Methods removed - - * ``date_range`` - * ``is_fixed_time_schedule`` - * ``next_dagrun_after_date`` - * ``get_run_dates`` - * ``normalize_schedule`` - * ``full_filepath`` - * ``concurrency`` - * ``filepath`` - * ``concurrency_reached`` - * ``normalized_schedule_interval`` - * ``latest_execution_date`` - * ``set_dag_runs_state`` - * ``bulk_sync_to_db`` - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41453.significant.rst b/airflow-core/newsfragments/41453.significant.rst deleted file mode 100644 index 8ca0de3df3202..0000000000000 --- a/airflow-core/newsfragments/41453.significant.rst +++ /dev/null @@ -1,31 +0,0 @@ -Removed legacy scheduling arguments on DAG - -The ``schedule_interval`` and ``timetable`` arguments are removed from ``DAG``. - -The ``schedule_interval`` *attribute* has also been removed. In the API, a new -``timetable_summary`` field has been added to replace ``schedule_interval`` for -presentation purposes. - -Since the DAG object no longer has the ``schedule_interval`` attribute, -OpenLineage facets that contain the ``dag`` key produced on Airflow 3.0 or -later will also no longer contain the field.
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] arguments ``schedule_interval`` in ``DAG`` - * [x] arguments ``timetable`` in ``DAG`` diff --git a/airflow-core/newsfragments/41496.significant.rst b/airflow-core/newsfragments/41496.significant.rst deleted file mode 100644 index e19f1df46c5a7..0000000000000 --- a/airflow-core/newsfragments/41496.significant.rst +++ /dev/null @@ -1,26 +0,0 @@ -Removed deprecated methods in ``airflow/utils/dates.py`` - -* Methods removed - - * ``date_range`` - * ``days_ago`` (Use ``pendulum.today('UTC').add(days=-N, ...)``) - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.dates.date_range`` - * [x] ``airflow.utils.dates.days_ago`` → ``pendulum.today("UTC").add(days=-N, ...)`` diff --git a/airflow-core/newsfragments/41520.significant.rst b/airflow-core/newsfragments/41520.significant.rst deleted file mode 100644 index b2f4fe7351b0f..0000000000000 --- a/airflow-core/newsfragments/41520.significant.rst +++ /dev/null @@ -1,26 +0,0 @@ -Removed deprecated methods in ``airflow/utils/helpers.py`` - -* Methods removed: - - * ``chain`` (Use ``airflow.sdk.chain``) - * ``cross_downstream`` (Use ``airflow.sdk.cross_downstream``) - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.helpers.chain`` → ``airflow.sdk.chain`` - * [x] ``airflow.utils.helpers.cross_downstream`` → ``airflow.sdk.cross_downstream`` diff --git a/airflow-core/newsfragments/41533.significant.rst b/airflow-core/newsfragments/41533.significant.rst deleted file mode 100644 index 0f898f21ad332..0000000000000 --- a/airflow-core/newsfragments/41533.significant.rst +++ /dev/null @@ -1,45 +0,0 @@ -The ``load_connections`` function has been removed from the ``airflow.secrets.local_filesystem`` module. - -This function was previously deprecated in favor of ``load_connections_dict``. - -If your code still uses ``load_connections``, you should update it to use ``load_connections_dict`` -instead to ensure compatibility with future Airflow versions. - -Example update: - -.. code-block:: python - - connection_by_conn_id = local_filesystem.load_connections_dict(file_path="a.json") - -The ``get_connections`` function has been removed from the ``LocalFilesystemBackend`` class. -This function was previously deprecated in favor of ``get_connection``. - -If your code still uses ``get_connections``, you should update it to use ``get_connection`` -instead to ensure compatibility with future Airflow versions. - -Example update: -
-.. code-block:: python - - connection_by_conn_id = LocalFilesystemBackend().get_connection(conn_id="conn_id") - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.secrets.local_filesystem.load_connections`` → ``airflow.secrets.local_filesystem.load_connections_dict`` - * [x] ``airflow.secrets.local_filesystem.LocalFilesystemBackend.get_connections`` → ``airflow.secrets.local_filesystem.LocalFilesystemBackend.get_connection`` diff --git a/airflow-core/newsfragments/41539.significant.rst b/airflow-core/newsfragments/41539.significant.rst deleted file mode 100644 index 097dc6db75b05..0000000000000 --- a/airflow-core/newsfragments/41539.significant.rst +++ /dev/null @@ -1,19 +0,0 @@ -Removed deprecated ``smtp_user`` and ``smtp_password`` configuration parameters from the ``smtp`` section. Please use the SMTP connection (``smtp_default``) instead. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``smtp.smtp_user`` - * [x] ``smtp.smtp_password`` diff --git a/airflow-core/newsfragments/41550.significant.rst b/airflow-core/newsfragments/41550.significant.rst deleted file mode 100644 index 352afcc9020ea..0000000000000 --- a/airflow-core/newsfragments/41550.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Removed deprecated ``session_lifetime_days`` and ``force_log_out_after`` configuration parameters from the ``webserver`` section. Please use ``session_lifetime_minutes`` from the ``fab`` section. - -Removed deprecated ``policy`` parameter from ``airflow_local_settings``. Please use ``task_policy``. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``webserver.session_lifetime_days`` → ``fab.session_lifetime_minutes`` - * [x] ``webserver.force_log_out_after`` → ``fab.session_lifetime_minutes`` - * [x] ``policy`` → ``task_policy`` diff --git a/airflow-core/newsfragments/41552.significant.rst b/airflow-core/newsfragments/41552.significant.rst deleted file mode 100644 index 99d6881e4032c..0000000000000 --- a/airflow-core/newsfragments/41552.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed deprecated ``filename_template`` argument from ``airflow.utils.log.file_task_handler.FileTaskHandler``.
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] argument ``filename_template`` in ``airflow.utils.log.file_task_handler.FileTaskHandler`` and its subclasses diff --git a/airflow-core/newsfragments/41564.significant.rst b/airflow-core/newsfragments/41564.significant.rst deleted file mode 100644 index eed922f86c1f1..0000000000000 --- a/airflow-core/newsfragments/41564.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -Move all time operators and sensors from Airflow core to the standard provider - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.operators.datetime.*`` → ``airflow.providers.standard.time.operators.datetime.*`` - * [x] ``airflow.operators.weekday.*`` → ``airflow.providers.standard.time.operators.weekday.*`` - * [x] ``airflow.sensors.date_time.*`` → ``airflow.providers.standard.time.sensors.date_time.*`` - * [x] ``airflow.sensors.time_sensor.*`` → ``airflow.providers.standard.time.sensors.time.*`` - * [x] ``airflow.sensors.time_delta.*`` → ``airflow.providers.standard.time.sensors.time_delta.*`` - * [x] ``airflow.sensors.weekday.*`` → ``airflow.providers.standard.time.sensors.weekday.*`` diff --git a/airflow-core/newsfragments/41579.significant.rst b/airflow-core/newsfragments/41579.significant.rst deleted file mode 100644 index d554b5b85303a..0000000000000 --- a/airflow-core/newsfragments/41579.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed deprecated ``apply_defaults`` function from ``airflow/utils/decorators.py``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.decorators.apply_defaults`` (auto applied) diff --git a/airflow-core/newsfragments/41609.significant.rst b/airflow-core/newsfragments/41609.significant.rst deleted file mode 100644 index b691aaea7d188..0000000000000 --- a/airflow-core/newsfragments/41609.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -Removed deprecated ``dependency_detector`` parameter from the ``scheduler`` section. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``scheduler.dependency_detector`` diff --git a/airflow-core/newsfragments/41635.significant.rst b/airflow-core/newsfragments/41635.significant.rst deleted file mode 100644 index da3a6e719f4de..0000000000000 --- a/airflow-core/newsfragments/41635.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Removed deprecated ``--ignore-depends-on-past`` CLI option from the task command. Please use ``--depends-on-past ignore`` instead.
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41642.significant.rst b/airflow-core/newsfragments/41642.significant.rst deleted file mode 100644 index a0748da360d10..0000000000000 --- a/airflow-core/newsfragments/41642.significant.rst +++ /dev/null @@ -1,23 +0,0 @@ -Removed deprecated secrets backend methods ``get_conn_uri`` and ``get_connections``. - -Please use ``get_conn_value`` and ``get_connection`` instead. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_uri`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_conn_value`` - * [x] ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connections`` → ``airflow.secrets.base_secrets.BaseSecretsBackend.get_connection`` diff --git a/airflow-core/newsfragments/41663.significant.rst b/airflow-core/newsfragments/41663.significant.rst deleted file mode 100644 index 2cfd9d3f2df27..0000000000000 --- a/airflow-core/newsfragments/41663.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed the deprecated auth backend ``airflow.api.auth.backend.basic_auth`` from ``auth_backends``. Please use ``airflow.providers.fab.auth_manager.api.auth.backend.basic_auth`` instead. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.api.auth.backend.basic_auth`` → ``airflow.providers.fab.auth_manager.api.auth.backend.basic_auth`` diff --git a/airflow-core/newsfragments/41693.significant.rst b/airflow-core/newsfragments/41693.significant.rst deleted file mode 100644 index 89fbe92567725..0000000000000 --- a/airflow-core/newsfragments/41693.significant.rst +++ /dev/null @@ -1,21 +0,0 @@ -Removed the deprecated auth backends ``airflow.api.auth.backend.kerberos_auth`` and ``airflow.auth.managers.fab.api.auth.backend.kerberos_auth`` from ``auth_backends``. Please use ``airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth`` instead. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.api.auth.backend.kerberos_auth`` → ``airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth`` - * [x] ``airflow.auth.managers.fab.api.auth.backend.kerberos_auth`` → ``airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth`` diff --git a/airflow-core/newsfragments/41708.significant.rst b/airflow-core/newsfragments/41708.significant.rst deleted file mode 100644 index 5f14dd77d1107..0000000000000 --- a/airflow-core/newsfragments/41708.significant.rst +++ /dev/null @@ -1,21 +0,0 @@ -Removed the deprecated auth manager modules ``airflow.auth.managers.fab.fab_auth_manager`` and ``airflow.auth.managers.fab.security_manager.override``. Please use ``airflow.providers.fab.auth_manager.fab_auth_manager`` and ``airflow.providers.fab.auth_manager.security_manager.override`` instead.
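- -Example update (a minimal sketch; ``FabAirflowSecurityManagerOverride`` is assumed to be the class exposed by that module, e.g. for use in ``webserver_config.py``): - -.. code-block:: python - - from airflow.providers.fab.auth_manager.security_manager.override import ( - FabAirflowSecurityManagerOverride, - )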
- -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.auth.managers.fab.fab_auth_manager`` → ``airflow.providers.fab.auth_manager.fab_auth_manager`` - * [x] ``airflow.auth.managers.fab.security_manager.override`` → ``airflow.providers.fab.auth_manager.security_manager.override`` diff --git a/airflow-core/newsfragments/41733.significant.rst b/airflow-core/newsfragments/41733.significant.rst deleted file mode 100644 index dda6856aad8b5..0000000000000 --- a/airflow-core/newsfragments/41733.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed the deprecated ``get_connections()`` method from ``airflow.hooks.base.BaseHook``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.hooks.base.BaseHook.get_connections`` → ``airflow.hooks.base.BaseHook.get_connection`` diff --git a/airflow-core/newsfragments/41735.significant.rst b/airflow-core/newsfragments/41735.significant.rst deleted file mode 100644 index 751ea773bf761..0000000000000 --- a/airflow-core/newsfragments/41735.significant.rst +++ /dev/null @@ -1,56 +0,0 @@ -Removed deprecated module ``airflow.kubernetes``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.kubernetes.kubernetes_helper_functions.add_pod_suffix`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.add_pod_suffix`` - * [x] ``airflow.kubernetes.kubernetes_helper_functions.annotations_for_logging_task_metadata`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.annotations_for_logging_task_metadata`` - * [x] ``airflow.kubernetes.kubernetes_helper_functions.annotations_to_key`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.annotations_to_key`` - * [x] ``airflow.kubernetes.kubernetes_helper_functions.create_pod_id`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.create_pod_id`` - * [x] ``airflow.kubernetes.kubernetes_helper_functions.get_logs_task_metadata`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.get_logs_task_metadata`` - * [x] ``airflow.kubernetes.kubernetes_helper_functions.rand_str`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str`` - * [x] ``airflow.kubernetes.pod.Port`` → ``kubernetes.client.models.V1ContainerPort`` - * [x] ``airflow.kubernetes.pod.Resources`` → ``kubernetes.client.models.V1ResourceRequirements`` - * [x] ``airflow.kubernetes.pod_launcher.PodLauncher`` → ``airflow.providers.cncf.kubernetes.pod_launcher.PodLauncher`` - * [x] ``airflow.kubernetes.pod_launcher.PodStatus`` → ``airflow.providers.cncf.kubernetes.pod_launcher.PodStatus`` - * [x] ``airflow.kubernetes.pod_launcher_deprecated.PodLauncher`` → ``airflow.providers.cncf.kubernetes.pod_launcher_deprecated.PodLauncher`` - * [x] ``airflow.kubernetes.pod_launcher_deprecated.PodStatus`` → ``airflow.providers.cncf.kubernetes.pod_launcher_deprecated.PodStatus`` - * [x]
``airflow.kubernetes.pod_launcher_deprecated.get_kube_client`` → ``airflow.providers.cncf.kubernetes.kube_client.get_kube_client`` - * [x] ``airflow.kubernetes.pod_launcher_deprecated.PodDefaults`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodDefaults`` - * [x] ``airflow.kubernetes.pod_runtime_info_env.PodRuntimeInfoEnv`` → ``kubernetes.client.models.V1EnvVar`` - * [x] ``airflow.kubernetes.volume.Volume`` → ``kubernetes.client.models.V1Volume`` - * [x] ``airflow.kubernetes.volume_mount.VolumeMount`` → ``kubernetes.client.models.V1VolumeMount`` - * [x] ``airflow.kubernetes.k8s_model.K8SModel`` → ``airflow.providers.cncf.kubernetes.k8s_model.K8SModel`` - * [x] ``airflow.kubernetes.k8s_model.append_to_pod`` → ``airflow.providers.cncf.kubernetes.k8s_model.append_to_pod`` - * [x] ``airflow.kubernetes.kube_client._disable_verify_ssl`` → ``airflow.providers.cncf.kubernetes.kube_client._disable_verify_ssl`` - * [x] ``airflow.kubernetes.kube_client._enable_tcp_keepalive`` → ``airflow.providers.cncf.kubernetes.kube_client._enable_tcp_keepalive`` - * [x] ``airflow.kubernetes.kube_client.get_kube_client`` → ``airflow.providers.cncf.kubernetes.kube_client.get_kube_client`` - * [x] ``airflow.kubernetes.pod_generator.datetime_to_label_safe_datestring`` → ``airflow.providers.cncf.kubernetes.pod_generator.datetime_to_label_safe_datestring`` - * [x] ``airflow.kubernetes.pod_generator.extend_object_field`` → ``airflow.providers.cncf.kubernetes.pod_generator.extend_object_field`` - * [x] ``airflow.kubernetes.pod_generator.label_safe_datestring_to_datetime`` → ``airflow.providers.cncf.kubernetes.pod_generator.label_safe_datestring_to_datetime`` - * [x] ``airflow.kubernetes.pod_generator.make_safe_label_value`` → ``airflow.providers.cncf.kubernetes.pod_generator.make_safe_label_value`` - * [x] ``airflow.kubernetes.pod_generator.merge_objects`` → ``airflow.providers.cncf.kubernetes.pod_generator.merge_objects`` - * [x] ``airflow.kubernetes.pod_generator.PodGenerator`` → ``airflow.providers.cncf.kubernetes.pod_generator.PodGenerator`` - * [x] ``airflow.kubernetes.pod_generator.PodGeneratorDeprecated`` → ``airflow.providers.cncf.kubernetes.pod_generator.PodGenerator`` - * [x] ``airflow.kubernetes.pod_generator.PodDefaults`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodDefaults`` - * [x] ``airflow.kubernetes.pod_generator.add_pod_suffix`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.add_pod_suffix`` - * [x] ``airflow.kubernetes.pod_generator.rand_str`` → ``airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str`` - * [x] ``airflow.kubernetes.pod_generator_deprecated.make_safe_label_value`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.make_safe_label_value`` - * [x] ``airflow.kubernetes.pod_generator_deprecated.PodDefaults`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodDefaults`` - * [x] ``airflow.kubernetes.pod_generator_deprecated.PodGenerator`` → ``airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodGenerator`` - * [x] ``airflow.kubernetes.secret.Secret`` → ``airflow.providers.cncf.kubernetes.secret.Secret`` - * [x] ``airflow.kubernetes.secret.K8SModel`` → ``airflow.providers.cncf.kubernetes.k8s_model.K8SModel`` diff --git a/airflow-core/newsfragments/41736.significant.rst b/airflow-core/newsfragments/41736.significant.rst deleted file mode 100644 index 00f356217b59c..0000000000000 ---
a/airflow-core/newsfragments/41736.significant.rst +++ /dev/null @@ -1,28 +0,0 @@ -Removed deprecated parameters from core operators. - -Parameters removed: - -- ``airflow.operators.datetime.BranchDateTimeOperator``: ``use_task_execution_date`` -- ``airflow.operators.trigger_dagrun.TriggerDagRunOperator``: ``execution_date`` -- ``airflow.operators.weekday.BranchDayOfWeekOperator``: ``use_task_execution_day`` - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] argument ``execution_date`` in ``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` - * [x] argument ``use_task_execution_date`` → ``use_task_logical_date`` in ``airflow.operators.datetime.BranchDateTimeOperator`` - * [x] argument ``use_task_execution_day`` → ``use_task_logical_date`` in ``airflow.operators.weekday.BranchDayOfWeekOperator`` diff --git a/airflow-core/newsfragments/41737.significant.rst b/airflow-core/newsfragments/41737.significant.rst deleted file mode 100644 index 1bf2ed6c13663..0000000000000 --- a/airflow-core/newsfragments/41737.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed deprecated ``TaskStateTrigger`` from ``airflow.triggers.external_task`` module. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.triggers.external_task.TaskStateTrigger`` diff --git a/airflow-core/newsfragments/41739.significant.rst b/airflow-core/newsfragments/41739.significant.rst deleted file mode 100644 index fc92582ee6d2a..0000000000000 --- a/airflow-core/newsfragments/41739.significant.rst +++ /dev/null @@ -1,14 +0,0 @@ -Removed backfill command CLI option ``ignore-first-depends-on-past``. Its value is now always ``True``; there is no replacement CLI option. - -Removed backfill command CLI option ``treat-dag-as-regex``. Please use ``treat-dag-id-as-regex`` instead. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41748.significant.rst b/airflow-core/newsfragments/41748.significant.rst deleted file mode 100644 index 33546c3841995..0000000000000 --- a/airflow-core/newsfragments/41748.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Deprecated module ``airflow.hooks.dbapi`` removed. Please use ``airflow.providers.common.sql.hooks.sql`` instead. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.hooks.dbapi`` → ``airflow.providers.common.sql.hooks.sql`` diff --git a/airflow-core/newsfragments/41758.significant.rst b/airflow-core/newsfragments/41758.significant.rst deleted file mode 100644 index cf87332b94cfb..0000000000000 --- a/airflow-core/newsfragments/41758.significant.rst +++ /dev/null @@ -1,37 +0,0 @@ -Removed deprecated functions and modules from ``airflow.www`` module.
- -- The default-value warning for the ``cookie_samesite`` option in section ``[webserver]`` was removed. -- Legacy decorator ``@has_access`` in ``airflow.www.auth``: Please use one of the ``has_access_*`` decorators - defined in ``airflow/www/auth.py`` instead. -- Removed legacy module ``airflow.www.security``: Should be inherited from - ``airflow.providers.fab.auth_manager.security_manager.override.FabAirflowSecurityManagerOverride`` instead. - The constant value ``EXISTING_ROLES`` should be used from ``airflow.www.security_manager`` module. -- Removed the method ``get_sensitive_variables_fields()`` from ``airflow.www.utils``: Please use - ``airflow.utils.log.secrets_masker.get_sensitive_variables_fields`` instead. -- Removed the method ``should_hide_value_for_key()`` from ``airflow.www.utils``: Please use - ``airflow.utils.log.secrets_masker.should_hide_value_for_key`` instead. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.www.auth.has_access`` → ``airflow.www.auth.has_access_*`` - * [x] ``airflow.www.utils.get_sensitive_variables_fields`` → ``airflow.utils.log.secrets_masker.get_sensitive_variables_fields`` - * [x] ``airflow.www.utils.should_hide_value_for_key`` → ``airflow.utils.log.secrets_masker.should_hide_value_for_key`` - - * AIR303 - - * [x] ``airflow.www.security.FabAirflowSecurityManagerOverride`` → ``airflow.providers.fab.auth_manager.security_manager.override.FabAirflowSecurityManagerOverride`` diff --git a/airflow-core/newsfragments/41761.significant.rst b/airflow-core/newsfragments/41761.significant.rst deleted file mode 100644 index 20b472783e142..0000000000000 --- a/airflow-core/newsfragments/41761.significant.rst +++ /dev/null @@ -1,32 +0,0 @@ -Removed a set of deprecations in BaseOperator. - -- Parameter ``task_concurrency`` removed, please use ``max_active_tis_per_dag``. -- Support for additional (not defined) arguments removed. -- Support for trigger rule ``dummy`` removed. Please use ``always``. -- Support for trigger rule ``none_failed_or_skipped`` removed. Please use ``none_failed_min_one_success``. -- Support to load ``BaseOperatorLink`` via ``airflow.models.baseoperator`` module removed. -- Config ``operators.allow_illegal_arguments`` removed. - -* Types of change - - * [x] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``operators.allow_illegal_arguments`` - - * ruff - - * AIR302 - - * [x] ``airflow.utils.trigger_rule.TriggerRule.NONE_FAILED_OR_SKIPPED`` - * [x] argument ``task_concurrency`` → ``max_active_tis_per_dag`` in ``BaseOperator`` and its subclasses diff --git a/airflow-core/newsfragments/41762.significant.rst b/airflow-core/newsfragments/41762.significant.rst deleted file mode 100644 index a2d6271a247ed..0000000000000 --- a/airflow-core/newsfragments/41762.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -Removed a set of deprecations in ``Connection`` from ``airflow.models``. - -- Validation of extra fields now enforces that JSON values are provided. If a non-JSON value is provided - a ``ValueError`` will be raised. -- Removed utility method ``parse_netloc_to_hostname()``. -- Removed utility method ``parse_from_uri()``.
-- Removed utility methods ``log_info()`` and ``debug_info()``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41774.significant.rst b/airflow-core/newsfragments/41774.significant.rst deleted file mode 100644 index 56ed3a8633a01..0000000000000 --- a/airflow-core/newsfragments/41774.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Removed a set of deprecations in ``DAG`` from ``airflow.models``. - -- Removed deprecated parameters ``full_filepath`` and ``concurrency`` (Replaced by ``max_active_tasks``) from DAG and ``@dag`` decorator. -- Removed legacy support for permissions named ``can_dag_read`` and ``can_dag_edit``. The permissions need to be named ``can_read`` and ``can_edit``. -- Removed legacy deprecated functions ``following_schedule()`` and ``previous_schedule()``. -- Removed deprecated support for ``datetime`` in ``next_dagrun_info()``. Use ``DataInterval``. -- Removed legacy DAG property ``is_paused``. Please use ``get_is_paused`` instead. -- Removed legacy parameters ``get_tis``, ``recursion_depth`` and ``max_recursion_depth`` from ``DAG.clear()``. -- Removed implicit support to call ``create_dagrun()`` without data interval. -- Removed support for deprecated parameter ``concurrency`` in ``DagModel``. -- Removed support for ``datetime`` in ``DagModel.calculate_dagrun_date_fields``. Use ``DataInterval``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes diff --git a/airflow-core/newsfragments/41776.significant.rst b/airflow-core/newsfragments/41776.significant.rst deleted file mode 100644 index 0f8f53648da11..0000000000000 --- a/airflow-core/newsfragments/41776.significant.rst +++ /dev/null @@ -1,16 +0,0 @@ -Removed a set of deprecations in ``airflow.models.param``. - -- Removed deprecated direct access to DagParam as a module. Please import from ``airflow.models.param``. -- Ensure all param values are JSON serializable and raise a ``ParamValidationError`` if not. -- Ensure parsed date and time values are RFC3339 compliant. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41778.significant.rst b/airflow-core/newsfragments/41778.significant.rst deleted file mode 100644 index 017ec59d3a2c8..0000000000000 --- a/airflow-core/newsfragments/41778.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -Removed a set of deprecations in ``airflow.models.dagrun``. - -- Removed deprecated method ``DagRun.get_run()``. Instead, use standard SQLAlchemy ``DagRun`` model retrieval. -- Removed deprecated method ``DagRun.get_log_filename_template()``. Please use ``get_log_template()`` instead.
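A lookup that previously went through the removed ``DagRun.get_run()`` can be written as a plain SQLAlchemy query (a minimal sketch; ``provide_session`` is used here only so the snippet is self-contained):

.. code-block:: python

    from __future__ import annotations

    from airflow.models import DagRun
    from airflow.utils.session import provide_session


    @provide_session
    def get_dag_run(dag_id: str, run_id: str, session=None) -> DagRun | None:
        # Standard SQLAlchemy retrieval replacing DagRun.get_run()
        return (
            session.query(DagRun)
            .filter(DagRun.dag_id == dag_id, DagRun.run_id == run_id)
            .one_or_none()
        )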
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41779.significant.rst b/airflow-core/newsfragments/41779.significant.rst deleted file mode 100644 index 0c2861a856891..0000000000000 --- a/airflow-core/newsfragments/41779.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Remove deprecated support for ``airflow.models.errors.ImportError``, which has been renamed to ``ParseImportError``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41780.significant.rst b/airflow-core/newsfragments/41780.significant.rst deleted file mode 100644 index 453c961d5af8e..0000000000000 --- a/airflow-core/newsfragments/41780.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Remove deprecated support for passing ``execution_date`` to ``airflow.models.skipmixin.SkipMixin.skip()``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41784.significant.rst b/airflow-core/newsfragments/41784.significant.rst deleted file mode 100644 index 86669b5c011af..0000000000000 --- a/airflow-core/newsfragments/41784.significant.rst +++ /dev/null @@ -1,23 +0,0 @@ -Removed a set of deprecations in ``airflow.models.taskinstance``. - - - Removed deprecated arg ``activate_dag_runs`` from ``TaskInstance.clear_task_instances()``. Please use ``dag_run_state`` instead. - - Removed deprecated arg ``execution_date`` from ``TaskInstance.__init__()``. Please use ``run_id`` instead. - - Removed deprecated property ``_try_number`` from ``TaskInstance``. Please use ``try_number`` instead. - - Removed deprecated property ``prev_attempted_tries`` from ``TaskInstance``. Please use ``try_number`` instead. - - Removed deprecated property ``next_try_number`` from ``TaskInstance``. Please use ``try_number + 1`` instead. - - Removed deprecated property ``previous_ti`` from ``TaskInstance``. Please use ``get_previous_ti`` instead. - - Removed deprecated property ``previous_ti_success`` from ``TaskInstance``. Please use ``get_previous_ti`` instead. - - Removed deprecated property ``previous_start_date_success`` from ``TaskInstance``. Please use ``get_previous_start_date`` instead. - - Removed deprecated function ``as_dict`` from ``SimpleTaskInstance``. Please use ``BaseSerialization.serialize`` instead. - - Removed deprecated function ``from_dict`` from ``SimpleTaskInstance``. Please use ``BaseSerialization.deserialize`` instead. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41808.significant.rst b/airflow-core/newsfragments/41808.significant.rst deleted file mode 100644 index 38b66a9959588..0000000000000 --- a/airflow-core/newsfragments/41808.significant.rst +++ /dev/null @@ -1,19 +0,0 @@ -Removed deprecations in ``airflow.models.taskreschedule``.
- -Removed methods: - -- ``query_for_task_instance()`` -- ``find_for_task_instance()`` - -Note: there are no replacements. Direct access to DB is discouraged and will not be possible in Airflow 3 for tasks. The public REST API is the future way to interact with Airflow. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes diff --git a/airflow-core/newsfragments/41857.significant.rst b/airflow-core/newsfragments/41857.significant.rst deleted file mode 100644 index fd88c97bfa522..0000000000000 --- a/airflow-core/newsfragments/41857.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Airflow core now depends on Pydantic v2. If you have Pydantic v1 installed, please upgrade. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41910.significant.rst b/airflow-core/newsfragments/41910.significant.rst deleted file mode 100644 index 313291486f2e4..0000000000000 --- a/airflow-core/newsfragments/41910.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed deprecated method ``requires_access`` from module ``airflow.api_connexion.security``. Please use ``requires_access_*`` instead. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.api_connexion.security.requires_access`` → ``airflow.api_connexion.security.requires_access_*`` diff --git a/airflow-core/newsfragments/41964.significant.rst b/airflow-core/newsfragments/41964.significant.rst deleted file mode 100644 index f004b61636733..0000000000000 --- a/airflow-core/newsfragments/41964.significant.rst +++ /dev/null @@ -1,17 +0,0 @@ -``--tree`` flag for ``airflow tasks list`` command removed - -The format of the output with that flag can be expensive to generate and extremely large, depending on the DAG. -``airflow dags show`` is a better way to visualize the relationship of tasks in a DAG. - -``DAG.tree_view`` and ``DAG.get_tree_view`` have also been removed. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/41975.significant.rst b/airflow-core/newsfragments/41975.significant.rst deleted file mode 100644 index 1569035d713ed..0000000000000 --- a/airflow-core/newsfragments/41975.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -The deprecated basic metrics validators (``AllowListValidator`` and ``BlockListValidator``) were removed in favor of pattern matching. Pattern matching validators (``PatternAllowListValidator`` and ``PatternBlockListValidator``) are enabled by default. Configuration parameter ``metrics_use_pattern_match`` was removed from the ``metrics`` section.
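The pattern validators treat the configured entries as regular expressions rather than simple prefixes (a sketch, assuming the validator accepts the same comma-separated string as ``[metrics] metrics_allow_list`` and exposes a ``test`` check; verify against your version):

.. code-block:: python

    from airflow.metrics.validators import PatternAllowListValidator

    # Only metric names matching one of these regex patterns are emitted
    validator = PatternAllowListValidator("^scheduler,^ti_")

    validator.test("scheduler.heartbeat")  # matches ^scheduler -> allowed
    validator.test("dagrun.duration")  # no pattern matches -> blocked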
- -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``metrics.metrics_use_pattern_match`` - - * ruff - - * AIR302 - - * [x] ``airflow.metrics.validators.AllowListValidator`` → ``airflow.metrics.validators.PatternAllowListValidator`` - * [x] ``airflow.metrics.validators.BlockListValidator`` → ``airflow.metrics.validators.PatternBlockListValidator`` diff --git a/airflow-core/newsfragments/42023.significant.rst b/airflow-core/newsfragments/42023.significant.rst deleted file mode 100644 index 48e8f7344da53..0000000000000 --- a/airflow-core/newsfragments/42023.significant.rst +++ /dev/null @@ -1,19 +0,0 @@ -Rename ``Dataset`` as ``Asset`` in API endpoints - -* list of changes - - * Rename property run_type value ``dataset_triggered`` as ``asset_triggered`` in DAGRun endpoint - * Rename property ``dataset_expression`` as ``asset_expression`` in DAGDetail endpoint - * Change the string ``dataset_triggered`` in ``RUN_ID_REGEX`` as ``asset_triggered``, which affects the valid run IDs that a user can provide - * Rename ``dataset`` as ``asset`` in all the database tables - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42042.significant.rst b/airflow-core/newsfragments/42042.significant.rst deleted file mode 100644 index a743da5326c9d..0000000000000 --- a/airflow-core/newsfragments/42042.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Removed ``is_active`` property from ``BaseUser``. This property is no longer used. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes diff --git a/airflow-core/newsfragments/42054.significant.rst b/airflow-core/newsfragments/42054.significant.rst deleted file mode 100644 index dbe243df3854e..0000000000000 --- a/airflow-core/newsfragments/42054.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -Dataset and DatasetAlias are no longer hashable - -This means they can no longer be used as dict keys or put into a set. Dataset's -equality logic is also tweaked slightly to consider the extra dict. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42060.significant.rst b/airflow-core/newsfragments/42060.significant.rst deleted file mode 100644 index fc806729e0ec0..0000000000000 --- a/airflow-core/newsfragments/42060.significant.rst +++ /dev/null @@ -1,21 +0,0 @@ -Removed deprecated configuration ``stalled_task_timeout`` from ``celery``, ``task_adoption_timeout`` from ``celery`` and ``worker_pods_pending_timeout`` from ``kubernetes_executor``. Please use ``task_queued_timeout`` from ``scheduler`` instead.
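Tooling that read the removed options should read the consolidated option instead (a sketch using the standard ``conf`` accessor):

.. code-block:: python

    from airflow.configuration import conf

    # Replaces celery.stalled_task_timeout, celery.task_adoption_timeout
    # and kubernetes_executor.worker_pods_pending_timeout
    timeout = conf.getfloat("scheduler", "task_queued_timeout")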
- -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``celery.stalled_task_timeout`` → ``scheduler.task_queued_timeout`` - * [x] ``celery.task_adoption_timeout`` → ``scheduler.task_queued_timeout`` - * [x] ``kubernetes_executor.worker_pods_pending_timeout`` → ``scheduler.task_queued_timeout`` diff --git a/airflow-core/newsfragments/42088.significant.rst b/airflow-core/newsfragments/42088.significant.rst deleted file mode 100644 index db2d65c3bd882..0000000000000 --- a/airflow-core/newsfragments/42088.significant.rst +++ /dev/null @@ -1,42 +0,0 @@ -Removed deprecated metrics configuration. - - * Removed deprecated configuration ``statsd_allow_list`` from ``metrics``. Please use ``metrics_allow_list`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_block_list`` from ``metrics``. Please use ``metrics_block_list`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_on`` from ``scheduler``. Please use ``statsd_on`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_host`` from ``scheduler``. Please use ``statsd_host`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_port`` from ``scheduler``. Please use ``statsd_port`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_prefix`` from ``scheduler``. Please use ``statsd_prefix`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_allow_list`` from ``scheduler``. Please use ``statsd_allow_list`` from ``metrics`` instead. - * Removed deprecated configuration ``stat_name_handler`` from ``scheduler``. Please use ``stat_name_handler`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_datadog_enabled`` from ``scheduler``. Please use ``statsd_datadog_enabled`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_datadog_tags`` from ``scheduler``. Please use ``statsd_datadog_tags`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_datadog_metrics_tags`` from ``scheduler``. Please use ``statsd_datadog_metrics_tags`` from ``metrics`` instead. - * Removed deprecated configuration ``statsd_custom_client_path`` from ``scheduler``. Please use ``statsd_custom_client_path`` from ``metrics`` instead.
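As a reminder of how one of the relocated options is wired up: ``stat_name_handler`` (now read from ``[metrics]``) points at an importable callable that rewrites stat names before they are emitted (a sketch; the module path is hypothetical):

.. code-block:: python

    # my_company/metrics.py, referenced in airflow.cfg as:
    #   [metrics]
    #   stat_name_handler = my_company.metrics.clean_stat_name
    def clean_stat_name(stat_name: str) -> str:
        # Normalize metric names before they reach StatsD
        return stat_name.lower().replace("-", "_")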
- -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``metrics.statsd_allow_list`` → ``metrics.metrics_allow_list`` - * [x] ``metrics.statsd_block_list`` → ``metrics.metrics_block_list`` - * [x] ``scheduler.statsd_on`` → ``metrics.statsd_on`` - * [x] ``scheduler.statsd_host`` → ``metrics.statsd_host`` - * [x] ``scheduler.statsd_port`` → ``metrics.statsd_port`` - * [x] ``scheduler.statsd_prefix`` → ``metrics.statsd_prefix`` - * [x] ``scheduler.statsd_allow_list`` → ``metrics.statsd_allow_list`` - * [x] ``scheduler.stat_name_handler`` → ``metrics.stat_name_handler`` - * [x] ``scheduler.statsd_datadog_enabled`` → ``metrics.statsd_datadog_enabled`` - * [x] ``scheduler.statsd_datadog_tags`` → ``metrics.statsd_datadog_tags`` - * [x] ``scheduler.statsd_datadog_metrics_tags`` → ``metrics.statsd_datadog_metrics_tags`` - * [x] ``scheduler.statsd_custom_client_path`` → ``metrics.statsd_custom_client_path`` diff --git a/airflow-core/newsfragments/42100.significant.rst b/airflow-core/newsfragments/42100.significant.rst deleted file mode 100644 index 801626a21c158..0000000000000 --- a/airflow-core/newsfragments/42100.significant.rst +++ /dev/null @@ -1,56 +0,0 @@ -Removed deprecated logging configuration. - - * Removed deprecated configuration ``interleave_timestamp_parser`` from ``core``. Please use ``interleave_timestamp_parser`` from ``logging`` instead. - * Removed deprecated configuration ``base_log_folder`` from ``core``. Please use ``base_log_folder`` from ``logging`` instead. - * Removed deprecated configuration ``remote_logging`` from ``core``. Please use ``remote_logging`` from ``logging`` instead. - * Removed deprecated configuration ``remote_log_conn_id`` from ``core``. Please use ``remote_log_conn_id`` from ``logging`` instead. - * Removed deprecated configuration ``remote_base_log_folder`` from ``core``. Please use ``remote_base_log_folder`` from ``logging`` instead. - * Removed deprecated configuration ``encrypt_s3_logs`` from ``core``. Please use ``encrypt_s3_logs`` from ``logging`` instead. - * Removed deprecated configuration ``logging_level`` from ``core``. Please use ``logging_level`` from ``logging`` instead. - * Removed deprecated configuration ``fab_logging_level`` from ``core``. Please use ``fab_logging_level`` from ``logging`` instead. - * Removed deprecated configuration ``logging_config_class`` from ``core``. Please use ``logging_config_class`` from ``logging`` instead. - * Removed deprecated configuration ``colored_console_log`` from ``core``. Please use ``colored_console_log`` from ``logging`` instead. - * Removed deprecated configuration ``colored_log_format`` from ``core``. Please use ``colored_log_format`` from ``logging`` instead. - * Removed deprecated configuration ``colored_formatter_class`` from ``core``. Please use ``colored_formatter_class`` from ``logging`` instead. - * Removed deprecated configuration ``log_format`` from ``core``. Please use ``log_format`` from ``logging`` instead. - * Removed deprecated configuration ``simple_log_format`` from ``core``. Please use ``simple_log_format`` from ``logging`` instead. - * Removed deprecated configuration ``task_log_prefix_template`` from ``core``. Please use ``task_log_prefix_template`` from ``logging`` instead. - * Removed deprecated configuration ``log_filename_template`` from ``core``. 
Please use ``log_filename_template`` from ``logging`` instead. - * Removed deprecated configuration ``log_processor_filename_template`` from ``core``. Please use ``log_processor_filename_template`` from ``logging`` instead. - * Removed deprecated configuration ``dag_processor_manager_log_location`` from ``core``. Please use ``dag_processor_manager_log_location`` from ``logging`` instead. - * Removed deprecated configuration ``task_log_reader`` from ``core``. Please use ``task_log_reader`` from ``logging`` instead. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``core.interleave_timestamp_parser`` → ``logging.interleave_timestamp_parser`` - * [x] ``core.base_log_folder`` → ``logging.base_log_folder`` - * [x] ``core.remote_logging`` → ``logging.remote_logging`` - * [x] ``core.remote_log_conn_id`` → ``logging.remote_log_conn_id`` - * [x] ``core.remote_base_log_folder`` → ``logging.remote_base_log_folder`` - * [x] ``core.encrypt_s3_logs`` → ``logging.encrypt_s3_logs`` - * [x] ``core.logging_level`` → ``logging.logging_level`` - * [x] ``core.fab_logging_level`` → ``logging.fab_logging_level`` - * [x] ``core.logging_config_class`` → ``logging.logging_config_class`` - * [x] ``core.colored_console_log`` → ``logging.colored_console_log`` - * [x] ``core.colored_log_format`` → ``logging.colored_log_format`` - * [x] ``core.colored_formatter_class`` → ``logging.colored_formatter_class`` - * [x] ``core.log_format`` → ``logging.log_format`` - * [x] ``core.simple_log_format`` → ``logging.simple_log_format`` - * [x] ``core.task_log_prefix_template`` → ``logging.task_log_prefix_template`` - * [x] ``core.log_filename_template`` → ``logging.log_filename_template`` - * [x] ``core.log_processor_filename_template`` → ``logging.log_processor_filename_template`` - * [x] ``core.dag_processor_manager_log_location`` → ``logging.dag_processor_manager_log_location`` - * [x] ``core.task_log_reader`` → ``logging.task_log_reader`` diff --git a/airflow-core/newsfragments/42126.significant.rst b/airflow-core/newsfragments/42126.significant.rst deleted file mode 100644 index e1b6f431ac4fb..0000000000000 --- a/airflow-core/newsfragments/42126.significant.rst +++ /dev/null @@ -1,43 +0,0 @@ -Removed deprecated database configuration. - - * Removed deprecated configuration ``sql_alchemy_conn`` from ``core``. Please use ``sql_alchemy_conn`` from ``database`` instead. - * Removed deprecated configuration ``sql_engine_encoding`` from ``core``. Please use ``sql_engine_encoding`` from ``database`` instead. - * Removed deprecated configuration ``sql_engine_collation_for_ids`` from ``core``. Please use ``sql_engine_collation_for_ids`` from ``database`` instead. - * Removed deprecated configuration ``sql_alchemy_pool_enabled`` from ``core``. Please use ``sql_alchemy_pool_enabled`` from ``database`` instead. - * Removed deprecated configuration ``sql_alchemy_pool_size`` from ``core``. Please use ``sql_alchemy_pool_size`` from ``database`` instead. - * Removed deprecated configuration ``sql_alchemy_max_overflow`` from ``core``. Please use ``sql_alchemy_max_overflow`` from ``database`` instead. - * Removed deprecated configuration ``sql_alchemy_pool_recycle`` from ``core``. Please use ``sql_alchemy_pool_recycle`` from ``database`` instead. 
- * Removed deprecated configuration ``sql_alchemy_pool_pre_ping`` from ``core``. Please use ``sql_alchemy_pool_pre_ping`` from ``database`` instead. - * Removed deprecated configuration ``sql_alchemy_schema`` from ``core``. Please use ``sql_alchemy_schema`` from ``database`` instead. - * Removed deprecated configuration ``sql_alchemy_connect_args`` from ``core``. Please use ``sql_alchemy_connect_args`` from ``database`` instead. - * Removed deprecated configuration ``load_default_connections`` from ``core``. Please use ``load_default_connections`` from ``database`` instead. - * Removed deprecated configuration ``max_db_retries`` from ``core``. Please use ``max_db_retries`` from ``database`` instead. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - - * ``airflow config lint`` - - * [x] ``core.sql_alchemy_conn`` → ``database.sql_alchemy_conn`` - * [x] ``core.sql_engine_encoding`` → ``database.sql_engine_encoding`` - * [x] ``core.sql_engine_collation_for_ids`` → ``database.sql_engine_collation_for_ids`` - * [x] ``core.sql_alchemy_pool_enabled`` → ``database.sql_alchemy_pool_enabled`` - * [x] ``core.sql_alchemy_pool_size`` → ``database.sql_alchemy_pool_size`` - * [x] ``core.sql_alchemy_max_overflow`` → ``database.sql_alchemy_max_overflow`` - * [x] ``core.sql_alchemy_pool_recycle`` → ``database.sql_alchemy_pool_recycle`` - * [x] ``core.sql_alchemy_pool_pre_ping`` → ``database.sql_alchemy_pool_pre_ping`` - * [x] ``core.sql_alchemy_schema`` → ``database.sql_alchemy_schema`` - * [x] ``core.sql_alchemy_connect_args`` → ``database.sql_alchemy_connect_args`` - * [x] ``core.load_default_connections`` → ``database.load_default_connections`` - * [x] ``core.max_db_retries`` → ``database.max_db_retries`` diff --git a/airflow-core/newsfragments/42129.significant.rst b/airflow-core/newsfragments/42129.significant.rst deleted file mode 100644 index 6f15f46fede7d..0000000000000 --- a/airflow-core/newsfragments/42129.significant.rst +++ /dev/null @@ -1,48 +0,0 @@ -Removed deprecated configuration. - - * Removed deprecated configuration ``worker_precheck`` from ``core``. Please use ``worker_precheck`` from ``celery`` instead. - * Removed deprecated configuration ``max_threads`` from ``scheduler``. Please use ``parsing_processes`` from ``dag_processor`` instead. - * Removed deprecated configuration ``default_queue`` from ``celery``. Please use ``default_queue`` from ``operators`` instead. - * Removed deprecated configuration ``hide_sensitive_variable_fields`` from ``admin``. Please use ``hide_sensitive_var_conn_fields`` from ``core`` instead. - * Removed deprecated configuration ``sensitive_variable_fields`` from ``admin``. Please use ``sensitive_var_conn_names`` from ``core`` instead. - * Removed deprecated configuration ``non_pooled_task_slot_count`` from ``core``. Please use ``default_pool_task_slot_count`` from ``core`` instead. - * Removed deprecated configuration ``dag_concurrency`` from ``core``. Please use ``max_active_tasks_per_dag`` from ``core`` instead. - * Removed deprecated configuration ``access_control_allow_origin`` from ``api``. Please use ``access_control_allow_origins`` from ``api`` instead. - * Removed deprecated configuration ``auth_backend`` from ``api``. Please use ``auth_backends`` from ``api`` instead. - * Removed deprecated configuration ``deactivate_stale_dags_interval`` from ``scheduler``. 
Please use ``parsing_cleanup_interval`` from ``scheduler`` instead. - * Removed deprecated configuration ``worker_pods_pending_timeout_check_interval`` from ``kubernetes_executor``. Please use ``task_queued_timeout_check_interval`` from ``scheduler`` instead. - * Removed deprecated configuration ``update_fab_perms`` from ``webserver``. Please use ``update_fab_perms`` from ``fab`` instead. - * Removed deprecated configuration ``auth_rate_limited`` from ``webserver``. Please use ``auth_rate_limited`` from ``fab`` instead. - * Removed deprecated configuration ``auth_rate_limit`` from ``webserver``. Please use ``auth_rate_limit`` from ``fab`` instead. - * Removed deprecated configuration section ``kubernetes``. Please use ``kubernetes_executor`` instead. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``core.worker_precheck`` → ``celery.worker_precheck`` - * [x] ``scheduler.max_threads`` → ``dag_processor.parsing_processes`` - * [x] ``celery.default_queue`` → ``operators.default_queue`` - * [x] ``admin.hide_sensitive_variable_fields`` → ``core.hide_sensitive_var_conn_fields`` - * [x] ``admin.sensitive_variable_fields`` → ``core.sensitive_var_conn_names`` - * [x] ``core.non_pooled_task_slot_count`` → ``core.default_pool_task_slot_count`` - * [x] ``core.dag_concurrency`` → ``core.max_active_tasks_per_dag`` - * [x] ``api.access_control_allow_origin`` → ``api.access_control_allow_origins`` - * [x] ``api.auth_backend`` → ``api.auth_backends`` - * [x] ``scheduler.deactivate_stale_dags_interval`` → ``scheduler.parsing_cleanup_interval`` - * [x] ``kubernetes_executor.worker_pods_pending_timeout_check_interval`` → ``scheduler.task_queued_timeout_check_interval`` - * [x] ``webserver.update_fab_perms`` → ``fab.update_fab_perms`` - * [x] ``webserver.auth_rate_limited`` → ``fab.auth_rate_limited`` - * [x] ``webserver.auth_rate_limit`` → ``fab.auth_rate_limit`` - * [x] ``kubernetes`` → ``kubernetes_executor`` diff --git a/airflow-core/newsfragments/42137.significant.rst b/airflow-core/newsfragments/42137.significant.rst deleted file mode 100644 index 1055032dc2a64..0000000000000 --- a/airflow-core/newsfragments/42137.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Optional ``[saml]`` extra has been removed from Airflow core - instead, the Amazon provider declares ``saml`` as a required dependency.
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42252.significant.rst b/airflow-core/newsfragments/42252.significant.rst deleted file mode 100644 index 2533fb451b498..0000000000000 --- a/airflow-core/newsfragments/42252.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Move bash operators from airflow core to standard provider - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.operators.bash.BashOperator`` → ``airflow.providers.standard.operators.bash.BashOperator`` diff --git a/airflow-core/newsfragments/42280.significant.rst b/airflow-core/newsfragments/42280.significant.rst deleted file mode 100644 index 35e37727cda6a..0000000000000 --- a/airflow-core/newsfragments/42280.significant.rst +++ /dev/null @@ -1,16 +0,0 @@ -Removed deprecated REST API endpoints: - -* /api/v1/roles. Use /auth/fab/v1/roles instead -* /api/v1/permissions. Use /auth/fab/v1/permissions instead -* /api/v1/users. Use /auth/fab/v1/users instead - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42285.significant.rst b/airflow-core/newsfragments/42285.significant.rst deleted file mode 100644 index c7a545664b484..0000000000000 --- a/airflow-core/newsfragments/42285.significant.rst +++ /dev/null @@ -1,27 +0,0 @@ -The SLA feature is removed in Airflow 3.0, to be replaced with Airflow Alerts in 3.1 - -* Types of change - - * [x] Dag changes - * [x] Config changes - * [x] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -.. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``core.check_slas`` - - * ruff - - * AIR302 - - * [x] argument ``sla`` in ``BaseOperator`` and its subclasses - * [x] argument ``sla_miss_callback`` in ``DAG`` diff --git a/airflow-core/newsfragments/42343.feature.rst b/airflow-core/newsfragments/42343.feature.rst deleted file mode 100644 index 8a7cdf335a06e..0000000000000 --- a/airflow-core/newsfragments/42343.feature.rst +++ /dev/null @@ -1 +0,0 @@ -New function ``create_dataset_aliases`` added to DatasetManager for DatasetAlias creation. diff --git a/airflow-core/newsfragments/42343.significant.rst b/airflow-core/newsfragments/42343.significant.rst deleted file mode 100644 index 7af4954e265cc..0000000000000 --- a/airflow-core/newsfragments/42343.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -``DatasetManager.create_datasets`` now takes ``Dataset`` objects - -This function previously accepted a list of ``DatasetModel`` objects. It now -receives ``Dataset`` objects instead. A list of ``DatasetModel`` objects is -created inside, and returned by, the function. - -Also, the ``session`` argument is now keyword-only.
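A migrated call site would look roughly like this (a sketch; the import paths reflect the pre-rename ``dataset`` modules, and ``session`` is assumed to be supplied by the caller):

.. code-block:: python

    from airflow.datasets import Dataset
    from airflow.datasets.manager import DatasetManager

    manager = DatasetManager()
    # Pass Dataset objects (not DatasetModel), with session as keyword-only;
    # the created DatasetModel objects are returned.
    models = manager.create_datasets([Dataset(uri="s3://bucket/key")], session=session)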
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42404.significant.rst b/airflow-core/newsfragments/42404.significant.rst deleted file mode 100644 index da57b01bde197..0000000000000 --- a/airflow-core/newsfragments/42404.significant.rst +++ /dev/null @@ -1,19 +0,0 @@ -Removed ``logical_date`` arguments from functions and APIs for DAG run lookups to align with Airflow 3.0. - -The shift towards using ``run_id`` as the sole identifier for DAG runs eliminates the limitations of ``execution_date`` and ``logical_date``, particularly for dynamic DAG runs and cases where multiple runs occur at the same logical time. This change impacts database models, templates, and functions: - -- Removed ``logical_date`` arguments from public APIs and Python functions related to DAG run lookups. -- ``run_id`` is now the exclusive identifier for DAG runs in these contexts. -- ``ds``, ``ds_nodash``, ``ts``, ``ts_nodash``, ``ts_nodash_with_tz`` (and ``logical_date``) will no longer exist for non-scheduled DAG runs (i.e. manually triggered runs). -- The ``task_instance_key_str`` template variable has changed to use ``run_id``, not the ``logical_date``. This means its value will change compared to 2.x, even for old runs. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42436.significant.rst b/airflow-core/newsfragments/42436.significant.rst deleted file mode 100644 index f3c0117e641a5..0000000000000 --- a/airflow-core/newsfragments/42436.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -Default ``.airflowignore`` syntax changed to ``glob`` - -The default value of the configuration ``[core] dag_ignore_file_syntax`` has -been changed to ``glob``, which better matches the ignore file behavior of many -popular tools. - -To revert to the previous behavior, set the configuration to ``regexp``.
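The practical difference is that ignore entries now match like shell globs instead of regular expressions (a quick illustration using the standard library):

.. code-block:: python

    from fnmatch import fnmatch

    # With the new default (glob), an .airflowignore line such as "*_backup.py"
    # matches the way a shell glob does:
    fnmatch("my_dag_backup.py", "*_backup.py")  # True
    # Under the previous default (regexp), the same line would have been
    # compiled with re.compile() and matched as a regular expression.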
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42548.significant.rst b/airflow-core/newsfragments/42548.significant.rst deleted file mode 100644 index 93a5a7db417e4..0000000000000 --- a/airflow-core/newsfragments/42548.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Remove is_backfill attribute from DagRun object - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42579.significant.rst b/airflow-core/newsfragments/42579.significant.rst deleted file mode 100644 index fd386f83385ea..0000000000000 --- a/airflow-core/newsfragments/42579.significant.rst +++ /dev/null @@ -1,32 +0,0 @@ -Rename ``Dataset`` endpoints as ``Asset`` endpoints - -* list of changes - - * Rename dataset endpoints as asset endpoints - - * Rename ``/datasets`` as ``/assets`` - * Rename ``/datasets/{uri}`` as ``/assets/{uri}`` - * Rename ``/datasets/events`` as ``/assets/events`` - * Rename ``/datasets/queuedEvent/{uri}`` as ``/ui/next_run_assets/upstream`` - * Rename ``/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamDatasetEvents`` as ``/ui/next_run_assets/upstream`` - * Rename ``/dags/{dag_id}/datasets/queuedEvent/{uri}`` as ``/ui/next_run_assets/upstream`` - * Rename ``/dags/{dag_id}/datasets/queuedEvent`` as ``/ui/next_run_assets/upstream`` - * Rename ``/ui/next_run_datasets/upstream`` as ``/ui/next_run_assets/upstream`` - - * Rename dataset schema as asset endpoints - - * Rename ``AssetCollection.datasets`` as ``AssetCollection.assets`` - * Rename ``AssetEventCollection.dataset_events`` as ``AssetEventCollection.asset_events`` - * Rename ``AssetEventCollectionSchema.dataset_events`` as ``AssetEventCollectionSchema.asset_events`` - * Rename ``CreateAssetEventSchema.dataset_uri`` as ``CreateAssetEventSchema.asset_uri`` - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42640.significant.rst b/airflow-core/newsfragments/42640.significant.rst deleted file mode 100644 index e5d0e81f3da11..0000000000000 --- a/airflow-core/newsfragments/42640.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Removed deprecated custom dag dependency detector. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42647.significant.rst b/airflow-core/newsfragments/42647.significant.rst deleted file mode 100644 index 71bee3bb3fb9b..0000000000000 --- a/airflow-core/newsfragments/42647.significant.rst +++ /dev/null @@ -1,30 +0,0 @@ -Removed deprecated aliases support for providers. - -* Removed deprecated ``atlas`` alias support. Please use ``apache-atlas`` instead. -* Removed deprecated ``aws`` alias support. Please use ``amazon`` instead. -* Removed deprecated ``azure`` alias support. Please use ``microsoft-azure`` instead. -* Removed deprecated ``cassandra`` alias support. Please use ``apache-cassandra`` instead. 
-* Removed deprecated ``crypto`` alias support. -* Removed deprecated ``druid`` alias support. Please use ``apache-druid`` instead. -* Removed deprecated ``gcp`` alias support. Please use ``google`` instead. -* Removed deprecated ``gcp-api`` alias support. Please use ``google`` instead. -* Removed deprecated ``hdfs`` alias support. Please use ``apache-hdfs`` instead. -* Removed deprecated ``hive`` alias support. Please use ``apache-hive`` instead. -* Removed deprecated ``kubernetes`` alias support. Please use ``cncf-kubernetes`` instead. -* Removed deprecated ``mssql`` alias support. Please use ``microsoft-mssql`` instead. -* Removed deprecated ``pinot`` alias support. Please use ``apache-pinot`` instead. -* Removed deprecated ``s3`` alias support. Please use ``amazon`` instead. -* Removed deprecated ``spark`` alias support. Please use ``apache-spark`` instead. -* Removed deprecated ``webhdfs`` alias support. Please use ``apache-webhdfs`` instead. -* Removed deprecated ``winrm`` alias support. Please use ``microsoft-winrm`` instead. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42658.significant.rst b/airflow-core/newsfragments/42658.significant.rst deleted file mode 100644 index 84381da3f02a5..0000000000000 --- a/airflow-core/newsfragments/42658.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -* Changed ``dag_id`` from a flag (``-d``, ``--dag-id``) to a positional argument in the ``dags list-runs`` CLI command. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42660.significant.rst b/airflow-core/newsfragments/42660.significant.rst deleted file mode 100644 index 924259e11151d..0000000000000 --- a/airflow-core/newsfragments/42660.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Deprecated field ``concurrency`` from ``DAGDetailSchema`` has been removed. Please use ``max_active_tasks`` from ``DAGDetailSchema`` instead. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42739.significant.rst b/airflow-core/newsfragments/42739.significant.rst deleted file mode 100644 index eb565c37e494a..0000000000000 --- a/airflow-core/newsfragments/42739.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Remove support for Python 3.8 as this version is no longer maintained within the Python release schedule, see https://peps.python.org/pep-0596/. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42776.significant.rst b/airflow-core/newsfragments/42776.significant.rst deleted file mode 100644 index 188bcd8315442..0000000000000 --- a/airflow-core/newsfragments/42776.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Removed deprecated ``Chainable`` type from ``BaseOperator``.
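Code that imported the removed alias for its own annotations can define an equivalent locally (an assumption based on what the alias used to cover; verify against your codebase):

.. code-block:: python

    from collections.abc import Sequence
    from typing import Union

    from airflow.models.taskmixin import DependencyMixin

    # Local stand-in for the removed BaseOperator Chainable alias
    Chainable = Union[DependencyMixin, Sequence[DependencyMixin]]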
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/42783.improvement.rst b/airflow-core/newsfragments/42783.improvement.rst deleted file mode 100644 index eb6a2181bd06f..0000000000000 --- a/airflow-core/newsfragments/42783.improvement.rst +++ /dev/null @@ -1 +0,0 @@ -Bash script files (``.sh`` and ``.bash``) with Jinja templating enabled (without the space after the file extension) are now rendered into a temporary file and then executed, instead of being executed directly as an inline command. diff --git a/airflow-core/newsfragments/42794.significant.rst b/airflow-core/newsfragments/42794.significant.rst deleted file mode 100644 index 7113d0ced5094..0000000000000 --- a/airflow-core/newsfragments/42794.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Move filesystem, package_index, subprocess hooks to standard provider - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.hooks.filesystem.*`` → ``airflow.providers.standard.hooks.filesystem.*`` - * [x] ``airflow.hooks.package_index.*`` → ``airflow.providers.standard.hooks.package_index.*`` - * [x] ``airflow.hooks.subprocess.*`` → ``airflow.providers.standard.hooks.subprocess.*`` diff --git a/airflow-core/newsfragments/42953.significant.rst b/airflow-core/newsfragments/42953.significant.rst deleted file mode 100644 index eb301b1197e7a..0000000000000 --- a/airflow-core/newsfragments/42953.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -``DAG.max_active_tasks`` is now evaluated per-run - -Previously, this was evaluated across all runs of the dag. This behavior change was passed by lazy consensus. -Vote thread: https://lists.apache.org/thread/9o84d3yn934m32gtlpokpwtbbmtxj47l. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43067.significant.rst b/airflow-core/newsfragments/43067.significant.rst deleted file mode 100644 index 65501c54a6bf5..0000000000000 --- a/airflow-core/newsfragments/43067.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -Remove ``DAG.get_num_active_runs`` - -We don't need this function. There's already an almost-identical function on DagRun that we can use, namely ``DagRun.active_runs_of_dags``. -Also, ``DagRun.active_runs_of_dags`` is made private. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43073.significant.rst b/airflow-core/newsfragments/43073.significant.rst deleted file mode 100644 index 9dd26a3bf175f..0000000000000 --- a/airflow-core/newsfragments/43073.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Rename ``DagRunTriggeredByType.DATASET`` as ``DagRunTriggeredByType.ASSET``, and rename ``dataset`` as ``asset`` in all UI components.
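Code that inspects the trigger source switches to the renamed member (a sketch; the import path from ``airflow.utils.types`` is an assumption):

.. code-block:: python

    from airflow.utils.types import DagRunTriggeredByType

    # Before (removed): DagRunTriggeredByType.DATASET
    triggered_by = DagRunTriggeredByType.ASSET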
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43096.significant.rst b/airflow-core/newsfragments/43096.significant.rst deleted file mode 100644 index 1803b9ac041b7..0000000000000 --- a/airflow-core/newsfragments/43096.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed auth backend ``airflow.api.auth.backend.default`` - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.api.auth.backend.default`` → ``airflow.providers.fab.auth_manager.api.auth.backend.session`` diff --git a/airflow-core/newsfragments/43102.significant.rst b/airflow-core/newsfragments/43102.significant.rst deleted file mode 100644 index 18e59e5ac22ac..0000000000000 --- a/airflow-core/newsfragments/43102.significant.rst +++ /dev/null @@ -1,29 +0,0 @@ -Change in query parameter handling for list parameters - -The handling of list-type query parameters in the API has been updated. -FastAPI defaults the ``explode`` behavior to ``true`` for list parameters, -which affects how these parameters are passed in requests. -This adjustment applies to all list-type query parameters across the API. - -Before: - -.. code-block:: - - http://<host>:<port>/<endpoint>?param=item1,item2 - -After: - -.. code-block:: - - http://<host>:<port>/<endpoint>?param=item1&param=item2 - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43183.significant.rst b/airflow-core/newsfragments/43183.significant.rst deleted file mode 100644 index 7f3e28aa0c5a0..0000000000000 --- a/airflow-core/newsfragments/43183.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Remove TaskContextLogger - -We introduced this as a way to inject messages into task logs from places -other than the task execution context. We later realized that we were better off -just using the Log table. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``logging.enable_task_context_logger`` diff --git a/airflow-core/newsfragments/43289.significant.rst b/airflow-core/newsfragments/43289.significant.rst deleted file mode 100644 index aa6a51d89907f..0000000000000 --- a/airflow-core/newsfragments/43289.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -Support for adding executors via Airflow Plugins is removed - -Executors should no longer be registered or imported via Airflow's plugin mechanism -- these types of classes -are just treated as plain Python classes by Airflow, so there is no need to register them with Airflow.
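In practice the plugin shim can simply be deleted and the executor referenced by import path in the configuration (a sketch; the plugin and executor names are hypothetical):

.. code-block:: python

    # Before (no longer supported):
    # from airflow.plugins_manager import AirflowPlugin
    # from my_company.executors import MyExecutor
    #
    # class MyExecutorPlugin(AirflowPlugin):
    #     name = "my_executor_plugin"
    #     executors = [MyExecutor]

    # After: no plugin needed; point the configuration at the class directly:
    #   [core]
    #   executor = my_company.executors.MyExecutor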
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [x] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] extension ``executors`` in ``airflow.plugins_manager.AirflowPlugin`` - * [x] extension ``operators`` in ``airflow.plugins_manager.AirflowPlugin`` - * [x] extension ``sensors`` in ``airflow.plugins_manager.AirflowPlugin`` diff --git a/airflow-core/newsfragments/43291.significant.rst b/airflow-core/newsfragments/43291.significant.rst deleted file mode 100644 index 227ccda5fdd14..0000000000000 --- a/airflow-core/newsfragments/43291.significant.rst +++ /dev/null @@ -1,35 +0,0 @@ -Support for adding Hooks via Airflow Plugins is removed - -Hooks should no longer be registered or imported via Airflow's plugin mechanism -- these types of classes -are just treated as plain Python classes by Airflow, so there is no need to register them with Airflow. - -Before: - -.. code-block:: python - - from airflow.hooks.my_plugin import MyHook - -You should instead import it as: - -.. code-block:: python - - from my_plugin import MyHook - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [x] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] extension ``hooks`` in ``airflow.plugins_manager.AirflowPlugin`` diff --git a/airflow-core/newsfragments/43349.significant.rst b/airflow-core/newsfragments/43349.significant.rst deleted file mode 100644 index 23af4ec5697f8..0000000000000 --- a/airflow-core/newsfragments/43349.significant.rst +++ /dev/null @@ -1,24 +0,0 @@ -Deprecated trigger rule ``TriggerRule.DUMMY`` removed - -**Breaking Change** - -The trigger rule ``TriggerRule.DUMMY`` was removed. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.trigger_rule.TriggerRule.DUMMY`` diff --git a/airflow-core/newsfragments/43490.significant.rst b/airflow-core/newsfragments/43490.significant.rst deleted file mode 100644 index 7d17cbec439a1..0000000000000 --- a/airflow-core/newsfragments/43490.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -The ``task_fail`` table has been removed from the Airflow database. - -This table was used to store task failures, but it was not used by any Airflow components. 
-Use the REST API to get task failures instead (which gets them from the ``task_instance`` table). - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43530.significant.rst b/airflow-core/newsfragments/43530.significant.rst deleted file mode 100644 index 0876bd92ef9d8..0000000000000 --- a/airflow-core/newsfragments/43530.significant.rst +++ /dev/null @@ -1,47 +0,0 @@ -Direct Access to Deprecated ``airflow.configuration`` Module Functions Removed - -Functions previously accessible directly via the ``airflow.configuration`` module, -such as ``get``, ``getboolean``, ``getfloat``, ``getint``, ``has_option``, ``remove_option``, ``as_dict``, and ``set``, -have been removed. These functions should now be accessed through ``airflow.configuration.conf``. - -Before: - -.. code-block:: python - - from airflow.configuration import get - - value = get("section", "key") - -After: - -.. code-block:: python - - from airflow.configuration import conf - - value = conf.get("section", "key") - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.configuration.getboolean`` → ``airflow.configuration.conf.getboolean`` - * [x] ``airflow.configuration.getfloat`` → ``airflow.configuration.conf.getfloat`` - * [x] ``airflow.configuration.getint`` → ``airflow.configuration.conf.getint`` - * [x] ``airflow.configuration.has_option`` → ``airflow.configuration.conf.has_option`` - * [x] ``airflow.configuration.remove_option`` → ``airflow.configuration.conf.remove_option`` - * [x] ``airflow.configuration.as_dict`` → ``airflow.configuration.conf.as_dict`` - * [x] ``airflow.configuration.set`` → ``airflow.configuration.conf.set`` - * [x] ``airflow.configuration.get`` → ``airflow.configuration.conf.get`` diff --git a/airflow-core/newsfragments/43533.significant.rst b/airflow-core/newsfragments/43533.significant.rst deleted file mode 100644 index c59f931005300..0000000000000 --- a/airflow-core/newsfragments/43533.significant.rst +++ /dev/null @@ -1,30 +0,0 @@ -Unused and redundant functions from the ``airflow.utils.dates`` module have been removed. - -The following functions are removed: - -- ``parse_execution_date`` -- ``round_time`` -- ``scale_time_units`` -- ``infer_time_unit`` - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.dates.parse_execution_date`` - * [x] ``airflow.utils.dates.round_time`` - * [x] ``airflow.utils.dates.scale_time_units`` - * [x] ``airflow.utils.dates.infer_time_unit`` diff --git a/airflow-core/newsfragments/43562.significant.rst b/airflow-core/newsfragments/43562.significant.rst deleted file mode 100644 index 98b232213e05d..0000000000000 --- a/airflow-core/newsfragments/43562.significant.rst +++ /dev/null @@ -1,44 +0,0 @@ -Removed Deprecated Python Version Identifiers from the ``airflow`` Module - -Python version check constants, such as ``PY36``, ``PY37``, and others, have been removed from the ``airflow`` -module.
To check Python versions, please use the ``sys.version_info`` attribute directly instead. - -Before: - -.. code-block:: python - - from airflow import PY36 - - if PY36: - # perform some action - ... - -After: - -.. code-block:: python - - import sys - - if sys.version_info >= (3, 6): - # perform some action - ... - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.PY\d\d`` diff --git a/airflow-core/newsfragments/43568.significant.rst b/airflow-core/newsfragments/43568.significant.rst deleted file mode 100644 index 801e7739f661e..0000000000000 --- a/airflow-core/newsfragments/43568.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -Remove ``virtualenv`` extra as PythonVirtualenvOperator has been moved to the standard provider and switched to using the built-in ``venv`` package. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43608.significant.rst b/airflow-core/newsfragments/43608.significant.rst deleted file mode 100644 index 8082b0725d3f4..0000000000000 --- a/airflow-core/newsfragments/43608.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Move Airflow core triggers to standard provider - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.triggers.external_task.*`` → ``airflow.providers.standard.triggers.external_task.*`` - * [x] ``airflow.triggers.file.*`` → ``airflow.providers.standard.triggers.file.*`` - * [x] ``airflow.triggers.temporal.*`` → ``airflow.providers.standard.triggers.temporal.*`` diff --git a/airflow-core/newsfragments/43611.significant.rst b/airflow-core/newsfragments/43611.significant.rst deleted file mode 100644 index 2f6e51fd78de4..0000000000000 --- a/airflow-core/newsfragments/43611.significant.rst +++ /dev/null @@ -1,17 +0,0 @@ -TaskInstance ``priority_weight`` is capped to the 32-bit signed integer range. - -Some database engines are limited to 32-bit integer values. As some users reported errors where the -weight rolled over to negative values, we decided to cap the value to the 32-bit integer range. Even -though Python internally supports smaller and larger 64-bit values, ``priority_weight`` is -capped and only stores values from -2147483648 to 2147483647. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43612.significant.rst b/airflow-core/newsfragments/43612.significant.rst deleted file mode 100644 index 1b37228e11510..0000000000000 --- a/airflow-core/newsfragments/43612.significant.rst +++ /dev/null @@ -1,19 +0,0 @@ -Virtualenv installation now uses ``uv`` by default if ``uv`` is available. - -If you want to control how the virtualenv is created, you can use the -AIRFLOW__STANDARD__VENV_INSTALL_METHOD option. The possible values are: - -- ``auto``: Automatically select, use ``uv`` if available, otherwise use ``pip``.
- ``pip``: Use pip to install the virtual environment. - ``uv``: Use uv to install the virtual environment. Must be available in the environment ``PATH``. - -* Types of change - - * [x] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43679.misc.rst b/airflow-core/newsfragments/43679.misc.rst deleted file mode 100644 index 792087dc63a75..0000000000000 --- a/airflow-core/newsfragments/43679.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Renamed ``airflow.security.kerberos.get_kerberos_principle`` to ``airflow.security.kerberos.get_kerberos_principal`` due to misspelling. diff --git a/airflow-core/newsfragments/43774.significant.rst b/airflow-core/newsfragments/43774.significant.rst deleted file mode 100644 index 30ba2885b1416..0000000000000 --- a/airflow-core/newsfragments/43774.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -``HookLineageCollector.create_asset`` now accepts only keyword arguments - -To provide AIP-74 support, new arguments "name" and "group" are added to ``HookLineageCollector.create_asset``. -To make future changes easier, this function now takes only keyword arguments. - -.. Check the type of change that applies to this change - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [ ] Calling ``HookLineageCollector.create_asset`` with positional arguments should raise an error diff --git a/airflow-core/newsfragments/43890.significant.rst b/airflow-core/newsfragments/43890.significant.rst deleted file mode 100644 index 9bf17e35a0890..0000000000000 --- a/airflow-core/newsfragments/43890.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Move filesystem sensor to standard provider - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.sensors.filesystem.FileSensor`` → ``airflow.providers.standard.sensors.filesystem.FileSensor`` diff --git a/airflow-core/newsfragments/43902.significant.rst b/airflow-core/newsfragments/43902.significant.rst deleted file mode 100644 index 262e23586ee28..0000000000000 --- a/airflow-core/newsfragments/43902.significant.rst +++ /dev/null @@ -1,35 +0,0 @@ -Renamed ``execution_date`` to ``logical_date`` across the codebase to align with Airflow 3.0. - -The shift towards ``logical_date`` helps move away from the limitations of ``execution_date``, particularly with dynamic DAG runs and cases where multiple runs occur at the same time. This change impacts database models, templates, and functions: - -- Renamed columns and function references to ``logical_date``. -- Removed ``execution_date``, ``next_ds``, ``next_ds_nodash``, ``next_execution_date``, ``prev_ds``, ``prev_ds_nodash``, ``prev_execution_date``, ``prev_execution_date_success``, ``tomorrow_ds``, ``yesterday_ds`` and ``yesterday_ds_nodash`` from Airflow ``context``.
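A hedged before/after sketch of the ``logical_date`` context change (the task body is illustrative; ``get_current_context`` is assumed to be importable from ``airflow.sdk`` in Airflow 3):

.. code-block:: python

    from airflow.sdk import get_current_context, task


    @task
    def report():
        context = get_current_context()
        # Airflow 2: context["execution_date"], context["next_ds"], ...
        # Airflow 3: only logical_date (and values derived from it) remain
        print(context["logical_date"])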
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [ ] context key ``execution_date`` - * [ ] context key ``next_ds`` - * [ ] context key ``next_ds_nodash`` - * [ ] context key ``next_execution_date`` - * [ ] context key ``prev_ds`` - * [ ] context key ``prev_ds_nodash`` - * [ ] context key ``prev_execution_date`` - * [ ] context key ``prev_execution_date_success`` - * [ ] context key ``tomorrow_ds`` - * [ ] context key ``yesterday_ds`` - * [ ] context key ``yesterday_ds_nodash`` diff --git a/airflow-core/newsfragments/43915.significant.rst b/airflow-core/newsfragments/43915.significant.rst deleted file mode 100644 index a3ac8e96881a0..0000000000000 --- a/airflow-core/newsfragments/43915.significant.rst +++ /dev/null @@ -1,21 +0,0 @@ -Configuration ``[core] strict_dataset_uri_validation`` is removed - -Asset URIs with a defined scheme will now always be validated strictly, raising -a hard error on validation failure. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``core.strict_dataset_uri_validation`` diff --git a/airflow-core/newsfragments/43943.significant.rst b/airflow-core/newsfragments/43943.significant.rst deleted file mode 100644 index 36d81af1647ee..0000000000000 --- a/airflow-core/newsfragments/43943.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Remove the ``traces.otel_task_log_event`` config option and feature - -This was sending the task logs from the scheduler, and would be a huge -scheduling performance hit (blocking all scheduling while it was -fetching logs to attach to the trace). - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``traces.otel_task_log_event`` diff --git a/airflow-core/newsfragments/43949.significant.rst b/airflow-core/newsfragments/43949.significant.rst deleted file mode 100644 index 745c10d455def..0000000000000 --- a/airflow-core/newsfragments/43949.significant.rst +++ /dev/null @@ -1,16 +0,0 @@ -The ``--clear-only`` option of the ``airflow dags reserialize`` command is now removed. - -The ``--clear-only`` option was added to clear the serialized DAGs without reserializing them. -This option has been removed as it is no longer needed. We have implemented DAG versioning and can -no longer delete serialized DAGs without going through the ``airflow db-clean`` command. This command is now only for reserializing DAGs.
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/43975.significant.rst b/airflow-core/newsfragments/43975.significant.rst deleted file mode 100644 index dd8e4c0268500..0000000000000 --- a/airflow-core/newsfragments/43975.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -Timer and timing metrics are now standardized to milliseconds - -In Airflow 3.0, the ``timer_unit_consistency`` setting in the ``metrics`` section is removed as it is now the default behaviour. -This is done to standardize all timer and timing metrics to milliseconds across all metric loggers. - -Airflow 2.11 introduced the ``timer_unit_consistency`` setting in the ``metrics`` section of the configuration file. The -default value was ``False``, which meant that the timer and timing metrics were logged in seconds. This was done to maintain -backwards compatibility with the previous versions of Airflow. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``metrics.timer_unit_consistency`` diff --git a/airflow-core/newsfragments/44053.significant.rst b/airflow-core/newsfragments/44053.significant.rst deleted file mode 100644 index 7b3054394d87d..0000000000000 --- a/airflow-core/newsfragments/44053.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Move ``TriggerDagRunOperator`` to standard provider (see the import sketch below) - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.operators.trigger_dagrun.TriggerDagRunOperator`` → ``airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator`` diff --git a/airflow-core/newsfragments/44080.significant.rst b/airflow-core/newsfragments/44080.significant.rst deleted file mode 100644 index 3aeca5191e4b6..0000000000000 --- a/airflow-core/newsfragments/44080.significant.rst +++ /dev/null @@ -1,14 +0,0 @@ -PostgreSQL 12 is no longer supported - -PostgreSQL 12 is no longer being supported by the PostgreSQL community. You must upgrade to PostgreSQL 13+ to use this version of Airflow.
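For the ``TriggerDagRunOperator`` relocation above (44053), the change is import-only; a minimal sketch (the task and DAG ids are illustrative):

.. code-block:: python

    # Before: from airflow.operators.trigger_dagrun import TriggerDagRunOperator
    from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

    trigger = TriggerDagRunOperator(
        task_id="trigger_downstream",
        trigger_dag_id="downstream_dag",
    )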
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/44288.significant.rst b/airflow-core/newsfragments/44288.significant.rst deleted file mode 100644 index e1a38ac27cd39..0000000000000 --- a/airflow-core/newsfragments/44288.significant.rst +++ /dev/null @@ -1,21 +0,0 @@ -Move external task sensor to standard provider #44288 - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.sensors.external_task.ExternalTaskMarker`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskMarker`` - * [x] ``airflow.sensors.external_task.ExternalTaskSensor`` → ``airflow.providers.standard.sensors.external_task.ExternalTaskSensor`` diff --git a/airflow-core/newsfragments/44300.bugfix.rst b/airflow-core/newsfragments/44300.bugfix.rst deleted file mode 100644 index ffd4b07b2ab0d..0000000000000 --- a/airflow-core/newsfragments/44300.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix stats of dynamic mapped tasks after automatic retries of failed tasks diff --git a/airflow-core/newsfragments/44475.significant.rst b/airflow-core/newsfragments/44475.significant.rst deleted file mode 100644 index 4b55765e75fa1..0000000000000 --- a/airflow-core/newsfragments/44475.significant.rst +++ /dev/null @@ -1,26 +0,0 @@ -Remove ``TriggerRule.NONE_FAILED_OR_SKIPPED`` - -.. Provide additional contextual information - -.. Check the type of change that applies to this change - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -.. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``TriggerRule.NONE_FAILED_OR_SKIPPED`` diff --git a/airflow-core/newsfragments/44533.significant.rst b/airflow-core/newsfragments/44533.significant.rst deleted file mode 100644 index 5943741428d7f..0000000000000 --- a/airflow-core/newsfragments/44533.significant.rst +++ /dev/null @@ -1,16 +0,0 @@ -During offline migration, ``DagRun.conf`` is cleared - -.. Provide additional contextual information - -The ``conf`` column is changing from pickle to JSON; thus, the values in that column cannot be migrated during offline migrations. If you want to retain ``conf`` values for existing DagRuns, you must do a normal, non-offline, migration. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/44706.significant.rst b/airflow-core/newsfragments/44706.significant.rst deleted file mode 100644 index 6391000d4caf7..0000000000000 --- a/airflow-core/newsfragments/44706.significant.rst +++ /dev/null @@ -1,16 +0,0 @@ -Deprecated cli commands under ``db`` group removed - -The ``db init`` and ``db upgrade`` commands have been removed. Use ``db migrate`` instead to initialize or migrate the metadata database.
-If you would like to create default connections, use ``airflow connections create-default-connections``. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/44712.feature.rst b/airflow-core/newsfragments/44712.feature.rst deleted file mode 100644 index 9a26758c84368..0000000000000 --- a/airflow-core/newsfragments/44712.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add a new table to the db for the Deadline Alerts feature. diff --git a/airflow-core/newsfragments/44751.bugfix.rst b/airflow-core/newsfragments/44751.bugfix.rst deleted file mode 100644 index 1ca32178be1c5..0000000000000 --- a/airflow-core/newsfragments/44751.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -``TriggerRule.ALWAYS`` cannot be utilized within a task-generated mapping, either in bare tasks (fixed in this PR) or mapped task groups (fixed in PR #44368). The issue with doing so is that the task is immediately executed without waiting for the upstreams' mapping results, which inevitably leads to failure of the task. This fix avoids it by raising an exception when it is detected during DAG parsing (see the sketch below). diff --git a/airflow-core/newsfragments/44820.significant.rst b/airflow-core/newsfragments/44820.significant.rst deleted file mode 100644 index c37be748c780c..0000000000000 --- a/airflow-core/newsfragments/44820.significant.rst +++ /dev/null @@ -1,43 +0,0 @@ -Removed ``conf`` from the Task template context - -The ``conf`` variable, which provided access to the full Airflow configuration (``airflow.cfg``), has been -removed from the Task (Jinja2) template context for security and simplicity. If you -need specific configuration values in your tasks, retrieve them explicitly in your DAG or task code -using the ``airflow.configuration.conf`` module. - -For users retrieving the webserver URL (e.g., to include log links in tasks or callbacks), one of the -most common use cases, use the ``ti.log_url`` property available in the ``TaskInstance`` context instead. - -Example: - -.. code-block:: python - - PythonOperator( - task_id="my_task", - python_callable=my_task_callable, - on_failure_callback=SmtpNotifier( - from_email="example@example.com", - to="example@example.com", - subject="Task {{ ti.task_id }} failed", - html_content="Task {{ ti.task_id }} failed. Log URL: {{ ti.log_url }}", - ), - ) - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [ ] context key ``conf`` diff --git a/airflow-core/newsfragments/44908.feature.rst b/airflow-core/newsfragments/44908.feature.rst deleted file mode 100644 index 2a711a0149322..0000000000000 --- a/airflow-core/newsfragments/44908.feature.rst +++ /dev/null @@ -1 +0,0 @@ -The ``airflow config lint`` command has been introduced to help users migrate from Airflow 2.x to 3.0 by identifying removed or renamed configuration parameters in airflow.cfg.
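To make the ``TriggerRule.ALWAYS`` restriction above (44751) concrete, a hedged sketch of the pattern that now raises during DAG parsing (task names are illustrative):

.. code-block:: python

    from airflow.sdk import task
    from airflow.utils.trigger_rule import TriggerRule


    @task
    def make_values():
        return [1, 2, 3]


    @task(trigger_rule=TriggerRule.ALWAYS)  # now rejected at parse time inside a mapping
    def consume(value):
        print(value)


    # The mapped task would otherwise start before make_values() produced its results.
    consume.expand(value=make_values())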
diff --git a/airflow-core/newsfragments/45017.significant.rst b/airflow-core/newsfragments/45017.significant.rst deleted file mode 100644 index b1140fcf8d434..0000000000000 --- a/airflow-core/newsfragments/45017.significant.rst +++ /dev/null @@ -1,23 +0,0 @@ -Remove deprecated ``DEFAULT_CELERY_CONFIG`` from config templates - -``DEFAULT_CELERY_CONFIG`` has been moved into the celery provider and -should be imported from ``airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG`` → ``airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG`` diff --git a/airflow-core/newsfragments/45134.bugfix.rst b/airflow-core/newsfragments/45134.bugfix.rst deleted file mode 100644 index 09aaae23a3487..0000000000000 --- a/airflow-core/newsfragments/45134.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -(v2 API & UI) Allow fetching XCom with forward slash from the API and escape it in the UI diff --git a/airflow-core/newsfragments/45327.significant.rst b/airflow-core/newsfragments/45327.significant.rst deleted file mode 100644 index 1e7423cd2e6ea..0000000000000 --- a/airflow-core/newsfragments/45327.significant.rst +++ /dev/null @@ -1,21 +0,0 @@ -Renamed DAG argument ``fail_stop`` to ``fail_fast`` across the codebase to align with Airflow 3.0. - - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] arguments ``fail_stop`` → ``fail_fast`` in ``DAG`` diff --git a/airflow-core/newsfragments/45425.significant.rst b/airflow-core/newsfragments/45425.significant.rst deleted file mode 100644 index 8fb3c3d477083..0000000000000 --- a/airflow-core/newsfragments/45425.significant.rst +++ /dev/null @@ -1,21 +0,0 @@ -The ``airflow.io`` class ``ObjectStoragePath`` and function ``attach`` are moved to ``airflow.sdk``. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [ ] ``airflow.io.path.ObjectStoragePath`` → ``airflow.sdk.ObjectStoragePath`` - * [ ] ``airflow.io.attach`` → ``airflow.sdk.io.attach`` diff --git a/airflow-core/newsfragments/45530.significant.rst b/airflow-core/newsfragments/45530.significant.rst deleted file mode 100644 index 5805dc3789e2b..0000000000000 --- a/airflow-core/newsfragments/45530.significant.rst +++ /dev/null @@ -1,23 +0,0 @@ -Ensure teardown tasks are executed when DAG run is set to failed - -Previously, when a DAG run was manually set to the "failed" or "success" state, the terminal state was applied to all tasks. -But this was a gap for cases where setup and teardown tasks were defined: if teardown was used to clean up infrastructure -or other resources, it was also skipped and thus resources could stay allocated.
Teardown tasks are skipped if the setup was also skipped. - -As a side effect this means if the DAG contains teardown tasks, then the manual marking of DAG as "failed" or "success" -will need to keep the DAG in running state to ensure that teardown tasks will be scheduled. They would not be scheduled -if the DAG is diorectly set to "failed" or "success". - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/45694.significant.rst b/airflow-core/newsfragments/45694.significant.rst deleted file mode 100644 index 222083bdf2e7e..0000000000000 --- a/airflow-core/newsfragments/45694.significant.rst +++ /dev/null @@ -1,52 +0,0 @@ -``get_parsing_context`` have been moved to Task SDK - -As part of AIP-72: Task SDK, the function ``get_parsing_context`` has been moved to ``airflow.sdk`` module. -Previously, it was located in ``airflow.utils.dag_parsing_context`` module. - -This function is used to optimize DAG parsing during execution when DAGs are generated dynamically. - -Before: - -.. code-block:: python - - from airflow.models.dag import DAG - from airflow.utils.dag_parsing_context import get_parsing_context - - current_dag_id = get_parsing_context().dag_id - - for thing in list_of_things: - dag_id = f"generated_dag_{thing}" - if current_dag_id is not None and current_dag_id != dag_id: - continue # skip generation of non-selected DAG - - with DAG(dag_id=dag_id, ...): - ... - -After: - -.. code-block:: python - - from airflow.sdk import get_parsing_context - - current_dag_id = get_parsing_context().dag_id - - # The rest of the code remains the same - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.dag_parsing_context.get_parsing_context`` -> ``airflow.sdk.get_parsing_context`` diff --git a/airflow-core/newsfragments/45729.significant.rst b/airflow-core/newsfragments/45729.significant.rst deleted file mode 100644 index 5a722119cdf7d..0000000000000 --- a/airflow-core/newsfragments/45729.significant.rst +++ /dev/null @@ -1,14 +0,0 @@ -Standalone DAG processor is now required - -The scheduler is no longer able to parse DAGs itself - it relies on the standalone DAG processor (introduced in Airflow 2.3) to do it instead. You can start one by running ``airflow dag-processor``. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/45960.significant.rst b/airflow-core/newsfragments/45960.significant.rst deleted file mode 100644 index dcc581ca137f8..0000000000000 --- a/airflow-core/newsfragments/45960.significant.rst +++ /dev/null @@ -1,37 +0,0 @@ -Change how asset uris are accessed in inlet_events - -We used to be able to read asset uri through - -.. code-block:: python - - @task - def access_inlet_events(inlet_events): - print(inlet_events["uri"]) - -Now we'll need to do - -.. 
code-block:: python - - @task - def access_inlet_events(inlet_events): - print(inlet_events["asset"]["uri"]) - - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [ ] context_key ``inlet_events.uri`` → ``inlet_events.asset.uri`` diff --git a/airflow-core/newsfragments/45961.significant.rst b/airflow-core/newsfragments/45961.significant.rst deleted file mode 100644 index a8c763e0dfb1f..0000000000000 --- a/airflow-core/newsfragments/45961.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Replace the ``external_trigger`` check with ``DagRunType``, and update any logic that relies on ``external_trigger`` to use ``run_type`` instead. - - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [ ] argument ``external_trigger`` in ``airflow...DAG.create_dagrun`` - * [ ] context key ``dag_run.external_trigger`` diff --git a/airflow-core/newsfragments/46231.significant.rst b/airflow-core/newsfragments/46231.significant.rst deleted file mode 100644 index 18b20ec71a19b..0000000000000 --- a/airflow-core/newsfragments/46231.significant.rst +++ /dev/null @@ -1,23 +0,0 @@ -Moving EmptyOperator from Airflow core to the ``standard`` provider. - -EmptyOperator has been moved from Airflow core (the ``airflow.operators`` directory) to the ``standard`` provider. -For new and existing DAGs, users must import ``EmptyOperator`` from ``airflow.providers.standard.operators.empty``.
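A minimal import sketch for this move:

.. code-block:: python

    # Before: from airflow.operators.empty import EmptyOperator
    from airflow.providers.standard.operators.empty import EmptyOperator

    placeholder = EmptyOperator(task_id="placeholder")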
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.operators.empty.EmptyOperator`` → ``airflow.providers.standard.operators.empty.EmptyOperator`` diff --git a/airflow-core/newsfragments/46375.significant.rst b/airflow-core/newsfragments/46375.significant.rst deleted file mode 100644 index dafd2b9f3875d..0000000000000 --- a/airflow-core/newsfragments/46375.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -``SecretsMasker`` has now been moved into the task SDK to be consumed by DAG authors and users - -Any occurrences of the ``secrets_masker`` module will have to be updated from ``airflow.utils.log.secrets_masker`` to the new path: ``airflow.sdk.execution_time.secrets_masker`` - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.log.secrets_masker`` → ``airflow.sdk.execution_time.secrets_masker`` diff --git a/airflow-core/newsfragments/46408.significant.rst b/airflow-core/newsfragments/46408.significant.rst deleted file mode 100644 index ebfa4b9e614ad..0000000000000 --- a/airflow-core/newsfragments/46408.significant.rst +++ /dev/null @@ -1,30 +0,0 @@ -DAG processor-related config options removed - -The following configuration options have been removed: - -- ``[logging] dag_processor_manager_log_location`` -- ``[logging] dag_processor_manager_log_stdout`` -- ``[logging] log_processor_filename_template`` - -If these config options are still present, they no longer have any effect. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -.. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``[logging] dag_processor_manager_log_location`` - * [x] ``[logging] dag_processor_manager_log_stdout`` - * [x] ``[logging] log_processor_filename_template`` diff --git a/airflow-core/newsfragments/46415.significant.rst b/airflow-core/newsfragments/46415.significant.rst deleted file mode 100644 index ac2cd6f84d2f6..0000000000000 --- a/airflow-core/newsfragments/46415.significant.rst +++ /dev/null @@ -1,26 +0,0 @@ -Legacy signature for operator link is removed. - -``BaseOperatorLink.get_link`` used to accept execution date as an argument. This -has been changed to accept ``ti_key`` to identify a task instance instead. The -old signature, supported at runtime for compatibility, has been removed. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [ ] Signature of ``airflow.models.baseoperatorlink.BaseOperatorLink.get_link`` changed - - ..
detailed in https://github.com/apache/airflow/pull/46415#issuecomment-2636186625 diff --git a/airflow-core/newsfragments/46573.significant.rst b/airflow-core/newsfragments/46573.significant.rst deleted file mode 100644 index 20a5b7dd5fa26..0000000000000 --- a/airflow-core/newsfragments/46573.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -In Airflow 3.0, ``airflow.operators.email.EmailOperator`` is removed. - -Instead, users can install the ``smtp`` provider and import ``EmailOperator`` from the module ``airflow.providers.smtp.operators.smtp``. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.operators.email.EmailOperator`` → ``airflow.providers.smtp.operators.smtp.EmailOperator`` diff --git a/airflow-core/newsfragments/46613.significant.rst b/airflow-core/newsfragments/46613.significant.rst deleted file mode 100644 index c0c0a5b04498e..0000000000000 --- a/airflow-core/newsfragments/46613.significant.rst +++ /dev/null @@ -1,45 +0,0 @@ -Operator Links interface changed to not run user code in Airflow Webserver - -The Operator Extra links, which can be defined either via plugins or custom operators, -now do not execute any user code in the Airflow Webserver, but instead push the "full" -links to the XCom backend and the link is fetched from the XCom backend when viewing -task details, for example from grid view. - -Example for users with a custom links class: - -.. code-block:: python - - @attr.s(auto_attribs=True) - class CustomBaseIndexOpLink(BaseOperatorLink): - """Custom Operator Link for Google BigQuery Console.""" - - index: int = attr.ib() - - @property - def name(self) -> str: - return f"BigQuery Console #{self.index + 1}" - - @property - def xcom_key(self) -> str: - return f"bigquery_{self.index + 1}" - - def get_link(self, operator, *, ti_key): - search_query = XCom.get_one( - task_id=ti_key.task_id, dag_id=ti_key.dag_id, run_id=ti_key.run_id, key="search_query" - ) - return f"https://console.cloud.google.com/bigquery?j={search_query}" - -The link has an ``xcom_key`` defined, which is how it will be stored in the XCom backend, with the key being ``xcom_key`` and -the value being the entire link, in this case: https://console.cloud.google.com/bigquery?j=search - - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/46663.significant.rst b/airflow-core/newsfragments/46663.significant.rst deleted file mode 100644 index b347d4dbd69df..0000000000000 --- a/airflow-core/newsfragments/46663.significant.rst +++ /dev/null @@ -1,31 +0,0 @@ -Removed configuration ``scheduler.allow_trigger_in_future``. - -A DAG run with a logical date in the future can never be started now. This only affects ``schedule=None``. - -Instead of using a future date, you can trigger with ``logical_date=None``. A custom ``run_id`` can be supplied if desired. If a date is needed, it can be passed as a DAG param instead. - -Property ``allow_future_exec_dates`` on the DAG class has also been removed.
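Returning to the ``EmailOperator`` removal above (46573), a minimal sketch of the new import (the operator arguments shown are illustrative, mirroring the classic ``EmailOperator`` interface):

.. code-block:: python

    # Requires the smtp provider: apache-airflow-providers-smtp
    from airflow.providers.smtp.operators.smtp import EmailOperator

    notify = EmailOperator(
        task_id="notify",
        to="example@example.com",
        subject="Pipeline finished",
        html_content="All tasks completed.",
    )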
- - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] property ``airflow...DAG.allow_future_exec_dates`` - - * ``airflow config lint`` - - * [x] ``scheduler.allow_trigger_in_future`` diff --git a/airflow-core/newsfragments/46916.significant.rst b/airflow-core/newsfragments/46916.significant.rst deleted file mode 100644 index 9f24753713468..0000000000000 --- a/airflow-core/newsfragments/46916.significant.rst +++ /dev/null @@ -1,19 +0,0 @@ -Public API authentication is migrated to JWT token-based authentication for the default (Simple Auth Manager) and FAB provider. - -By default, the API is used to create a token (JWT) that authenticates requests to the API. -The endpoints are populated under the ``/auth`` path. -If no auth manager provider such as FAB is installed, the API will use the default Simple Auth Manager in core. - -To get the same functionality for API requests using the FAB provider, please install ``apache-airflow-providers-fab``. -For more information, please look at :doc:`apache-airflow-providers-fab:auth-manager/api-authentication`. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/46929.bugfix.rst b/airflow-core/newsfragments/46929.bugfix.rst new file mode 100644 index 0000000000000..bff83a3807dae --- /dev/null +++ b/airflow-core/newsfragments/46929.bugfix.rst @@ -0,0 +1 @@ +Added validation in XCom.set() to disallow None or empty string keys; XCom.get() still allows None as key but disallows empty strings. diff --git a/airflow-core/newsfragments/47070.significant.rst b/airflow-core/newsfragments/47070.significant.rst deleted file mode 100644 index 2b212933b9b03..0000000000000 --- a/airflow-core/newsfragments/47070.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -Auto data interval calculation is disabled by default - -Configurations ``[scheduler] create_cron_data_intervals`` and ``create_delta_data_intervals`` are now *False* -by default. This means schedules specified using cron expressions or time deltas now have their logical date -set to *when a new run can start* instead of one data interval before. - -See :ref:`Differences between "trigger" and "data interval" timetables` for more information. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config update`` - - * [x] ``scheduler.create_cron_data_intervals`` - * [x] ``scheduler.create_delta_data_intervals`` diff --git a/airflow-core/newsfragments/47083.significant.rst b/airflow-core/newsfragments/47083.significant.rst deleted file mode 100644 index 6d8401935991b..0000000000000 --- a/airflow-core/newsfragments/47083.significant.rst +++ /dev/null @@ -1,45 +0,0 @@ -``airflow api-server`` has replaced the ``airflow webserver`` cli command - -The new Airflow UI is now being served as part of the ``airflow api-server`` command and the ``airflow webserver`` command has been removed.
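A hedged sketch of the JWT flow described in 46916 above, assuming a local ``airflow api-server`` with the Simple Auth Manager; the URL, credentials, token endpoint path, response field, and the ``/api/v2`` version path are all assumptions for illustration:

.. code-block:: python

    import requests

    # Exchange credentials for a JWT at the /auth endpoints described above (assumed path).
    resp = requests.post(
        "http://localhost:8080/auth/token",
        json={"username": "admin", "password": "admin"},
    )
    token = resp.json()["access_token"]  # assumed response field

    # Use the token as a Bearer credential against the public API.
    headers = {"Authorization": f"Bearer {token}"}
    print(requests.get("http://localhost:8080/api/v2/version", headers=headers).json())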
- -The following configuration options have moved to the ``[api]`` section: - -- ``[webserver] web_server_host`` has been moved to ``[api] host`` -- ``[webserver] web_server_port`` has been moved to ``[api] port`` -- ``[webserver] workers`` has been moved to ``[api] workers`` -- ``[webserver] web_server_worker_timeout`` has been moved to ``[api] worker_timeout`` -- ``[webserver] web_server_ssl_cert`` has been moved to ``[api] ssl_cert`` -- ``[webserver] web_server_ssl_key`` has been moved to ``[api] ssl_key`` -- ``[webserver] access_logfile`` has been moved to ``[api] access_logfile`` - -The following configuration options have been removed: - -- ``[webserver] error_logfile`` -- ``[webserver] access_logformat`` - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -.. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``[webserver] web_server_host`` → ``[api] host`` - * [x] ``[webserver] web_server_port`` → ``[api] port`` - * [x] ``[webserver] workers`` → ``[api] workers`` - * [x] ``[webserver] web_server_worker_timeout`` → ``[api] worker_timeout`` - * [x] ``[webserver] web_server_ssl_cert`` → ``[api] ssl_cert`` - * [x] ``[webserver] web_server_ssl_key`` → ``[api] ssl_key`` - * [x] ``[webserver] access_logfile`` → ``[api] access_logfile`` - * [x] ``[webserver] error_logfile`` removed - * [x] ``[webserver] access_logformat`` removed diff --git a/airflow-core/newsfragments/47131.significant.rst b/airflow-core/newsfragments/47131.significant.rst deleted file mode 100644 index e8bf03ff3eae5..0000000000000 --- a/airflow-core/newsfragments/47131.significant.rst +++ /dev/null @@ -1,12 +0,0 @@ -The Airflow UI is now started with the ``airflow api-server`` command. The ``airflow webserver`` command has been removed. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/47136.significant.rst b/airflow-core/newsfragments/47136.significant.rst deleted file mode 100644 index d881cd789fa99..0000000000000 --- a/airflow-core/newsfragments/47136.significant.rst +++ /dev/null @@ -1,13 +0,0 @@ -``uri`` is replaced with ``id`` in ``AssetDetails``. Hence, the ``is_authorized_asset`` method needs to be updated in Auth Managers to use ``id`` instead of ``uri``. - - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [X] Code interface changes diff --git a/airflow-core/newsfragments/47264.significant.rst b/airflow-core/newsfragments/47264.significant.rst deleted file mode 100644 index d66c18dd12644..0000000000000 --- a/airflow-core/newsfragments/47264.significant.rst +++ /dev/null @@ -1,16 +0,0 @@ -Removed leftover deprecations prior to 3.0.0. - -* Removed the ``RemovedInAirflow3Warning`` warning class. -* Removed the deprecated module ``airflow.api.auth.backend.session``. Please use ``airflow.providers.fab.auth_manager.api.auth.backend.session`` instead. -* Removed the deprecated ``cleanup_stuck_queued_tasks`` method from the ``BaseExecutor`` interface. It is replaced by the ``revoke_task`` function.
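A hypothetical, self-contained sketch of the ``AssetDetails`` change above (47136); the dataclass and helper below are illustrative stand-ins, not the real Airflow classes -- the point is only that authorization logic keyed on ``uri`` must now key on ``id``:

.. code-block:: python

    from dataclasses import dataclass


    @dataclass
    class AssetDetails:  # illustrative stand-in for the real class
        id: str  # previously the class carried ``uri``


    def is_authorized_asset(details: AssetDetails | None, allowed_ids: set[str]) -> bool:
        # Auth-manager logic that compared details.uri must now compare details.id.
        return details is None or details.id in allowed_ids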
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes diff --git a/airflow-core/newsfragments/47354.significant.rst b/airflow-core/newsfragments/47354.significant.rst deleted file mode 100644 index d967cb7242859..0000000000000 --- a/airflow-core/newsfragments/47354.significant.rst +++ /dev/null @@ -1,19 +0,0 @@ -The ``catchup_by_default`` configuration is now ``False`` by default. This means dags which do not explicitly define ``catchup`` will not display catchup behavior. - -* Types of change - - * [x] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``scheduler.catchup_by_default`` default change from ``True`` to ``False``. diff --git a/airflow-core/newsfragments/47399.significant.rst b/airflow-core/newsfragments/47399.significant.rst deleted file mode 100644 index bd84287acec61..0000000000000 --- a/airflow-core/newsfragments/47399.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -Removed auth backends. Auth backends are no longer used in Airflow 3. Please refer to the documentation on how to use the Airflow 3 public API. - -Moved the configuration ``[api] auth_backends`` to ``[fab] auth_backends``. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``api.auth_backends`` → ``fab.auth_backends`` diff --git a/airflow-core/newsfragments/47414.significant.rst b/airflow-core/newsfragments/47414.significant.rst deleted file mode 100644 index 6c7f9ac84b047..0000000000000 --- a/airflow-core/newsfragments/47414.significant.rst +++ /dev/null @@ -1,17 +0,0 @@ -Default connections no longer created by ``airflow db reset`` - -When default connection creation was removed from the ``airflow db migrate`` command in 2.7, -``airflow db reset`` was missed and still used the deprecated configuration option -``[database] load_default_connections``. ``airflow db reset`` no longer does, so after a DB reset you must call -``airflow connections create-default-connections`` explicitly if you'd like the default connections to be created. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/47441.significant.rst b/airflow-core/newsfragments/47441.significant.rst deleted file mode 100644 index 05fe947efe0eb..0000000000000 --- a/airflow-core/newsfragments/47441.significant.rst +++ /dev/null @@ -1,15 +0,0 @@ -There are no more production bundle or devel extras - -There are no more production ``all*`` or ``devel*`` bundle extras available in the ``wheel`` package of Airflow. -If you want to install Airflow with all extras, you can use the ``uv pip install --all-extras`` command.
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/47599.significant.rst b/airflow-core/newsfragments/47599.significant.rst deleted file mode 100644 index 5e6c5c713a580..0000000000000 --- a/airflow-core/newsfragments/47599.significant.rst +++ /dev/null @@ -1,23 +0,0 @@ -Remove the option to import ``create_session`` from the ``db.py`` util - -The ability to create a session from ``utils/db.py`` is removed. - - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.utils.db.create_session`` diff --git a/airflow-core/newsfragments/47761.significant.rst b/airflow-core/newsfragments/47761.significant.rst deleted file mode 100644 index 1da22d754bfa6..0000000000000 --- a/airflow-core/newsfragments/47761.significant.rst +++ /dev/null @@ -1,34 +0,0 @@ -Old default values no longer auto-updated - -There are a few configuration options that had old default values that no longer worked, so -Airflow 2 would automatically update them on-the-fly to the new default values. This is no longer -the case, and users need to update their configuration files to the new default values. - -The configuration options that were auto-updated in Airflow 2 are: - -- ``[core] hostname`` with value ``:`` -- ``[email] email_backend`` with value ``airflow.contrib.utils.sendgrid.send_email`` -- ``[logging] log_filename_template`` with value ``{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log`` -- ``[elasticsearch] log_id_template`` with value ``{dag_id}-{task_id}-{logical_date}-{try_number}`` - -If you have these configuration options in your ``airflow.cfg`` file, you need to update them. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config update`` - - * [x] Remove ``[core] hostname`` configuration option from config if value is ``:`` - * [x] Remove ``[email] email_backend`` configuration option from config if value is ``airflow.contrib.utils.sendgrid.send_email`` - * [x] Remove ``[logging] log_filename_template`` configuration option from config if value is ``{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log`` - * [x] Remove ``[elasticsearch] log_id_template`` configuration option from config if value is ``{dag_id}-{task_id}-{logical_date}-{try_number}`` diff --git a/airflow-core/newsfragments/47892.significant.rst b/airflow-core/newsfragments/47892.significant.rst deleted file mode 100644 index f6a1e0331b173..0000000000000 --- a/airflow-core/newsfragments/47892.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -Relocate utils.weekday from core to standard provider - -The module ``airflow.utils.weekday`` has been removed. Please use ``airflow.providers.standard.utils.weekday`` instead.
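A minimal sketch for this relocation (``WeekDay`` being the enum the module provides):

.. code-block:: python

    # Before: from airflow.utils.weekday import WeekDay
    from airflow.providers.standard.utils.weekday import WeekDay

    print(WeekDay.MONDAY)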
- -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ruff - - * AIR303 - - * [x] ``airflow.utils.weekday`` → ``airflow.providers.standard.utils.weekday`` diff --git a/airflow-core/newsfragments/47927.significant.rst b/airflow-core/newsfragments/47927.significant.rst deleted file mode 100644 index d9a017abf40d8..0000000000000 --- a/airflow-core/newsfragments/47927.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -Pre-installed providers are minimized by default with Airflow 3.0. - -Before Airflow 3.0, a set of providers was always pre-installed. We removed the pre-installation of ``ftp``, ``http`` and ``imap`` as they are not needed in all cases. - -The following providers are pre-installed by default: - -* common.compat -* common.io -* common.sql -* fab>=1.0.2 -* smtp -* sqlite -* standard - - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/48008.significant.rst b/airflow-core/newsfragments/48008.significant.rst deleted file mode 100644 index 58a76acc5c5b8..0000000000000 --- a/airflow-core/newsfragments/48008.significant.rst +++ /dev/null @@ -1,20 +0,0 @@ -The BaseNotifier class has been moved to ``airflow.sdk``. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ruff - - * AIR302 - - * [x] ``airflow.notifications.basenotifier.BaseNotifier`` → ``airflow.sdk.BaseNotifier`` diff --git a/airflow-core/newsfragments/48027.significant.rst b/airflow-core/newsfragments/48027.significant.rst deleted file mode 100644 index bdabd5d40e2e5..0000000000000 --- a/airflow-core/newsfragments/48027.significant.rst +++ /dev/null @@ -1,14 +0,0 @@ -``password`` extra has been removed - -Nothing is using the dependencies that are installed in the ``password`` extra (``bcrypt`` and ``flask-bcrypt``), so the extra has been removed.
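For the ``BaseNotifier`` move above (48008), a hedged sketch of a custom notifier under the new import; the class name and message are illustrative, and ``notify(self, context)`` is the familiar notifier hook:

.. code-block:: python

    from airflow.sdk import BaseNotifier


    class PrintNotifier(BaseNotifier):
        """Illustrative notifier; real ones would post to chat, email, etc."""

        template_fields = ("message",)

        def __init__(self, message: str = "Task {{ ti.task_id }} finished"):
            super().__init__()
            self.message = message

        def notify(self, context):
            print(self.message)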
- -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/48066.significant.rst b/airflow-core/newsfragments/48066.significant.rst deleted file mode 100644 index c0767207321fa..0000000000000 --- a/airflow-core/newsfragments/48066.significant.rst +++ /dev/null @@ -1,78 +0,0 @@ -Unused webserver configuration options have been removed - -The following configuration options are now unused and have been removed: - -- ``[webserver] web_server_master_timeout`` -- ``[webserver] worker_refresh_batch_size`` -- ``[webserver] worker_refresh_interval`` -- ``[webserver] reload_on_plugin_change`` -- ``[webserver] worker_class`` -- ``[webserver] expose_stacktrace`` -- ``[webserver] log_fetch_delay_sec`` -- ``[webserver] log_auto_tailing_offset`` -- ``[webserver] log_animation_speed`` -- ``[webserver] default_dag_run_display_number`` -- ``[webserver] enable_proxy_fix`` -- ``[webserver] proxy_fix_x_for`` -- ``[webserver] proxy_fix_x_proto`` -- ``[webserver] proxy_fix_x_host`` -- ``[webserver] proxy_fix_x_port`` -- ``[webserver] proxy_fix_x_prefix`` -- ``[webserver] cookie_secure`` -- ``[webserver] analytics_tool`` -- ``[webserver] analytics_id`` -- ``[webserver] analytics_url`` -- ``[webserver] show_recent_stats_for_completed_runs`` -- ``[webserver] run_internal_api`` -- ``[webserver] caching_hash_method`` -- ``[webserver] show_trigger_form_if_no_params`` -- ``[webserver] num_recent_configurations_for_trigger`` -- ``[webserver] allowed_payload_size`` -- ``[webserver] max_form_memory_size`` -- ``[webserver] max_form_parts`` - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -.. 
List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) - -* Migration rules needed - - * ``airflow config lint`` - - * [x] Remove configuration option ``[webserver] web_server_master_timeout`` - * [x] Remove configuration option ``[webserver] worker_refresh_batch_size`` - * [x] Remove configuration option ``[webserver] worker_refresh_interval`` - * [x] Remove configuration option ``[webserver] reload_on_plugin_change`` - * [x] Remove configuration option ``[webserver] worker_class`` - * [x] Remove configuration option ``[webserver] expose_stacktrace`` - * [x] Remove configuration option ``[webserver] log_fetch_delay_sec`` - * [x] Remove configuration option ``[webserver] log_auto_tailing_offset`` - * [x] Remove configuration option ``[webserver] log_animation_speed`` - * [x] Remove configuration option ``[webserver] default_dag_run_display_number`` - * [x] Remove configuration option ``[webserver] enable_proxy_fix`` - * [x] Remove configuration option ``[webserver] proxy_fix_x_for`` - * [x] Remove configuration option ``[webserver] proxy_fix_x_proto`` - * [x] Remove configuration option ``[webserver] proxy_fix_x_host`` - * [x] Remove configuration option ``[webserver] proxy_fix_x_port`` - * [x] Remove configuration option ``[webserver] proxy_fix_x_prefix`` - * [x] Remove configuration option ``[webserver] cookie_secure`` - * [x] Remove configuration option ``[webserver] analytics_tool`` - * [x] Remove configuration option ``[webserver] analytics_id`` - * [x] Remove configuration option ``[webserver] analytics_url`` - * [x] Remove configuration option ``[webserver] show_recent_stats_for_completed_runs`` - * [x] Remove configuration option ``[webserver] run_internal_api`` - * [x] Remove configuration option ``[webserver] caching_hash_method`` - * [x] Remove configuration option ``[webserver] show_trigger_form_if_no_params`` - * [x] Remove configuration option ``[webserver] num_recent_configurations_for_trigger`` - * [x] Remove configuration option ``[webserver] allowed_payload_size`` - * [x] Remove configuration option ``[webserver] max_form_memory_size`` - * [x] Remove configuration option ``[webserver] max_form_parts`` diff --git a/airflow-core/newsfragments/48218.significant.rst b/airflow-core/newsfragments/48218.significant.rst deleted file mode 100644 index cfde19d2fbec4..0000000000000 --- a/airflow-core/newsfragments/48218.significant.rst +++ /dev/null @@ -1,14 +0,0 @@ -Remove option for unlimited parallelism. - -Before Airflow 3.0 it was possible to set unlimited parallelism by setting ``[core] parallelism`` to 0. This was removed in Airflow 3.0.0. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/48223.significant.rst b/airflow-core/newsfragments/48223.significant.rst deleted file mode 100644 index 5aee91c591f19..0000000000000 --- a/airflow-core/newsfragments/48223.significant.rst +++ /dev/null @@ -1,30 +0,0 @@ -The Airflow distribution is now split into multiple distributions. - -In Airflow 2, all the dependencies kept in the ``apache-airflow`` package were dynamically generated at -build time, and some of the optional dependencies in the package were excessive due to limitations of the build -system used.
- -With Airflow 3, Airflow is now split into several independent and isolated distribution packages on top of -already existing ``providers`` and the dependencies are isolated and simplified across those distribution -packages. - -While the original installation method via the ``apache-airflow`` distribution package and extras still -works as before and installs a complete Airflow setup ready to serve as scheduler, webserver, triggerer -and worker, the ``apache-airflow`` package is now a meta-package that can install all the other distribution -packages (mandatory ones or via optional extras). It is also possible to install only the distribution -packages that are needed for the specific component you want to run Airflow with. - -One change vs. Airflow 2 is that neither the ``apache-airflow`` nor the ``apache-airflow-core`` distribution package -has a ``leveldb`` extra, which is an optional feature of the ``apache-airflow-providers-google`` distribution package. -The simplest way to install the ``leveldb`` dependencies is to install ``apache-airflow-providers-google[leveldb]``. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [x] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/48388.significant.rst b/airflow-core/newsfragments/48388.significant.rst deleted file mode 100644 index cc60bb7686210..0000000000000 --- a/airflow-core/newsfragments/48388.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -Task-level auto lineage collection is removed - -The ``prepare_lineage``, ``apply_lineage`` mechanism, along with the custom -lineage backend type that supports it, has been removed. This was an -experimental feature that never caught on. - -The ``airflow.lineage.hook`` submodule is not affected. - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes diff --git a/airflow-core/newsfragments/48458.significant.rst b/airflow-core/newsfragments/48458.significant.rst deleted file mode 100644 index 1f834d4395bd0..0000000000000 --- a/airflow-core/newsfragments/48458.significant.rst +++ /dev/null @@ -1,14 +0,0 @@ -For the Airflow CLI, we have changed the CLI parameter ``--exec-date`` to ``--logical-date`` for the command ``airflow dags trigger`` to maintain consistency with Airflow 3.0 and above. - -This is a breaking change for users using the ``airflow dags trigger`` command with the ``--exec-date`` parameter in their scripts or workflows. - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/48528.significant.rst b/airflow-core/newsfragments/48528.significant.rst deleted file mode 100644 index 252585a6eff3d..0000000000000 --- a/airflow-core/newsfragments/48528.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -``SequentialExecutor`` has been removed from the in-tree executors. - -This executor was primarily used for local testing but is now redundant, as ``LocalExecutor`` -supports SQLite with WAL mode and provides better performance with parallel execution. -Users should switch to ``LocalExecutor`` or ``CeleryExecutor`` as alternatives.
- -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] Convert all ``SequentialExecutor`` to ``LocalExecutor`` in ``[core] executor`` diff --git a/airflow-core/newsfragments/48579.significant.rst b/airflow-core/newsfragments/48579.significant.rst deleted file mode 100644 index a67384b8c10be..0000000000000 --- a/airflow-core/newsfragments/48579.significant.rst +++ /dev/null @@ -1,22 +0,0 @@ -``DebugExecutor`` has been removed from the in-tree executors. - -This executor was primarily used for local testing but is now redundant, as ``LocalExecutor`` -supports SQLite with WAL mode and provides better performance with parallel execution. -Users should switch to ``LocalExecutor`` or ``CeleryExecutor`` as alternatives. - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config update`` - - * [x] Convert all ``DebugExecutor`` to ``LocalExecutor`` when present in ``[core] executor`` diff --git a/airflow-core/newsfragments/49017.significant.rst b/airflow-core/newsfragments/49017.significant.rst deleted file mode 100644 index 56a9b08831c53..0000000000000 --- a/airflow-core/newsfragments/49017.significant.rst +++ /dev/null @@ -1,25 +0,0 @@ -Renamed FAB-related configuration options. - - * Rename configuration ``webserver.config_file`` to ``fab.config_file`` - * Rename configuration ``webserver.session_backend`` to ``fab.session_backend`` - * Rename configuration ``webserver.base_url`` to ``api.base_url`` - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``webserver.config_file`` → ``fab.config_file`` - * [x] ``webserver.session_backend`` → ``fab.session_backend`` - * [x] ``webserver.base_url`` → ``api.base_url`` diff --git a/airflow-core/newsfragments/49161.significant.rst b/airflow-core/newsfragments/49161.significant.rst deleted file mode 100644 index 549d27bdfe30e..0000000000000 --- a/airflow-core/newsfragments/49161.significant.rst +++ /dev/null @@ -1,18 +0,0 @@ -Removed the Airflow configuration option ``navbar_logo_text_color`` - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``webserver.navbar_logo_text_color`` diff --git a/airflow-core/newsfragments/49223.significant.rst b/airflow-core/newsfragments/49223.significant.rst deleted file mode 100644 index 090e079c2d309..0000000000000 --- a/airflow-core/newsfragments/49223.significant.rst +++ /dev/null @@ -1,36 +0,0 @@ -The ``airflow config update`` command now shows breaking config changes by default. - - * By default, ``airflow config update`` performs a dry run (i.e. it does not modify your ``airflow.cfg``) and displays only the breaking configuration changes. This helps users avoid being overwhelmed by non-critical recommendations.
- * ``airflow config update --fix`` applies only the breaking changes and updates ``airflow.cfg`` accordingly. - * ``airflow config update --fix --all-recommendations`` updates both breaking and non-breaking recommended changes in your configuration. - -* Breaking Migration rules created - - * ``airflow config update`` - - * ``core.executor``: default value change from ``SequentialExecutor`` to ``LocalExecutor`` - * ``logging.log_filename_template``: remove configuration if value equals either - ``{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log`` or - ``dag_id={{ ti.dag_id }}/run_id={{ ti.run_id }}/task_id={{ ti.task_id }}/{% if ti.map_index >= 0 %}map_index={{ ti.map_index }}/{% endif %}attempt={{ try_number }}.log`` - * ``webserver.web_server_host`` → ``api.host`` - * ``webserver.web_server_port`` → ``api.port`` - * ``webserver.workers`` → ``api.workers`` - * ``webserver.web_server_ssl_cert`` → ``api.ssl_cert`` - * ``webserver.web_server_ssl_key`` → ``api.ssl_key`` - * ``webserver.access_logfile`` → ``api.access_logfile`` - * ``scheduler.catchup_by_default``: default value change from ``True`` to ``False`` - * ``scheduler.dag_dir_list_interval`` → ``dag_processor.refresh_interval`` - * ``triggerer.default_capacity`` → ``triggerer.capacity`` - * ``elasticsearch.log_id_template``: remove configuration if value equals ``{dag_id}-{task_id}-{logical_date}-{try_number}`` - - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/49779.significant.rst b/airflow-core/newsfragments/49779.significant.rst new file mode 100644 index 0000000000000..187f6ac199198 --- /dev/null +++ b/airflow-core/newsfragments/49779.significant.rst @@ -0,0 +1,20 @@ +The ``SecretCache`` class has been moved from ``airflow.secrets.cache`` to ``airflow.sdk.execution_time.cache`` + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [x] Code interface changes + +* Migration rules needed + + * ruff + + * AIR301 + + * [ ] ``airflow.secrets.cache.SecretCache`` → ``airflow.sdk.execution_time.cache.SecretCache`` diff --git a/airflow-core/newsfragments/50374.feature.rst b/airflow-core/newsfragments/50374.feature.rst new file mode 100644 index 0000000000000..f6a241d381942 --- /dev/null +++ b/airflow-core/newsfragments/50374.feature.rst @@ -0,0 +1 @@ +When a dag specifies ``schedule="@once"`` without an explicit ``start_date``, run it as soon as convenient. (Previously, the dag would never run.)
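For illustration, a minimal sketch of a dag relying on this new ``@once`` behaviour (the dag and task names are hypothetical; the ``airflow.sdk`` imports follow the Task SDK conventions described in the aip-72 notes further below):

.. code-block:: python

    from airflow.sdk import dag, task


    @dag(schedule="@once")  # note: no start_date; the single run is scheduled as soon as convenient
    def once_example():
        @task
        def hello() -> None:
            # this task executes exactly once over the lifetime of the dag
            print("hello from the only run")

        hello()


    once_example()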
diff --git a/airflow-core/newsfragments/50693.significant.rst b/airflow-core/newsfragments/50693.significant.rst new file mode 100644 index 0000000000000..6368ddf0ee56c --- /dev/null +++ b/airflow-core/newsfragments/50693.significant.rst @@ -0,0 +1,41 @@ +Webserver configuration options have been moved to the ``api`` section or removed + +The following webserver options were moved into the ``api`` section: + +* ``[webserver] log_fetch_timeout_sec`` → ``[api] log_fetch_timeout_sec`` +* ``[webserver] hide_paused_dags_by_default`` → ``[api] hide_paused_dags_by_default`` +* ``[webserver] page_size`` → ``[api] page_size`` +* ``[webserver] default_wrap`` → ``[api] default_wrap`` +* ``[webserver] require_confirmation_dag_change`` → ``[api] require_confirmation_dag_change`` +* ``[webserver] auto_refresh_interval`` → ``[api] auto_refresh_interval`` + +The following configuration options are now unused and have been removed: + +- ``[webserver] instance_name_has_markup`` +- ``[webserver] warn_deployment_exposure`` + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes + +.. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) + +* Migration rules needed + + * ``airflow config lint`` + + * [ ] Remove configuration option ``[webserver] instance_name_has_markup`` + * [ ] Remove configuration option ``[webserver] warn_deployment_exposure`` + * [ ] ``[webserver] log_fetch_timeout_sec`` → ``[api] log_fetch_timeout_sec`` + * [ ] ``[webserver] hide_paused_dags_by_default`` → ``[api] hide_paused_dags_by_default`` + * [ ] ``[webserver] page_size`` → ``[api] page_size`` + * [ ] ``[webserver] default_wrap`` → ``[api] default_wrap`` + * [ ] ``[webserver] require_confirmation_dag_change`` → ``[api] require_confirmation_dag_change`` + * [ ] ``[webserver] auto_refresh_interval`` → ``[api] auto_refresh_interval`` diff --git a/airflow-core/newsfragments/51424.significant.rst b/airflow-core/newsfragments/51424.significant.rst new file mode 100644 index 0000000000000..a04140ba74bb0 --- /dev/null +++ b/airflow-core/newsfragments/51424.significant.rst @@ -0,0 +1,17 @@ +The ``consuming_dags`` key in the asset API has been renamed to ``scheduled_dags``. + +The previous name caused confusion for users since the list does not contain all +dags that technically *use* the asset, but only those that use it in their +``schedule`` argument. As a bug fix, the key has been renamed to clarify its +intention. + +* Types of change + + * [ ] Dag changes + * [ ] Config changes + * [x] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/airflow-core/newsfragments/51639.significant.rst b/airflow-core/newsfragments/51639.significant.rst new file mode 100644 index 0000000000000..78708b4eb1913 --- /dev/null +++ b/airflow-core/newsfragments/51639.significant.rst @@ -0,0 +1,17 @@ +The ``enable_xcom_deserialize_support`` configuration option has been removed. + +This configuration was previously marked as a security risk due to potential remote code execution vulnerabilities +when deserializing arbitrary Python objects that came in from XComs. The removal is a security improvement since +all custom XCom serialization/deserialization is now handled safely at the worker level, making this configuration +unnecessary in core. Users should simply stop setting this option.
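To illustrate the JSON-only XCom model that makes this option unnecessary, a minimal sketch (the dag and task names are hypothetical; see also the XCom pickling notes in the aip-72 fragment further below) where values passed between tasks are plain JSON-serializable structures:

.. code-block:: python

    from airflow.sdk import dag, task


    @dag(schedule=None)
    def xcom_json_example():
        @task
        def produce() -> dict:
            # Returned values are stored as JSON XComs; no deserialization
            # support flag is needed (or available) on the consumer side.
            return {"rows": 42, "uri": "s3://bucket/key"}

        @task
        def consume(payload: dict) -> None:
            print(payload["rows"])

        consume(produce())


    xcom_json_example()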
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/airflow-core/newsfragments/52860.significant.rst b/airflow-core/newsfragments/52860.significant.rst new file mode 100644 index 0000000000000..5962897ec206d --- /dev/null +++ b/airflow-core/newsfragments/52860.significant.rst @@ -0,0 +1,17 @@ +Replace API server ``access_logfile`` configuration with ``log_config`` + +The API server configuration option ``[api] access_logfile`` has been replaced with ``[api] log_config`` to align with uvicorn's logging configuration instead of the legacy gunicorn approach. +The new ``log_config`` option accepts a path to a logging configuration file compatible with ``logging.config.fileConfig``, providing more flexible logging configuration for the API server. + +This change also removes the dependency on gunicorn for daemonization, making the API server ``--daemon`` option consistent with other Airflow components like scheduler and triggerer. + +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/airflow-core/newsfragments/53631.misc.rst b/airflow-core/newsfragments/53631.misc.rst new file mode 100644 index 0000000000000..a6cef457b5844 --- /dev/null +++ b/airflow-core/newsfragments/53631.misc.rst @@ -0,0 +1 @@ +The constraint files do not contain developer dependencies as of Airflow 3.1.0 diff --git a/airflow-core/newsfragments/53796.misc.rst b/airflow-core/newsfragments/53796.misc.rst new file mode 100644 index 0000000000000..07c127679c5c4 --- /dev/null +++ b/airflow-core/newsfragments/53796.misc.rst @@ -0,0 +1 @@ +Add ``RemovedInAirflow4Warning`` warnings for ``airflow.security.permissions`` imports and ``access_control`` DAG attribute usage. diff --git a/airflow-core/newsfragments/aip-66.significant.rst b/airflow-core/newsfragments/aip-66.significant.rst deleted file mode 100644 index 3885b073b0db0..0000000000000 --- a/airflow-core/newsfragments/aip-66.significant.rst +++ /dev/null @@ -1,75 +0,0 @@ -Support DAG versioning by introducing DAG Bundles - -The following DAG parsing configuration options were moved into the ``dag_processor`` section: - -* ``[core] dag_file_processor_timeout`` → ``[dag_processor] dag_file_processor_timeout`` -* ``[scheduler] parsing_processes`` → ``[dag_processor] parsing_processes`` -* ``[scheduler] file_parsing_sort_mode`` → ``[dag_processor] file_parsing_sort_mode`` -* ``[scheduler] max_callbacks_per_loop`` → ``[dag_processor] max_callbacks_per_loop`` -* ``[scheduler] min_file_process_interval`` → ``[dag_processor] min_file_process_interval`` -* ``[scheduler] stale_dag_threshold`` → ``[dag_processor] stale_dag_threshold`` -* ``[scheduler] print_stats_interval`` → ``[dag_processor] print_stats_interval`` - -The following DAG parsing configuration option was moved into the ``logging`` section: - -* ``[scheduler] child_process_log_directory`` → ``[logging] dag_processor_child_process_log_directory`` - -The default value of ``[logging] dag_processor_child_process_log_directory`` was changed from -``AIRFLOW_HOME/logs/scheduler`` to ``AIRFLOW_HOME/logs/dag-processor``, which moves the parsing logs for dag files into -that new location. - -The "subdir" concept has been superseded by the "bundle" concept.
Users are able to -define separate bundles for different DAG folders, and can refer to them by the bundle name -instead of their location on disk. - -The ``--subdir`` option of the following commands has been replaced with ``--bundle-name``: - -* ``airflow dag-processor`` -* ``airflow dags list-import-errors`` -* ``airflow dags report`` - -The ``--subdir`` option has been removed from the following commands: - -* ``airflow dags next-execution`` -* ``airflow dags pause`` -* ``airflow dags show`` -* ``airflow dags show-dependencies`` -* ``airflow dags state`` -* ``airflow dags test`` -* ``airflow dags trigger`` -* ``airflow dags unpause`` - -Dag bundles are not initialized in the triggerer. In practice, this means that triggers cannot come from a dag bundle. -This is because the triggerer does not deal with changes in trigger code over time, as everything happens in the main process. -Triggers can come from anywhere else on ``sys.path`` instead. - -.. Provide additional contextual information - -.. Check the type of change that applies to this change - -* Types of change - - * [ ] Dag changes - * [x] Config changes - * [ ] API changes - * [x] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -.. List the migration rules needed for this change (see https://github.com/apache/airflow/issues/41641) - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``[core] dag_file_processor_timeout`` → ``[dag_processor] dag_file_processor_timeout`` - * [x] ``[scheduler] parsing_processes`` → ``[dag_processor] parsing_processes`` - * [x] ``[scheduler] file_parsing_sort_mode`` → ``[dag_processor] file_parsing_sort_mode`` - * [x] ``[scheduler] max_callbacks_per_loop`` → ``[dag_processor] max_callbacks_per_loop`` - * [x] ``[scheduler] min_file_process_interval`` → ``[dag_processor] min_file_process_interval`` - * [x] ``[scheduler] stale_dag_threshold`` → ``[dag_processor] stale_dag_threshold`` - * [x] ``[scheduler] print_stats_interval`` → ``[dag_processor] print_stats_interval`` - * [x] ``[scheduler] dag_dir_list_interval`` → ``[dag_processor] refresh_interval`` - * [x] ``[scheduler] child_process_log_directory`` → ``[logging] dag_processor_child_process_log_directory`` diff --git a/airflow-core/newsfragments/aip-72.significant.rst b/airflow-core/newsfragments/aip-72.significant.rst deleted file mode 100644 index 863e19c8caee5..0000000000000 --- a/airflow-core/newsfragments/aip-72.significant.rst +++ /dev/null @@ -1,135 +0,0 @@ -Create a TaskExecution interface and enforce DB isolation from Tasks - -As part of this change the following breaking changes have occurred: - -- Tasks and DAG Parsing code is not able to access the Airflow metadata DB - - Access via Variables and Connection is still allowed (though these will use an API, not direct DB access) - it should be assumed that any use of the database models from within ``airflow.models`` inside of DAG files or tasks will break. - -- Remove the concept of pluggable TaskRunners. - - The ``task_runner`` config in ``[core]`` has been removed. - - There were two built-in options for this: Standard (the default), which used fork or a new process as appropriate, and CGroupRunner, which launched tasks in a new cgroup (not usable inside Docker or Kubernetes). - - With the move of the execution-time code into the Task SDK, we are using this opportunity to reduce complexity for seldom-used features.
- -- Shipping DAGs via pickle is no longer supported - - This was a feature that was not widely used and was a security risk. It has been removed. - -- Pickling is no longer supported for XCom serialization. - - XCom data will no longer support pickling. This change is intended to improve security and simplify data - handling by supporting JSON-only serialization. DAGs that depend on XCom pickling must update to use JSON-serializable data. - - As part of that change, the ``[core] enable_xcom_pickling`` configuration option has been removed. - - If you still need to use pickling, you can use a custom XCom backend that stores references in the metadata DB and - the pickled data can be stored in a separate storage like S3. - - The ``value`` field in the XCom table has been changed to a ``JSON`` type via DB migration. The XCom records that - contain pickled data are archived in the ``_xcom_archive`` table. You can safely drop this table if you don't need - the data anymore. To drop the table, you can use the following command or manually drop the table from the database. - - .. code-block:: bash - - airflow db drop-archived -t "_xcom_archive" - -- The ability to specify scheduling conditions for an operator via the ``deps`` class attribute has been removed. - - If you were defining custom scheduling conditions on an operator class (usually by subclassing BaseTIDep), this ability has been removed. - - It is recommended that you replace such a custom operator with a deferrable sensor, a condition or another triggering mechanism. - -- ``BaseOperatorLink`` has now been moved into the task SDK to be consumed by DAG authors to write custom operator links. - - Any occurrences of imports from ``airflow.models.baseoperatorlink`` will need to be updated to ``airflow.sdk.definitions.baseoperatorlink`` - -- ``chain``, ``chain_linear`` and ``cross_downstream`` have been moved to the task SDK. - - Any occurrences of imports from ``airflow.models.baseoperator`` will need to be updated to ``airflow.sdk`` - - Old imports: - - .. code-block:: python - - from airflow.models.baseoperator import chain, chain_linear, cross_downstream - - New imports: - - .. code-block:: python - - from airflow.sdk import chain, chain_linear, cross_downstream - -- The ``Label`` class has been moved to the task SDK. - - Old imports: - - .. code-block:: python - - from airflow.utils.edgemodifier import Label - - New imports: - - .. code-block:: python - - from airflow.sdk import Label - -- We have removed DAG-level settings that control the UI behaviour. - These are now per-user settings controlled by the UI - - - ``default_view`` - -- The ``SkipMixin`` class has been removed as a parent class from ``BaseSensorOperator``. - -- A new config ``[workers] secrets_backend`` & ``[workers] secrets_backend_kwargs`` has been introduced to configure the secrets backend on the - workers directly to allow reducing the round trip to the API server and also to allow configuring a - different secrets backend. - Priority is defined as workers backend > workers env > secrets backend on API server > API server env > metadata DB. - -- All the decorators have been moved to the task SDK. - - Old imports: - - .. code-block:: python - - from airflow.decorators import dag, task, task_group, setup, teardown - - New imports: - - ..
code-block:: python - - from airflow.sdk import dag, task, task_group, setup, teardown - - -* Types of change - - * [x] Dag changes - * [x] Config changes - * [ ] API changes - * [ ] CLI changes - * [x] Behaviour changes - * [x] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``core.task_runner`` - * [x] ``core.enable_xcom_pickling`` - - * ruff - - * AIR302 - - * [x] ``airflow.models.baseoperatorlink.BaseOperatorLink`` → ``airflow.sdk.BaseOperatorLink`` - * [ ] ``airflow.models.dag.DAG`` → ``airflow.sdk.DAG`` - * [ ] ``airflow.models.DAG`` → ``airflow.sdk.DAG`` - * [ ] ``airflow.decorators.dag`` → ``airflow.sdk.dag`` - * [ ] ``airflow.decorators.task`` → ``airflow.sdk.task`` - * [ ] ``airflow.decorators.task_group`` → ``airflow.sdk.task_group`` - * [ ] ``airflow.decorators.setup`` → ``airflow.sdk.setup`` - * [ ] ``airflow.decorators.teardown`` → ``airflow.sdk.teardown`` diff --git a/airflow-core/newsfragments/aip-79.significant.rst b/airflow-core/newsfragments/aip-79.significant.rst deleted file mode 100644 index 5d7dba3057e65..0000000000000 --- a/airflow-core/newsfragments/aip-79.significant.rst +++ /dev/null @@ -1,65 +0,0 @@ -Remove Flask App Builder from core Airflow dependencies. - -As part of this change the following breaking changes have occurred: - -- The auth manager interface ``base_auth_manager`` has been updated with some breaking changes: - - - The constructor no longer takes ``appbuilder`` as a parameter; it now takes no parameters - - - A new abstract method ``deserialize_user`` needs to be implemented - - - A new abstract method ``serialize_user`` needs to be implemented - - - A new abstract method ``filter_authorized_menu_items`` needs to be implemented - - - The property ``security_manager`` has been removed from the interface - - - The method ``get_url_logout`` is now optional - - - The method ``get_permitted_dag_ids`` has been renamed to ``get_authorized_dag_ids`` - - - The method ``filter_permitted_dag_ids`` has been renamed to ``filter_authorized_dag_ids`` - - - All these methods have been removed from the interface: - - - ``filter_permitted_menu_items`` - - ``get_user_name`` - - ``get_user_display_name`` - - ``get_user`` - - ``get_user_id`` - - ``is_logged_in`` - - ``get_api_endpoints`` - - ``register_views`` - - - All the following method signatures changed to make the parameter ``user`` required (it was optional) - - - ``is_authorized_configuration`` - - ``is_authorized_connection`` - - ``is_authorized_dag`` - - ``is_authorized_asset`` - - ``is_authorized_pool`` - - ``is_authorized_variable`` - - ``is_authorized_view`` - - ``is_authorized_custom_view`` - - ``get_authorized_dag_ids`` (previously ``get_permitted_dag_ids``) - - ``filter_authorized_dag_ids`` (previously ``filter_permitted_dag_ids``) - - - All the following method signatures changed to add the parameter ``user`` - - - ``batch_is_authorized_connection`` - - ``batch_is_authorized_dag`` - - ``batch_is_authorized_pool`` - - ``batch_is_authorized_variable`` - -- The module ``airflow.www.auth`` has been moved to ``airflow.providers.fab.www.auth`` - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [x] Code interface changes diff --git a/airflow-core/newsfragments/aip-84.significant.rst b/airflow-core/newsfragments/aip-84.significant.rst deleted file mode 100644 index
a783f607ff477..0000000000000 --- a/airflow-core/newsfragments/aip-84.significant.rst +++ /dev/null @@ -1,50 +0,0 @@ -Modernize and improve the REST API. - -As part of this change the following breaking changes have occurred: - -- The API returns a 422 status code instead of 400 for validation errors. - - For instance, when the request payload, path params, or query params are invalid. - -- When listing a resource, for instance on GET ``/dags``, the ``fields`` parameter is no longer supported to obtain a partial response. - - The full objects will be returned by the endpoint. This feature might be added back in upcoming 3.x versions. - -- Passing lists in query parameters switched from ``form, non exploded`` to ``form, exploded`` - i.e. before: ``?my_list=item1,item2``, now: ``?my_list=item1&my_list=item2`` - -- ``execution_date`` was deprecated and has been removed. Any payload or parameter mentioning this field has been removed. - -- Datetime formats are RFC3339-compliant in FastAPI, which is more permissive than ISO8601, - meaning that the API returns zulu datetime for responses, more info here https://github.com/fastapi/fastapi/discussions/7693#discussioncomment-5143311. - Both ``Z`` and ``00+xx`` are supported for payload and params. - - This is due to FastAPI and pydantic v2 default behavior. - -- PATCH on ``DagRun`` and ``TaskInstance`` is more generic and allows updating both the resource state and the note content. - - Therefore the two legacy dedicated endpoints to update a ``DagRun`` note and ``TaskInstance`` note have been removed. - - The same applies to setting the task instance state; it is now handled by the broader PATCH on task instances. - -- ``assets/queuedEvent`` endpoints have moved to ``assets/queuedEvents`` for consistency. - -- The dag_parsing endpoint now returns a 409 when the DagPriorityParsingRequest already exists. It was returning 201 before. - -- ``clearTaskInstances`` endpoint default value for ``reset_dag_runs`` field has been updated from ``False`` to ``True``. - -- Pool name can't be modified in the PATCH pool endpoint anymore, since pool names shouldn't be updated via the pool PATCH API call. - -- Logical date is now nullable. In addition, it is a required (but nullable) payload field for the trigger DagRun endpoint. - - -* Types of change - - * [ ] Dag changes - * [ ] Config changes - * [x] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes diff --git a/airflow-core/newsfragments/api-38.significant.rst b/airflow-core/newsfragments/api-38.significant.rst deleted file mode 100644 index 3167475a9aeaa..0000000000000 --- a/airflow-core/newsfragments/api-38.significant.rst +++ /dev/null @@ -1,31 +0,0 @@ -- With the new UI, we have removed DAG- and config-level settings that control some of the UI behaviour.
- - These are now per-user settings controlled by the UI - - - ``default_view`` - - ``orientation`` - -* Types of change - - * [x] Dag changes - * [ ] Config changes - * [ ] API changes - * [ ] CLI changes - * [ ] Behaviour changes - * [ ] Plugin changes - * [ ] Dependency changes - * [ ] Code interface changes - -* Migration rules needed - - * ``airflow config lint`` - - * [x] ``core.dag_default_view`` - * [x] ``core.dag_orientation`` - - * ruff - - * AIR302 - - * [x] ``default_view`` argument to DAG removed - * [x] ``orientation`` argument to DAG removed diff --git a/airflow-core/newsfragments/template.significant.rst b/airflow-core/newsfragments/template.significant.rst index 4877b0cbd1e19..702670ebec03a 100644 --- a/airflow-core/newsfragments/template.significant.rst +++ b/airflow-core/newsfragments/template.significant.rst @@ -4,7 +4,7 @@ .. Check the type of change that applies to this change .. Dag changes: requires users to change their dag code -.. Config changes: requires users to change their airflow config +.. Config changes: requires users to change their Airflow config .. API changes: requires users to change their Airflow REST API calls .. CLI changes: requires users to change their Airflow CLI usage .. Behaviour changes: the existing code won't break, but the behavior is different diff --git a/airflow-core/pyproject.toml b/airflow-core/pyproject.toml index f7e1c620df5ac..a73382c2c720e 100644 --- a/airflow-core/pyproject.toml +++ b/airflow-core/pyproject.toml @@ -17,15 +17,15 @@ [build-system] requires = [ - "GitPython==3.1.44", + "GitPython==3.1.45", "gitdb==4.0.12", "hatchling==1.27.0", - "packaging==24.2", + "packaging==25.0", "pathspec==0.12.1", - "pluggy==1.5.0", + "pluggy==1.6.0", "smmap==5.0.2", "tomli==2.2.1; python_version < '3.11'", - "trove-classifiers==2025.4.11.15", + "trove-classifiers==2025.8.6.13", ] build-backend = "hatchling.build" @@ -35,7 +35,12 @@ name = "apache-airflow-core" description = "Core packages for Apache Airflow, schedule and API server" readme = { file = "README.md", content-type = "text/markdown" } license-files.globs = ["LICENSE", "3rd-party-licenses/*.txt", "NOTICE"] -requires-python = "~=3.9,<3.13" +# We know that it will take a while before we can support Python 3.14 because of all our dependencies +# It takes about 4-7 months after a Python release before we can support it, so we limit it to <3.14 +# proactively. This way we also have a chance to test it with Python 3.14 and bump the upper binding +# and manually mark providers that do not support it yet with !-3.14 - until they support it - which will +# also exclude resolving uv workspace dependencies for those providers. +requires-python = ">=3.10,<3.14" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -51,7 +56,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -59,7 +63,7 @@ classifiers = [ ] # Version is defined in src/airflow/__init__.py and it is automatically synchronized by pre-commit -version = "3.0.0" +version = "3.1.0" dependencies = [ "a2wsgi>=1.10.8", @@ -78,34 +82,34 @@ dependencies = [ "cryptography>=41.0.0", "deprecated>=1.2.13", "dill>=0.2.2", - # Required for python 3.9 to work with new annotations styles.
Check package - # description on PyPI for more details: https://pypi.org/project/eval-type-backport/ - 'eval-type-backport>=0.2.0;python_version<"3.10"', - # 0.115.10 fastapi was a bad release that broke our API's and static checks. - # Related fastapi issue here: https://github.com/fastapi/fastapi/discussions/13431 - "fastapi[standard]>=0.115.0,!=0.115.10", - # We could get rid of flask and gunicorn if we replace serve_logs with a starlette + unicorn - "flask>=2.1.1", - "gitpython>=3.1.40", - # We could get rid of flask and gunicorn if we replace serve_logs with a starlette + unicorn - "gunicorn>=20.1.0", + "fastapi[standard-no-fastapi-cloud-cli]>=0.116.0", + "starlette>=0.45.0", "httpx>=0.25.0", 'importlib_metadata>=6.5;python_version<"3.12"', + 'importlib_metadata>=7.0;python_version>="3.12"', "itsdangerous>=2.0", "jinja2>=3.1.5", "jsonschema>=4.19.1", "lazy-object-proxy>=1.2.0", - "libcst >=1.1.0", + 'libcst >=1.8.2', "linkify-it-py>=2.0.0", "lockfile>=0.12.2", - "marshmallow-oneofschema>=2.0.1", "methodtools>=0.4.7", - "opentelemetry-api>=1.24.0", - "opentelemetry-exporter-otlp>=1.24.0", - "packaging>=23.2", + "opentelemetry-api>=1.27.0", + "opentelemetry-exporter-otlp>=1.27.0", + # opentelemetry-proto is a transitive dependency of + # opentelemetry-exporter-otlp and other OpenTelemetry packages. + # opentelemetry-proto adds a very restrictive dependency on + # protobuf, causing conflicts with other packages, so to help + # the pip resolver we add it as a direct dependency with an upper + # bound, which signals to the pip resolver it is a problematic + # dependency and should be resolved as early as possible. + # This may be removed when future versions of pip are able + # to handle this dependency resolution automatically. + "opentelemetry-proto<9999,>=1.27.0", + "packaging>=25.0", "pathspec>=0.9.0", - 'pendulum>=2.1.2,<4.0;python_version<"3.12"', - 'pendulum>=3.0.0,<4.0;python_version>="3.12"', + 'pendulum>=3.1.0', "pluggy>=1.5.0", "psutil>=5.8.0", "pydantic>=2.11.0", @@ -117,111 +121,63 @@ dependencies = [ "python-dateutil>=2.7.0", "python-slugify>=5.0", # Requests 3 if it will be released, will be heavily breaking. - "requests>=2.31.0,<3", + "requests>=2.32.0,<3", "rich-argparse>=1.0.0", "rich>=13.6.0", "setproctitle>=1.3.3", - # We use some deprecated features of sqlalchemy 2.0 and we should replace them before we can upgrade - # See https://sqlalche.me/e/b8d9 for details of deprecated features - # you can set environment variable SQLALCHEMY_WARN_20=1 to show all deprecation warnings. 
- # The issue tracking it is https://github.com/apache/airflow/issues/28723 - "sqlalchemy>=1.4.49,<2.0", + # The issue tracking deprecations for sqlalchemy 2 is https://github.com/apache/airflow/issues/28723 + "sqlalchemy[asyncio]>=1.4.49", "sqlalchemy-jsonfield>=1.0", "sqlalchemy-utils>=0.41.2", "svcs>=25.1.0", "tabulate>=0.9.0", - "tenacity>=8.0.0,!=8.2.0", + "tenacity>=8.3.0", "termcolor>=3.0.0", + "typing-extensions>=4.14.1", # Universal Pathlib 0.2.4 adds extra validation for Paths and our integration with local file paths # Does not work with it Tracked in https://github.com/fsspec/universal_pathlib/issues/276 "universal-pathlib>=0.2.2,!=0.2.4", "uuid6>=2024.7.10", - "apache-airflow-task-sdk<1.1.0,>=1.0.0", + "apache-airflow-task-sdk<1.2.0,>=1.1.0", # pre-installed providers "apache-airflow-providers-common-compat>=1.6.0", "apache-airflow-providers-common-io>=1.5.3", - "apache-airflow-providers-common-sql>=1.25.0", + "apache-airflow-providers-common-sql>=1.26.0", "apache-airflow-providers-smtp>=2.0.2", "apache-airflow-providers-standard>=0.4.0", ] [project.optional-dependencies] -# Aiobotocore required for AWS deferrable operators. -# There is conflict between boto3 and aiobotocore dependency botocore. -# TODO: We can remove it once boto3 and aiobotocore both have compatible botocore version or -# boto3 have native aync support and we move away from aio aiobotocore -"aiobotocore" = [ - "apache-airflow-providers-amazon[aiobotocore]>=9.6.0", -] "async" = [ "eventlet>=0.37.0", - "gevent>=24.2.1", - "greenlet>=0.4.9", -] -"apache-atlas" = [ - "atlasclient>=0.1.2", -] -"apache-webhdfs" = [ - "apache-airflow-providers-apache-hdfs", -] -"cloudpickle" = [ - "cloudpickle>=2.2.1", -] -"github-enterprise" = [ - "apache-airflow-providers-fab", - "authlib>=1.0.0", -] -"google-auth" = [ - "apache-airflow-providers-fab", - "authlib>=1.0.0", + "gevent>=25.4.1", + "greenlet>=3.1.0", ] "graphviz" = [ # The graphviz package creates friction when installing on MacOS as it needs graphviz system package to # be installed, and it's really only used for very obscure features of Airflow, so we can skip it on MacOS # Instead, if someone attempts to use it on MacOS, they will get explanatory error on how to install it - "graphviz>=0.12; sys_platform != 'darwin'", + "graphviz>=0.20; sys_platform != 'darwin'", ] "kerberos" = [ "pykerberos>=1.1.13", "requests-kerberos>=0.14.0", "thrift-sasl>=0.4.2", ] -"ldap" = [ - "python-ldap>=3.4.4", -] "otel" = [ "opentelemetry-exporter-prometheus>=0.47b0", ] - -"pandas" = [ - # In pandas 2.2 minimal version of the sqlalchemy is 2.0 - # https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#increased-minimum-versions-for-dependencies - # However Airflow not fully supports it yet: https://github.com/apache/airflow/issues/28723 - "pandas>=2.1.2,<2.3", -] -"rabbitmq" = [ - "amqp>=5.2.0", -] -"s3fs" = [ - # This is required for support of S3 file system which uses aiobotocore - # which can have a conflict with boto3 as mentioned in aiobotocore extra - "apache-airflow-providers-amazon[s3fs]", -] "sentry" = [ "blinker>=1.1", - # Sentry SDK 1.33 is broken when greenlets are installed and fails to import - # See https://github.com/getsentry/sentry-python/issues/2473 - "sentry-sdk>=1.32.0,!=1.33.0", + # Apparently sentry needs flask to be installed to work properly + "sentry-sdk[flask]>=2.30.0", ] "statsd" = [ "statsd>=3.3.0", ] -"uv" = [ - "uv>=0.6.13", -] "all" = [ - 
"apache-airflow-core[aiobotocore,apache-atlas,apache-webhdfs,cloudpickle,github-enterprise,google-auth,graphviz,kerberos,ldap,otel,pandas,rabbitmq,s3fs,sentry,statsd,uv]" + "apache-airflow-core[graphviz,kerberos,otel,sentry,statsd]" ] [project.scripts] @@ -240,7 +196,6 @@ Mastodon = "https://fosstodon.org/@airflow" Bluesky = "https://bsky.app/profile/apache-airflow.bsky.social" YouTube = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/" - [tool.hatch.version] path = "src/airflow/__init__.py" @@ -252,8 +207,12 @@ include = [ exclude = [ "src/airflow/ui/node_modules/", "src/airflow/api_fastapi/auth/managers/simple/ui/node_modules", + "src/airflow/ui/openapi.merged.json", ] +[tool.hatch.build.targets.sdist.force-include] +"../shared/timezones/src/airflow_shared/timezones" = "src/airflow/_shared/timezones" + [tool.hatch.build.targets.custom] path = "./hatch_build.py" @@ -274,6 +233,7 @@ artifacts = [ exclude = [ "src/airflow/ui/node_modules/", "src/airflow/api_fastapi/auth/managers/simple/ui/node_modules", + "src/airflow/ui/openapi.merged.json", ] [dependency-groups] @@ -282,11 +242,14 @@ dev = [ "apache-airflow-ctl", "apache-airflow-devel-common", "apache-airflow-task-sdk", + # TODO(potiuk): eventually we do not want any providers nor apache-airflow extras to be needed for + # airflow-core tests + "apache-airflow[pandas,polars]", "apache-airflow-providers-amazon", "apache-airflow-providers-celery", "apache-airflow-providers-cncf-kubernetes", + "apache-airflow-providers-fab>=2.2.0; python_version < '3.13'", "apache-airflow-providers-git", - # TODO(potiuk): check if this is really needed "apache-airflow-providers-ftp", ] diff --git a/airflow-core/src/airflow/__init__.py b/airflow-core/src/airflow/__init__.py index 6a997cecbdfef..229b068c76075 100644 --- a/airflow-core/src/airflow/__init__.py +++ b/airflow-core/src/airflow/__init__.py @@ -23,9 +23,10 @@ # Make `airflow` a namespace package, supporting installing # airflow.providers.* in different locations (i.e. one in site, and one in user # lib.) This is required by some IDEs to resolve the import paths. 
-__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) + +__version__ = "3.1.0" -__version__ = "3.0.0" import os import sys @@ -85,7 +86,7 @@ "version": (".version", "", False), # Deprecated lazy imports "AirflowException": (".exceptions", "AirflowException", True), - "Dataset": (".sdk.definitions.asset", "Dataset", True), + "Dataset": (".sdk.definitions.asset", "Asset", True), } if TYPE_CHECKING: # These objects are imported by PEP-562, however, static analyzers and IDE's diff --git a/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/router/__init__.py b/airflow-core/src/airflow/_shared/__init__.py similarity index 100% rename from providers/amazon/src/airflow/providers/amazon/aws/auth_manager/router/__init__.py rename to airflow-core/src/airflow/_shared/__init__.py diff --git a/airflow-core/src/airflow/_shared/timezones b/airflow-core/src/airflow/_shared/timezones new file mode 120000 index 0000000000000..8d7034fa71347 --- /dev/null +++ b/airflow-core/src/airflow/_shared/timezones @@ -0,0 +1 @@ +../../../../shared/timezones/src/airflow_shared/timezones \ No newline at end of file diff --git a/airflow-core/src/airflow/api/client/local_client.py b/airflow-core/src/airflow/api/client/local_client.py index db93f5a52098b..25e42008d216e 100644 --- a/airflow-core/src/airflow/api/client/local_client.py +++ b/airflow-core/src/airflow/api/client/local_client.py @@ -36,11 +36,18 @@ def __init__(self, auth=None, session: httpx.Client | None = None): self._session.auth = auth def trigger_dag( - self, dag_id, run_id=None, conf=None, logical_date=None, replace_microseconds=True + self, + dag_id, + run_id=None, + conf=None, + logical_date=None, + triggering_user_name=None, + replace_microseconds=True, ) -> dict | None: dag_run = trigger_dag.trigger_dag( dag_id=dag_id, triggered_by=DagRunTriggeredByType.CLI, + triggering_user_name=triggering_user_name, run_id=run_id, conf=conf, logical_date=logical_date, @@ -59,6 +66,7 @@ def trigger_dag( "run_type": dag_run.run_type, "start_date": dag_run.start_date, "state": dag_run.state, + "triggering_user_name": dag_run.triggering_user_name, } return dag_run diff --git a/airflow-core/src/airflow/api/common/delete_dag.py b/airflow-core/src/airflow/api/common/delete_dag.py index 2f6076252ba97..a617bcdc5c61c 100644 --- a/airflow-core/src/airflow/api/common/delete_dag.py +++ b/airflow-core/src/airflow/api/common/delete_dag.py @@ -26,8 +26,9 @@ from airflow import models from airflow.exceptions import AirflowException, DagNotFound -from airflow.models import DagModel +from airflow.models import DagModel, DagRun from airflow.models.errors import ParseImportError +from airflow.models.taskinstance import TaskInstance from airflow.utils.db import get_sqla_model_classes from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import TaskInstanceState @@ -63,9 +64,14 @@ def delete_dag(dag_id: str, keep_records_in_log: bool = True, session: Session = if dag is None: raise DagNotFound(f"Dag id {dag_id} not found") - count = 0 + # To ensure the TaskInstance and DagRun models are deleted before + # the DagVersion and Backfill models respectively.
+ models_for_deletion = [TaskInstance, DagRun] + [ + model for model in get_sqla_model_classes() if model.__name__ not in ["TaskInstance", "DagRun"] + ] - for model in get_sqla_model_classes(): + count = 0 + for model in models_for_deletion: if hasattr(model, "dag_id") and (not keep_records_in_log or model.__name__ != "Log"): count += session.execute( delete(model).where(model.dag_id == dag_id).execution_options(synchronize_session="fetch") @@ -76,7 +82,7 @@ def delete_dag(dag_id: str, keep_records_in_log: bool = True, session: Session = session.execute( delete(ParseImportError) .where( - ParseImportError.filename == dag.fileloc, + ParseImportError.filename == dag.relative_fileloc, ParseImportError.bundle_name == dag.bundle_name, ) .execution_options(synchronize_session="fetch") diff --git a/airflow-core/src/airflow/api/common/mark_tasks.py b/airflow-core/src/airflow/api/common/mark_tasks.py index c957a5cd53ab3..fe02e3b462636 100644 --- a/airflow-core/src/airflow/api/common/mark_tasks.py +++ b/airflow-core/src/airflow/api/common/mark_tasks.py @@ -20,7 +20,7 @@ from __future__ import annotations from collections.abc import Collection, Iterable -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, TypeAlias, cast from sqlalchemy import and_, or_, select from sqlalchemy.orm import lazyload @@ -34,7 +34,10 @@ from sqlalchemy.orm import Session as SASession from airflow.models.dag import DAG - from airflow.models.operator import Operator + from airflow.models.mappedoperator import MappedOperator + from airflow.serialization.serialized_objects import SerializedBaseOperator + + Operator: TypeAlias = MappedOperator | SerializedBaseOperator @provide_session @@ -215,17 +218,34 @@ def set_dag_run_state_to_success( if not run_id: raise ValueError(f"Invalid dag_run_id: {run_id}") - # Mark all task instances of the dag run to success - except for teardown as they need to complete work. - normal_tasks = [task for task in dag.tasks if not task.is_teardown] + # TODO (GH-52141): 'tasks' in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. + tasks = cast("list[Operator]", dag.tasks) + + # Mark all task instances of the dag run to success - except for unfinished teardown as they need to complete work. + teardown_tasks = [task for task in tasks if task.is_teardown] + unfinished_teardown_task_ids = set( + session.scalars( + select(TaskInstance.task_id).where( + TaskInstance.dag_id == dag.dag_id, + TaskInstance.run_id == run_id, + TaskInstance.task_id.in_(task.task_id for task in teardown_tasks), + or_(TaskInstance.state.is_(None), TaskInstance.state.in_(State.unfinished)), + ) + ) + ) - # Mark the dag run to success. - if commit and len(normal_tasks) == len(dag.tasks): + # Mark the dag run to success if there are no unfinished teardown tasks. 
+ if commit and len(unfinished_teardown_task_ids) == 0: _set_dag_run_state(dag.dag_id, run_id, DagRunState.SUCCESS, session) - for task in normal_tasks: + tasks_to_mark_success = [task for task in tasks if not task.is_teardown] + [ + task for task in teardown_tasks if task.task_id not in unfinished_teardown_task_ids + ] + for task in tasks_to_mark_success: task.dag = dag return set_state( - tasks=normal_tasks, + tasks=tasks_to_mark_success, run_id=run_id, state=TaskInstanceState.SUCCESS, commit=commit, @@ -276,13 +296,19 @@ set_dag_run_state_to_failed( ).all() # Do not kill teardown tasks - task_ids_of_running_tis = [ti.task_id for ti in running_tis if not dag.task_dict[ti.task_id].is_teardown] + task_ids_of_running_tis = {ti.task_id for ti in running_tis if not dag.task_dict[ti.task_id].is_teardown} - running_tasks = [] - for task in dag.tasks: - if task.task_id in task_ids_of_running_tis: - task.dag = dag - running_tasks.append(task) + def _set_running_task(task: Operator) -> Operator: + task.dag = dag + return task + + # TODO (GH-52141): 'tasks' in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. + running_tasks = [ + _set_running_task(task) + for task in cast("list[Operator]", dag.tasks) + if task.task_id in task_ids_of_running_tis + ] # Mark non-finished tasks as SKIPPED. pending_tis: list[TaskInstance] = session.scalars( diff --git a/airflow-core/src/airflow/api/common/trigger_dag.py b/airflow-core/src/airflow/api/common/trigger_dag.py index 28ab45975d7a0..d1cf5d93b8e51 100644 --- a/airflow-core/src/airflow/api/common/trigger_dag.py +++ b/airflow-core/src/airflow/api/common/trigger_dag.py @@ -22,9 +22,9 @@ import json from typing import TYPE_CHECKING +from airflow._shared.timezones import timezone from airflow.exceptions import DagNotFound, DagRunAlreadyExists from airflow.models import DagBag, DagModel, DagRun -from airflow.utils import timezone from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunTriggeredByType, DagRunType @@ -41,6 +41,7 @@ def _trigger_dag( dag_bag: DagBag, *, triggered_by: DagRunTriggeredByType, + triggering_user_name: str | None = None, run_after: datetime | None = None, run_id: str | None = None, conf: dict | str | None = None, @@ -54,7 +55,8 @@ def _trigger_dag( :param dag_id: DAG ID :param dag_bag: DAG Bag model :param triggered_by: the entity which triggers the dag_run - :param run_after: the datetime before which dag cannot run.
+ :param triggering_user_name: the user name who triggers the dag_run + :param run_after: the datetime before which dag cannot run :param run_id: ID of the run :param conf: configuration :param logical_date: logical date of the run @@ -67,6 +69,7 @@ def _trigger_dag( raise DagNotFound(f"Dag id {dag_id} not found") run_after = run_after or timezone.coerce_datetime(timezone.utcnow()) + coerced_logical_date: datetime | None = None if logical_date: if not timezone.is_localized(logical_date): raise ValueError("The logical date should be localized") @@ -84,7 +87,6 @@ def _trigger_dag( coerced_logical_date = timezone.coerce_datetime(logical_date) data_interval = dag.timetable.infer_manual_data_interval(run_after=run_after) else: - coerced_logical_date = None data_interval = None run_id = run_id or DagRun.generate_run_id( @@ -111,6 +113,7 @@ def _trigger_dag( conf=run_conf, run_type=DagRunType.MANUAL, triggered_by=triggered_by, + triggering_user_name=triggering_user_name, state=DagRunState.QUEUED, session=session, ) @@ -123,6 +126,7 @@ def trigger_dag( dag_id: str, *, triggered_by: DagRunTriggeredByType, + triggering_user_name: str | None = None, run_after: datetime | None = None, run_id: str | None = None, conf: dict | str | None = None, @@ -135,7 +139,8 @@ def trigger_dag( :param dag_id: DAG ID :param triggered_by: the entity which triggers the dag_run - :param run_after: the datetime before which dag won't run. + :param triggering_user_name: the user name who triggers the dag_run + :param run_after: the datetime before which dag won't run :param run_id: ID of the dag_run :param conf: configuration :param logical_date: date of execution @@ -157,6 +162,7 @@ def trigger_dag( logical_date=logical_date, replace_microseconds=replace_microseconds, triggered_by=triggered_by, + triggering_user_name=triggering_user_name, session=session, ) diff --git a/airflow-core/src/airflow/api_fastapi/app.py b/airflow-core/src/airflow/api_fastapi/app.py index b11cf6824f5d8..f515be6992c60 100644 --- a/airflow-core/src/airflow/api_fastapi/app.py +++ b/airflow-core/src/airflow/api_fastapi/app.py @@ -24,14 +24,15 @@ from fastapi import FastAPI from starlette.routing import Mount +from airflow.api_fastapi.common.dagbag import create_dag_bag from airflow.api_fastapi.core_api.app import ( init_config, init_error_handlers, init_flask_plugins, init_middlewares, + init_ui_plugins, init_views, ) -from airflow.api_fastapi.core_api.init_dagbag import get_dag_bag from airflow.api_fastapi.execution_api.app import create_task_execution_api_app from airflow.configuration import conf from airflow.exceptions import AirflowConfigException @@ -80,7 +81,7 @@ def create_app(apps: str = "all") -> FastAPI: version="2", ) - dag_bag = get_dag_bag() + dag_bag = create_dag_bag() if "execution" in apps_list or "all" in apps_list: task_exec_api_app = create_task_execution_api_app() @@ -93,6 +94,7 @@ def create_app(apps: str = "all") -> FastAPI: init_plugins(app) init_auth_manager(app) init_flask_plugins(app) + init_ui_plugins(app) init_views(app) # Core views need to be the last routes added - it has a catch all route init_error_handlers(app) init_middlewares(app) @@ -111,9 +113,10 @@ def cached_app(config=None, testing=False, apps="all") -> FastAPI: def purge_cached_app() -> None: - """Remove the cached version of the app in global state.""" - global app + """Remove the cached version of the app and auth_manager in global state.""" + global app, auth_manager app = None + auth_manager = None def get_auth_manager_cls() -> type[BaseAuthManager]: @@ 
-166,7 +169,7 @@ def get_auth_manager() -> BaseAuthManager: def init_plugins(app: FastAPI) -> None: - """Integrate FastAPI app and middleware plugins.""" + """Integrate FastAPI app, middlewares and UI plugins.""" from airflow import plugins_manager plugins_manager.initialize_fastapi_plugins() @@ -186,6 +189,7 @@ def init_plugins(app: FastAPI) -> None: log.debug("Adding subapplication %s under prefix %s", name, url_prefix) app.mount(url_prefix, subapp) + # After calling initialize_fastapi_plugins, fastapi_root_middlewares cannot be None anymore. for middleware_dict in cast("list", plugins_manager.fastapi_root_middlewares): name = middleware_dict.get("name") middleware = middleware_dict.get("middleware") diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py b/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py index 629d9862164d7..8e7ef8573f2dc 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py @@ -20,7 +20,7 @@ import logging from abc import ABCMeta, abstractmethod from functools import cache -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from typing import TYPE_CHECKING, Any, Generic, Literal, TypeVar from jwt import InvalidTokenError from sqlalchemy import select @@ -36,7 +36,6 @@ from airflow.api_fastapi.common.types import ExtraMenuItem, MenuItem from airflow.configuration import conf from airflow.models import DagModel -from airflow.typing_compat import Literal from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.session import NEW_SESSION, provide_session @@ -47,10 +46,7 @@ from sqlalchemy.orm import Session from airflow.api_fastapi.auth.managers.models.batch_apis import ( - IsAuthorizedConnectionRequest, IsAuthorizedDagRequest, - IsAuthorizedPoolRequest, - IsAuthorizedVariableRequest, ) from airflow.api_fastapi.auth.managers.models.resource_details import ( AccessView, @@ -66,7 +62,10 @@ # This cannot be in the TYPE_CHECKING block since some providers import it globally. # TODO: Move this inside once all providers drop Airflow 2.x support. -ResourceMethod = Literal["GET", "POST", "PUT", "DELETE", "MENU"] +# List of methods (or actions) a user can perform against a resource +ResourceMethod = Literal["GET", "POST", "PUT", "DELETE"] +# Extends ``ResourceMethod`` to include "MENU". The method "MENU" is only supported with specific resources (menu items) +ExtendedResourceMethod = Literal["GET", "POST", "PUT", "DELETE", "MENU"] log = logging.getLogger(__name__) T = TypeVar("T", bound=BaseUser) @@ -133,6 +132,15 @@ def get_url_logout(self) -> str | None: """ return None + def get_url_refresh(self) -> str | None: + """ + Return the URL to refresh the authentication token. + + This is used to refresh the authentication token when it expires. + The default implementation returns None, which means that the auth manager does not support token refresh. + """ + return None + @abstractmethod def is_authorized_configuration( self, @@ -304,27 +312,6 @@ def filter_authorized_menu_items(self, menu_items: list[MenuItem], *, user: T) - :param user: the user """ - def batch_is_authorized_connection( - self, - requests: Sequence[IsAuthorizedConnectionRequest], - *, - user: T, - ) -> bool: - """ - Batch version of ``is_authorized_connection``. - - By default, calls individually the ``is_authorized_connection`` API on each item in the list of requests, which can lead to some poor performance.
It is recommended to override this method in the auth - manager implementation to provide a more efficient implementation. - - :param requests: a list of requests containing the parameters for ``is_authorized_connection`` - :param user: the user to performing the action - """ - return all( - self.is_authorized_connection(method=request["method"], details=request.get("details"), user=user) - for request in requests - ) - def batch_is_authorized_dag( self, requests: Sequence[IsAuthorizedDagRequest], @@ -351,48 +338,6 @@ def batch_is_authorized_dag( for request in requests ) - def batch_is_authorized_pool( - self, - requests: Sequence[IsAuthorizedPoolRequest], - *, - user: T, - ) -> bool: - """ - Batch version of ``is_authorized_pool``. - - By default, calls individually the ``is_authorized_pool`` API on each item in the list of - requests. Can lead to some poor performance. It is recommended to override this method in the auth - manager implementation to provide a more efficient implementation. - - :param requests: a list of requests containing the parameters for ``is_authorized_pool`` - :param user: the user to performing the action - """ - return all( - self.is_authorized_pool(method=request["method"], details=request.get("details"), user=user) - for request in requests - ) - - def batch_is_authorized_variable( - self, - requests: Sequence[IsAuthorizedVariableRequest], - *, - user: T, - ) -> bool: - """ - Batch version of ``is_authorized_variable``. - - By default, calls individually the ``is_authorized_variable`` API on each item in the list of - requests. Can lead to some poor performance. It is recommended to override this method in the auth - manager implementation to provide a more efficient implementation. - - :param requests: a list of requests containing the parameters for ``is_authorized_variable`` - :param user: the user to performing the action - """ - return all( - self.is_authorized_variable(method=request["method"], details=request.get("details"), user=user) - for request in requests - ) - @provide_session def get_authorized_dag_ids( self, diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py b/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py index 2fe11b659af6e..5acdd3edee5f2 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py @@ -22,38 +22,14 @@ if TYPE_CHECKING: from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod from airflow.api_fastapi.auth.managers.models.resource_details import ( - ConnectionDetails, DagAccessEntity, DagDetails, - PoolDetails, - VariableDetails, ) -class IsAuthorizedConnectionRequest(TypedDict, total=False): - """Represent the parameters of ``is_authorized_connection`` API in the auth manager.""" - - method: ResourceMethod - details: ConnectionDetails | None - - class IsAuthorizedDagRequest(TypedDict, total=False): """Represent the parameters of ``is_authorized_dag`` API in the auth manager.""" method: ResourceMethod access_entity: DagAccessEntity | None details: DagDetails | None - - -class IsAuthorizedPoolRequest(TypedDict, total=False): - """Represent the parameters of ``is_authorized_pool`` API in the auth manager.""" - - method: ResourceMethod - details: PoolDetails | None - - -class IsAuthorizedVariableRequest(TypedDict, total=False): - """Represent the parameters of ``is_authorized_variable`` API in the auth manager.""" - - method: ResourceMethod - details: 
VariableDetails | None diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/models/resource_details.py b/airflow-core/src/airflow/api_fastapi/auth/managers/models/resource_details.py index 7aefccab703bb..038f82b30672f 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/models/resource_details.py +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/models/resource_details.py @@ -20,6 +20,8 @@ from dataclasses import dataclass from enum import Enum +from pydantic import NonNegativeInt + @dataclass class ConfigurationDetails: @@ -46,7 +48,7 @@ class DagDetails: class BackfillDetails: """Represents the details of a backfill.""" - id: str | None = None + id: NonNegativeInt | None = None @dataclass @@ -96,12 +98,11 @@ class DagAccessEntity(Enum): AUDIT_LOG = "AUDIT_LOG" CODE = "CODE" DEPENDENCIES = "DEPENDENCIES" + HITL_DETAIL = "HITL_DETAIL" RUN = "RUN" - SLA_MISS = "SLA_MISS" TASK = "TASK" TASK_INSTANCE = "TASK_INSTANCE" TASK_LOGS = "TASK_LOGS" - TASK_RESCHEDULE = "TASK_RESCHEDULE" VERSION = "VERSION" WARNING = "WARNING" XCOM = "XCOM" diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/middleware.py b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/middleware.py new file mode 100644 index 0000000000000..536442cf5bf19 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/middleware.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from fastapi import Request +from starlette.middleware.base import BaseHTTPMiddleware + +from airflow.api_fastapi.auth.managers.simple.services.login import SimpleAuthManagerLogin + + +class SimpleAllAdminMiddleware(BaseHTTPMiddleware): + """Middleware that automatically generates and includes auth header for simple auth manager.""" + + async def dispatch(self, request: Request, call_next): + token = SimpleAuthManagerLogin.create_token_all_admins() + request.scope["headers"].append((b"authorization", f"Bearer {token}".encode())) + return await call_next(request) diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v1-simple-auth-manager-generated.yaml b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v1-simple-auth-manager-generated.yaml deleted file mode 100644 index 31d3ebdc19d94..0000000000000 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v1-simple-auth-manager-generated.yaml +++ /dev/null @@ -1,193 +0,0 @@ -openapi: 3.1.0 -info: - title: Simple auth manager sub application - description: This is the simple auth manager fastapi sub application. This API is - only available if the auth manager used in the Airflow environment is simple auth - manager. This sub application provides the login form for users to log in. 
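
The new SimpleAllAdminMiddleware above works by appending a bearer token to the raw ASGI header list before the request reaches any router. A minimal, self-contained sketch of the same pattern follows; make_token() is a hypothetical stand-in for SimpleAuthManagerLogin.create_token_all_admins(), whose implementation is not part of this diff:

from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from starlette.middleware.base import BaseHTTPMiddleware


def make_token() -> str:
    # Hypothetical stand-in for SimpleAuthManagerLogin.create_token_all_admins().
    return "dummy-jwt"


class AllAdminMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        # Append a bearer token to the raw ASGI headers, just like the middleware in the diff.
        request.scope["headers"].append((b"authorization", f"Bearer {make_token()}".encode()))
        return await call_next(request)


app = FastAPI()
app.add_middleware(AllAdminMiddleware)


@app.get("/whoami")
def whoami(request: Request) -> dict:
    # The injected header is visible to every downstream handler.
    return {"authorization": request.headers.get("authorization")}


if __name__ == "__main__":
    print(TestClient(app).get("/whoami").json())  # {'authorization': 'Bearer dummy-jwt'}
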
- version: 0.1.0 -paths: - /auth/token: - get: - tags: - - SimpleAuthManagerLogin - summary: Create Token All Admins - description: Create a token with no credentials only if ``simple_auth_manager_all_admins`` - is True. - operationId: create_token_all_admins - responses: - '201': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/LoginResponse' - '403': - description: Forbidden - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - post: - tags: - - SimpleAuthManagerLogin - summary: Create Token - description: Authenticate the user. - operationId: create_token - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/LoginBody' - required: true - responses: - '201': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/LoginResponse' - '400': - description: Bad Request - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '401': - description: Unauthorized - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /auth/token/login: - get: - tags: - - SimpleAuthManagerLogin - summary: Login All Admins - description: Login the user with no credentials. - operationId: login_all_admins - responses: - '307': - description: Successful Response - content: - application/json: - schema: {} - '403': - description: Forbidden - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - /auth/token/cli: - post: - tags: - - SimpleAuthManagerLogin - summary: Create Token Cli - description: Authenticate the user for the CLI. - operationId: create_token_cli - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/LoginBody' - required: true - responses: - '201': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/LoginResponse' - '400': - description: Bad Request - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '401': - description: Unauthorized - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' -components: - schemas: - HTTPExceptionResponse: - properties: - detail: - anyOf: - - type: string - - additionalProperties: true - type: object - title: Detail - type: object - required: - - detail - title: HTTPExceptionResponse - description: HTTPException Model used for error response. - HTTPValidationError: - properties: - detail: - items: - $ref: '#/components/schemas/ValidationError' - type: array - title: Detail - type: object - title: HTTPValidationError - LoginBody: - properties: - username: - type: string - title: Username - password: - type: string - title: Password - additionalProperties: false - type: object - required: - - username - - password - title: LoginBody - description: Login serializer for post bodies. - LoginResponse: - properties: - access_token: - type: string - title: Access Token - type: object - required: - - access_token - title: LoginResponse - description: Login serializer for responses. 
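
The LoginBody/LoginResponse schemas above define the token contract that survives unchanged into the v2 spec that follows: POST /auth/token takes a username/password pair and returns the JWT in access_token. A hedged client-side sketch, where the base URL and credentials are placeholders:

import httpx

BASE_URL = "http://localhost:8080"  # assumption: a locally running api-server

# POST /auth/token with a LoginBody payload; a LoginResponse comes back on 201.
resp = httpx.post(f"{BASE_URL}/auth/token", json={"username": "admin", "password": "admin"})
resp.raise_for_status()
token = resp.json()["access_token"]

# The token is then sent as a bearer header on subsequent API calls.
headers = {"Authorization": f"Bearer {token}"}
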
- ValidationError: - properties: - loc: - items: - anyOf: - - type: string - - type: integer - type: array - title: Location - msg: - type: string - title: Message - type: - type: string - title: Error Type - type: object - required: - - loc - - msg - - type - title: ValidationError diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v2-simple-auth-manager-generated.yaml b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v2-simple-auth-manager-generated.yaml new file mode 100644 index 0000000000000..d8ffda58d8d52 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/openapi/v2-simple-auth-manager-generated.yaml @@ -0,0 +1,215 @@ +openapi: 3.1.0 +info: + title: Simple auth manager sub application + description: This is the simple auth manager fastapi sub application. This API is + only available if the auth manager used in the Airflow environment is simple auth + manager. This sub application provides the login form for users to log in. + version: 0.1.0 +paths: + /auth/token: + post: + tags: + - SimpleAuthManagerLogin + summary: Create Token + description: Authenticate the user. + operationId: create_token + parameters: + - name: Content-Type + in: header + required: false + schema: + type: string + description: Content-Type of the request body + enum: + - application/json + - application/x-www-form-urlencoded + default: application/json + title: Content-Type + description: Content-Type of the request body + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/LoginResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '415': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unsupported Media Type + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LoginBody' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/LoginBody' + get: + tags: + - SimpleAuthManagerLogin + summary: Create Token All Admins + description: Create a token with no credentials only if ``simple_auth_manager_all_admins`` + is True. + operationId: create_token_all_admins + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/LoginResponse' + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + /auth/token/login: + get: + tags: + - SimpleAuthManagerLogin + summary: Login All Admins + description: Login the user with no credentials. + operationId: login_all_admins + responses: + '307': + description: Successful Response + content: + application/json: + schema: {} + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + /auth/token/cli: + post: + tags: + - SimpleAuthManagerLogin + summary: Create Token Cli + description: Authenticate the user for the CLI. 
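
Per the v2 spec above, the same POST /auth/token operation now also advertises an application/x-www-form-urlencoded request body and a 415 response for any other Content-Type. A sketch of the form-encoded variant, again with placeholder URL and credentials:

import httpx

# httpx encodes `data=` as application/x-www-form-urlencoded, one of the two
# content types the v2 operation accepts.
resp = httpx.post(
    "http://localhost:8080/auth/token",  # placeholder URL
    data={"username": "admin", "password": "admin"},
)
if resp.status_code == 201:
    print(resp.json()["access_token"])
elif resp.status_code == 415:
    # Any other Content-Type is rejected per the spec above.
    print("Unsupported media type:", resp.json()["detail"])
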
+ operationId: create_token_cli + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/LoginBody' + required: true + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/LoginResponse' + '400': + description: Bad Request + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '401': + description: Unauthorized + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' +components: + schemas: + HTTPExceptionResponse: + properties: + detail: + anyOf: + - type: string + - additionalProperties: true + type: object + title: Detail + type: object + required: + - detail + title: HTTPExceptionResponse + description: HTTPException Model used for error response. + HTTPValidationError: + properties: + detail: + items: + $ref: '#/components/schemas/ValidationError' + type: array + title: Detail + type: object + title: HTTPValidationError + LoginBody: + properties: + username: + type: string + title: Username + password: + type: string + title: Password + additionalProperties: false + type: object + required: + - username + - password + title: LoginBody + description: Login serializer for post bodies. + LoginResponse: + properties: + access_token: + type: string + title: Access Token + type: object + required: + - access_token + title: LoginResponse + description: Login serializer for responses. + ValidationError: + properties: + loc: + items: + anyOf: + - type: string + - type: integer + type: array + title: Location + msg: + type: string + title: Message + type: + type: string + title: Error Type + type: object + required: + - loc + - msg + - type + title: ValidationError diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/routes/login.py b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/routes/login.py index c901692d8f9c8..f46674301ead1 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/routes/login.py +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/routes/login.py @@ -17,13 +17,15 @@ from __future__ import annotations -from fastapi import status +from fastapi import Depends, Request, status from starlette.responses import RedirectResponse from airflow.api_fastapi.auth.managers.base_auth_manager import COOKIE_NAME_JWT_TOKEN from airflow.api_fastapi.auth.managers.simple.datamodels.login import LoginBody, LoginResponse from airflow.api_fastapi.auth.managers.simple.services.login import SimpleAuthManagerLogin +from airflow.api_fastapi.auth.managers.simple.utils import parse_login_body from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.common.types import Mimetype from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.configuration import conf @@ -33,10 +35,33 @@ @login_router.post( "/token", status_code=status.HTTP_201_CREATED, - responses=create_openapi_http_exception_doc([status.HTTP_400_BAD_REQUEST, status.HTTP_401_UNAUTHORIZED]), + responses={ + **create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_401_UNAUTHORIZED, + status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, + ] + ), + 201: { + "description": "Successful Response", + "content": { + Mimetype.JSON: {"schema": {"$ref": "#/components/schemas/LoginResponse"}}, + }, + 
}, + }, + openapi_extra={ + "requestBody": { + "required": True, + "content": { + "application/json": {"schema": {"$ref": "#/components/schemas/LoginBody"}}, + "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/LoginBody"}}, + }, + } + }, ) def create_token( - body: LoginBody, + body: LoginBody = Depends(parse_login_body), ) -> LoginResponse: """Authenticate the user.""" return LoginResponse(access_token=SimpleAuthManagerLogin.create_token(body=body)) @@ -57,10 +82,14 @@ def create_token_all_admins() -> LoginResponse: status_code=status.HTTP_307_TEMPORARY_REDIRECT, responses=create_openapi_http_exception_doc([status.HTTP_403_FORBIDDEN]), ) -def login_all_admins() -> RedirectResponse: +def login_all_admins(request: Request) -> RedirectResponse: """Login the user with no credentials.""" response = RedirectResponse(url=conf.get("api", "base_url", fallback="/")) - secure = conf.has_option("api", "ssl_cert") + + # The default config has this as an empty string, so we can't use `has_option`. + # And look at the request info (needs `--proxy-headers` flag to api-server) + secure = request.base_url.scheme == "https" or bool(conf.get("api", "ssl_cert", fallback="")) + response.set_cookie( COOKIE_NAME_JWT_TOKEN, SimpleAuthManagerLogin.create_token_all_admins(), diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/eslint.config.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/eslint.config.js index 31467415319b3..f2747f1f807e1 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/eslint.config.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/eslint.config.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/common.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/common.ts index 497c0a3e40cf5..868eed70220fc 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/common.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/common.ts @@ -1,37 +1,14 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { UseQueryResult } from "@tanstack/react-query"; +// generated with @7nohe/openapi-react-query-codegen@1.6.2 +import { UseQueryResult } from "@tanstack/react-query"; import { SimpleAuthManagerLoginService } from "../requests/services.gen"; - -export type SimpleAuthManagerLoginServiceCreateTokenAllAdminsDefaultResponse = Awaited< - ReturnType ->; -export type SimpleAuthManagerLoginServiceCreateTokenAllAdminsQueryResult< - TData = SimpleAuthManagerLoginServiceCreateTokenAllAdminsDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useSimpleAuthManagerLoginServiceCreateTokenAllAdminsKey = - "SimpleAuthManagerLoginServiceCreateTokenAllAdmins"; -export const UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn = (queryKey?: Array) => [ - useSimpleAuthManagerLoginServiceCreateTokenAllAdminsKey, - ...(queryKey ?? 
[]), -]; -export type SimpleAuthManagerLoginServiceLoginAllAdminsDefaultResponse = Awaited< - ReturnType ->; -export type SimpleAuthManagerLoginServiceLoginAllAdminsQueryResult< - TData = SimpleAuthManagerLoginServiceLoginAllAdminsDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useSimpleAuthManagerLoginServiceLoginAllAdminsKey = - "SimpleAuthManagerLoginServiceLoginAllAdmins"; -export const UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn = (queryKey?: Array) => [ - useSimpleAuthManagerLoginServiceLoginAllAdminsKey, - ...(queryKey ?? []), -]; -export type SimpleAuthManagerLoginServiceCreateTokenMutationResult = Awaited< - ReturnType ->; -export type SimpleAuthManagerLoginServiceCreateTokenCliMutationResult = Awaited< - ReturnType ->; +export type SimpleAuthManagerLoginServiceCreateTokenAllAdminsDefaultResponse = Awaited>; +export type SimpleAuthManagerLoginServiceCreateTokenAllAdminsQueryResult = UseQueryResult; +export const useSimpleAuthManagerLoginServiceCreateTokenAllAdminsKey = "SimpleAuthManagerLoginServiceCreateTokenAllAdmins"; +export const UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn = (queryKey?: Array) => [useSimpleAuthManagerLoginServiceCreateTokenAllAdminsKey, ...(queryKey ?? [])]; +export type SimpleAuthManagerLoginServiceLoginAllAdminsDefaultResponse = Awaited>; +export type SimpleAuthManagerLoginServiceLoginAllAdminsQueryResult = UseQueryResult; +export const useSimpleAuthManagerLoginServiceLoginAllAdminsKey = "SimpleAuthManagerLoginServiceLoginAllAdmins"; +export const UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn = (queryKey?: Array) => [useSimpleAuthManagerLoginServiceLoginAllAdminsKey, ...(queryKey ?? [])]; +export type SimpleAuthManagerLoginServiceCreateTokenMutationResult = Awaited>; +export type SimpleAuthManagerLoginServiceCreateTokenCliMutationResult = Awaited>; diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/ensureQueryData.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/ensureQueryData.ts index 2d73f46985c5b..c1213d8527779 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/ensureQueryData.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/ensureQueryData.ts @@ -1,16 +1,7 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { type QueryClient } from "@tanstack/react-query"; +// generated with @7nohe/openapi-react-query-codegen@1.6.2 +import { type QueryClient } from "@tanstack/react-query"; import { SimpleAuthManagerLoginService } from "../requests/services.gen"; import * as Common from "./common"; - -export const ensureUseSimpleAuthManagerLoginServiceCreateTokenAllAdminsData = (queryClient: QueryClient) => - queryClient.ensureQueryData({ - queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(), - queryFn: () => SimpleAuthManagerLoginService.createTokenAllAdmins(), - }); -export const ensureUseSimpleAuthManagerLoginServiceLoginAllAdminsData = (queryClient: QueryClient) => - queryClient.ensureQueryData({ - queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(), - queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins(), - }); +export const ensureUseSimpleAuthManagerLoginServiceCreateTokenAllAdminsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(), queryFn: () => 
SimpleAuthManagerLoginService.createTokenAllAdmins() }); +export const ensureUseSimpleAuthManagerLoginServiceLoginAllAdminsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(), queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins() }); diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/index.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/index.ts index 987c8a4ea6dde..8e9b6922f00c8 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/index.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/index.ts @@ -1,4 +1,4 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 +// generated with @7nohe/openapi-react-query-codegen@1.6.2 export * from "./common"; export * from "./queries"; diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/infiniteQueries.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/infiniteQueries.ts index 0baac0445f402..37298729b1133 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/infiniteQueries.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/infiniteQueries.ts @@ -1 +1,2 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 +// generated with @7nohe/openapi-react-query-codegen@1.6.2 + diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/prefetch.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/prefetch.ts index a9e6112475c95..6801202bf314f 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/prefetch.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/prefetch.ts @@ -1,16 +1,7 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { type QueryClient } from "@tanstack/react-query"; +// generated with @7nohe/openapi-react-query-codegen@1.6.2 +import { type QueryClient } from "@tanstack/react-query"; import { SimpleAuthManagerLoginService } from "../requests/services.gen"; import * as Common from "./common"; - -export const prefetchUseSimpleAuthManagerLoginServiceCreateTokenAllAdmins = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(), - queryFn: () => SimpleAuthManagerLoginService.createTokenAllAdmins(), - }); -export const prefetchUseSimpleAuthManagerLoginServiceLoginAllAdmins = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(), - queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins(), - }); +export const prefetchUseSimpleAuthManagerLoginServiceCreateTokenAllAdmins = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(), queryFn: () => SimpleAuthManagerLoginService.createTokenAllAdmins() }); +export const prefetchUseSimpleAuthManagerLoginServiceLoginAllAdmins = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(), queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins() }); diff --git 
a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/queries.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/queries.ts index 316aa8d02ce0e..409795e6945c4 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/queries.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/queries.ts @@ -1,91 +1,20 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; +// generated with @7nohe/openapi-react-query-codegen@1.6.2 +import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; import { SimpleAuthManagerLoginService } from "../requests/services.gen"; import { LoginBody } from "../requests/types.gen"; import * as Common from "./common"; - -export const useSimpleAuthManagerLoginServiceCreateTokenAllAdmins = < - TData = Common.SimpleAuthManagerLoginServiceCreateTokenAllAdminsDefaultResponse, - TError = unknown, - TQueryKey extends Array<unknown> = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">, -) => - useQuery<TData, TError>({ - queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(queryKey), - queryFn: () => SimpleAuthManagerLoginService.createTokenAllAdmins() as TData, - ...options, - }); -export const useSimpleAuthManagerLoginServiceLoginAllAdmins = < - TData = Common.SimpleAuthManagerLoginServiceLoginAllAdminsDefaultResponse, - TError = unknown, - TQueryKey extends Array<unknown> = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">, -) => - useQuery<TData, TError>({ - queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(queryKey), - queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins() as TData, - ...options, - }); -export const useSimpleAuthManagerLoginServiceCreateToken = < - TData = Common.SimpleAuthManagerLoginServiceCreateTokenMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - requestBody: LoginBody; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - requestBody: LoginBody; - }, - TContext - >({ - mutationFn: ({ requestBody }) => - SimpleAuthManagerLoginService.createToken({ requestBody }) as unknown as Promise<TData>, - ...options, - }); -export const useSimpleAuthManagerLoginServiceCreateTokenCli = < - TData = Common.SimpleAuthManagerLoginServiceCreateTokenCliMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - requestBody: LoginBody; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - requestBody: LoginBody; - }, - TContext - >({ - mutationFn: ({ requestBody }) => - SimpleAuthManagerLoginService.createTokenCli({ requestBody }) as unknown as Promise<TData>, - ...options, - }); +export const useSimpleAuthManagerLoginServiceCreateTokenAllAdmins = <TData = Common.SimpleAuthManagerLoginServiceCreateTokenAllAdminsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(queryKey), queryFn: () => SimpleAuthManagerLoginService.createTokenAllAdmins() as TData, ...options }); +export const useSimpleAuthManagerLoginServiceLoginAllAdmins = <TData = Common.SimpleAuthManagerLoginServiceLoginAllAdminsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(queryKey),
queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins() as TData, ...options }); +export const useSimpleAuthManagerLoginServiceCreateToken = <TData = Common.SimpleAuthManagerLoginServiceCreateTokenMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { contentType?: "application/json" | "application/x-www-form-urlencoded"; requestBody: LoginBody; }, TContext>, "mutationFn">) => useMutation<TData, TError, { contentType?: "application/json" | "application/x-www-form-urlencoded"; requestBody: LoginBody; }, TContext>({ mutationFn: ({ contentType, requestBody }) => SimpleAuthManagerLoginService.createToken({ contentType, requestBody }) as unknown as Promise<TData>, ...options }); +export const useSimpleAuthManagerLoginServiceCreateTokenCli = <TData = Common.SimpleAuthManagerLoginServiceCreateTokenCliMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: LoginBody; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: LoginBody; }, TContext>({ mutationFn: ({ requestBody }) => SimpleAuthManagerLoginService.createTokenCli({ requestBody }) as unknown as Promise<TData>, ...options }); diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/suspense.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/suspense.ts index de11487464fe4..79b01bc72cb32 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/suspense.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/queries/suspense.ts @@ -1,32 +1,7 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; +// generated with @7nohe/openapi-react-query-codegen@1.6.2 +import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; import { SimpleAuthManagerLoginService } from "../requests/services.gen"; import * as Common from "./common"; - -export const useSimpleAuthManagerLoginServiceCreateTokenAllAdminsSuspense = < - TData = Common.SimpleAuthManagerLoginServiceCreateTokenAllAdminsDefaultResponse, - TError = unknown, - TQueryKey extends Array<unknown> = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">, -) => - useSuspenseQuery<TData, TError>({ - queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(queryKey), - queryFn: () => SimpleAuthManagerLoginService.createTokenAllAdmins() as TData, - ...options, - }); -export const useSimpleAuthManagerLoginServiceLoginAllAdminsSuspense = < - TData = Common.SimpleAuthManagerLoginServiceLoginAllAdminsDefaultResponse, - TError = unknown, - TQueryKey extends Array<unknown> = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">, -) => - useSuspenseQuery<TData, TError>({ - queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(queryKey), - queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins() as TData, - ...options, - }); +export const useSimpleAuthManagerLoginServiceCreateTokenAllAdminsSuspense = <TData = Common.SimpleAuthManagerLoginServiceCreateTokenAllAdminsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseSimpleAuthManagerLoginServiceCreateTokenAllAdminsKeyFn(queryKey), queryFn: () => SimpleAuthManagerLoginService.createTokenAllAdmins() as TData, ...options }); +export const useSimpleAuthManagerLoginServiceLoginAllAdminsSuspense = <TData = Common.SimpleAuthManagerLoginServiceLoginAllAdminsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseSimpleAuthManagerLoginServiceLoginAllAdminsKeyFn(queryKey), queryFn: () => SimpleAuthManagerLoginService.loginAllAdmins() as TData, ...options }); diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/services.gen.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/services.gen.ts index 754bf68c5c8c1..34de8d7c82c0f 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/services.gen.ts +++
b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/services.gen.ts @@ -3,35 +3,20 @@ import type { CancelablePromise } from "./core/CancelablePromise"; import { OpenAPI } from "./core/OpenAPI"; import { request as __request } from "./core/request"; import type { - CreateTokenAllAdminsResponse, CreateTokenData, CreateTokenResponse, + CreateTokenAllAdminsResponse, CreateTokenCliData, CreateTokenCliResponse, } from "./types.gen"; export class SimpleAuthManagerLoginService { - /** - * Create Token All Admins - * Create a token with no credentials only if ``simple_auth_manager_all_admins`` is True. - * @returns LoginResponse Successful Response - * @throws ApiError - */ - public static createTokenAllAdmins(): CancelablePromise<CreateTokenAllAdminsResponse> { - return __request(OpenAPI, { - method: "GET", - url: "/auth/token", - errors: { - 403: "Forbidden", - }, - }); - } - /** * Create Token * Authenticate the user. * @param data The data for the request. * @param data.requestBody + * @param data.contentType Content-Type of the request body * @returns LoginResponse Successful Response * @throws ApiError */ @@ -39,16 +24,36 @@ export class SimpleAuthManagerLoginService { return __request(OpenAPI, { method: "POST", url: "/auth/token", + headers: { + "Content-Type": data.contentType, + }, body: data.requestBody, mediaType: "application/json", errors: { 400: "Bad Request", 401: "Unauthorized", + 415: "Unsupported Media Type", 422: "Validation Error", }, }); } + /** + * Create Token All Admins + * Create a token with no credentials only if ``simple_auth_manager_all_admins`` is True. + * @returns LoginResponse Successful Response + * @throws ApiError + */ + public static createTokenAllAdmins(): CancelablePromise<CreateTokenAllAdminsResponse> { + return __request(OpenAPI, { + method: "GET", + url: "/auth/token", + errors: { + 403: "Forbidden", + }, + }); + } + /** * Login All Admins * Login the user with no credentials.
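
The regenerated client above forwards an explicit Content-Type header to createToken, matching the parse_login_body dependency now used by routes/login.py earlier in this diff. That dependency's implementation is not part of these hunks; a rough sketch of what a dual content-type parser could look like, where everything below is an assumption for illustration rather than the actual Airflow code:

from fastapi import HTTPException, Request, status
from pydantic import BaseModel


class LoginBody(BaseModel):
    # Mirrors the LoginBody schema from the generated OpenAPI spec.
    username: str
    password: str


async def parse_login_body(request: Request) -> LoginBody:
    # Hypothetical sketch: branch on the declared Content-Type and reject anything else with 415.
    content_type = request.headers.get("content-type", "application/json").split(";")[0].strip()
    if content_type == "application/json":
        return LoginBody(**(await request.json()))
    if content_type == "application/x-www-form-urlencoded":
        form = await request.form()
        return LoginBody(username=str(form.get("username", "")), password=str(form.get("password", "")))
    raise HTTPException(
        status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
        detail=f"Unsupported Media Type: {content_type}",
    )
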
diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/types.gen.ts index 058c1e08b1626..b0e3d92afe481 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/openapi-gen/requests/types.gen.ts @@ -36,14 +36,18 @@ export type ValidationError = { type: string; }; -export type CreateTokenAllAdminsResponse = LoginResponse; - export type CreateTokenData = { + /** + * Content-Type of the request body + */ + contentType?: "application/json" | "application/x-www-form-urlencoded"; requestBody: LoginBody; }; export type CreateTokenResponse = LoginResponse; +export type CreateTokenAllAdminsResponse = LoginResponse; + export type CreateTokenCliData = { requestBody: LoginBody; }; @@ -52,18 +56,6 @@ export type CreateTokenCliResponse = LoginResponse; export type $OpenApiTs = { "/auth/token": { - get: { - res: { - /** - * Successful Response - */ - 201: LoginResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - }; - }; post: { req: CreateTokenData; res: { @@ -79,12 +71,28 @@ export type $OpenApiTs = { * Unauthorized */ 401: HTTPExceptionResponse; + /** + * Unsupported Media Type + */ + 415: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; + get: { + res: { + /** + * Successful Response + */ + 201: LoginResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + }; + }; }; "/auth/token/login": { get: { diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package-lock.json b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package-lock.json index 300d4544dce0e..8e5aaf33ad1f0 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package-lock.json +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package-lock.json @@ -8,43 +8,43 @@ "name": "simple-auth-manager-ui", "version": "0.0.0", "dependencies": { - "@chakra-ui/react": "^3.14.2", - "@tanstack/react-query": "^5.70.0", - "axios": "^1.8.4", + "@chakra-ui/react": "^3.24.0", + "@tanstack/react-query": "^5.84.1", + "axios": "^1.11.0", "next-themes": "^0.4.6", - "react": "^19.0.0", + "react": "^19.1.1", "react-cookie": "^8.0.1", - "react-dom": "^19.0.0", - "react-hook-form": "^7.54.2", - "react-router-dom": "^7.4.0" + "react-dom": "^19.1.1", + "react-hook-form": "^7.61.1", + "react-router-dom": "^7.7.1" }, "devDependencies": { "@7nohe/openapi-react-query-codegen": "^1.6.2", - "@eslint/compat": "^1.2.7", - "@eslint/js": "^9.23.0", - "@stylistic/eslint-plugin": "^2.13.0", - "@testing-library/jest-dom": "^6.6.3", - "@testing-library/react": "^16.2.0", - "@trivago/prettier-plugin-sort-imports": "^4.3.0", - "@types/react": "^18.3.19", - "@types/react-dom": "^19.0.0", - "@vitejs/plugin-react-swc": "^3.8.1", - "eslint": "^9.23.0", - "eslint-config-prettier": "^10.1.1", + "@eslint/compat": "^1.3.1", + "@eslint/js": "^9.32.0", + "@stylistic/eslint-plugin": "^5.2.2", + "@testing-library/jest-dom": "^6.6.4", + "@testing-library/react": "^16.3.0", + "@trivago/prettier-plugin-sort-imports": "^5.2.2", + "@types/react": "^19.1.9", + "@types/react-dom": "^19.1.7", + "@vitejs/plugin-react-swc": "^3.11.0", + "eslint": "^9.32.0", + "eslint-config-prettier": "^10.1.8", "eslint-plugin-jsx-a11y": "^6.10.2", - "eslint-plugin-perfectionist": "^4.10.1", - "eslint-plugin-prettier": "^5.2.5", - 
"eslint-plugin-react": "^7.37.4", - "eslint-plugin-react-hooks": "^4.6.2", - "eslint-plugin-react-refresh": "^0.4.19", - "eslint-plugin-unicorn": "^55.0.0", - "happy-dom": "^17.4.4", - "prettier": "^3.5.3", - "typescript": "~5.5.4", - "typescript-eslint": "^8.27.0", - "vite": "^6.2.6", + "eslint-plugin-perfectionist": "^4.15.0", + "eslint-plugin-prettier": "^5.5.3", + "eslint-plugin-react": "^7.37.5", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.20", + "eslint-plugin-unicorn": "^60.0.0", + "happy-dom": "^18.0.1", + "prettier": "^3.6.2", + "typescript": "~5.9.2", + "typescript-eslint": "^8.38.0", + "vite": "^7.0.6", "vite-plugin-css-injected-by-js": "^3.5.2", - "vitest": "^3.0.9" + "vitest": "^3.2.4" } }, "node_modules/@7nohe/openapi-react-query-codegen": { @@ -97,65 +97,70 @@ } }, "node_modules/@ark-ui/react": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/@ark-ui/react/-/react-5.4.0.tgz", - "integrity": "sha512-TatFGOb6zKx4a363jg3McQY+2/wEcUZgTHZTomueFMR+JgqHR98aAFnCPvi2L5UF+326qXEWHxHIPlQLwFUb1A==", - "license": "MIT", - "dependencies": { - "@internationalized/date": "3.7.0", - "@zag-js/accordion": "1.7.0", - "@zag-js/anatomy": "1.7.0", - "@zag-js/auto-resize": "1.7.0", - "@zag-js/avatar": "1.7.0", - "@zag-js/carousel": "1.7.0", - "@zag-js/checkbox": "1.7.0", - "@zag-js/clipboard": "1.7.0", - "@zag-js/collapsible": "1.7.0", - "@zag-js/collection": "1.7.0", - "@zag-js/color-picker": "1.7.0", - "@zag-js/color-utils": "1.7.0", - "@zag-js/combobox": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/date-picker": "1.7.0", - "@zag-js/date-utils": "1.7.0", - "@zag-js/dialog": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/editable": "1.7.0", - "@zag-js/file-upload": "1.7.0", - "@zag-js/file-utils": "1.7.0", - "@zag-js/focus-trap": "1.7.0", - "@zag-js/highlight-word": "1.7.0", - "@zag-js/hover-card": "1.7.0", - "@zag-js/i18n-utils": "1.7.0", - "@zag-js/menu": "1.7.0", - "@zag-js/number-input": "1.7.0", - "@zag-js/pagination": "1.7.0", - "@zag-js/pin-input": "1.7.0", - "@zag-js/popover": "1.7.0", - "@zag-js/presence": "1.7.0", - "@zag-js/progress": "1.7.0", - "@zag-js/qr-code": "1.7.0", - "@zag-js/radio-group": "1.7.0", - "@zag-js/rating-group": "1.7.0", - "@zag-js/react": "1.7.0", - "@zag-js/select": "1.7.0", - "@zag-js/signature-pad": "1.7.0", - "@zag-js/slider": "1.7.0", - "@zag-js/splitter": "1.7.0", - "@zag-js/steps": "1.7.0", - "@zag-js/switch": "1.7.0", - "@zag-js/tabs": "1.7.0", - "@zag-js/tags-input": "1.7.0", - "@zag-js/time-picker": "1.7.0", - "@zag-js/timer": "1.7.0", - "@zag-js/toast": "1.7.0", - "@zag-js/toggle": "1.7.0", - "@zag-js/toggle-group": "1.7.0", - "@zag-js/tooltip": "1.7.0", - "@zag-js/tour": "1.7.0", - "@zag-js/tree-view": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "version": "5.18.2", + "resolved": "https://registry.npmjs.org/@ark-ui/react/-/react-5.18.2.tgz", + "integrity": "sha512-vM2cuKSIe4mCDfqMc4RggsmiulXbicTjpZLf1IUXSHcUluMVn+z2k1minKI4X+Z7XSoKH0To7asxS0nJ1UPODA==", + "license": "MIT", + "dependencies": { + "@internationalized/date": "3.8.2", + "@zag-js/accordion": "1.21.0", + "@zag-js/anatomy": "1.21.0", + "@zag-js/angle-slider": "1.21.0", + "@zag-js/auto-resize": "1.21.0", + "@zag-js/avatar": "1.21.0", + "@zag-js/carousel": "1.21.0", + "@zag-js/checkbox": "1.21.0", + "@zag-js/clipboard": "1.21.0", + "@zag-js/collapsible": "1.21.0", + "@zag-js/collection": "1.21.0", + "@zag-js/color-picker": "1.21.0", + "@zag-js/color-utils": "1.21.0", + "@zag-js/combobox": "1.21.0", + "@zag-js/core": 
"1.21.0", + "@zag-js/date-picker": "1.21.0", + "@zag-js/date-utils": "1.21.0", + "@zag-js/dialog": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/editable": "1.21.0", + "@zag-js/file-upload": "1.21.0", + "@zag-js/file-utils": "1.21.0", + "@zag-js/floating-panel": "1.21.0", + "@zag-js/focus-trap": "1.21.0", + "@zag-js/highlight-word": "1.21.0", + "@zag-js/hover-card": "1.21.0", + "@zag-js/i18n-utils": "1.21.0", + "@zag-js/json-tree-utils": "1.21.0", + "@zag-js/listbox": "1.21.0", + "@zag-js/menu": "1.21.0", + "@zag-js/number-input": "1.21.0", + "@zag-js/pagination": "1.21.0", + "@zag-js/password-input": "1.21.0", + "@zag-js/pin-input": "1.21.0", + "@zag-js/popover": "1.21.0", + "@zag-js/presence": "1.21.0", + "@zag-js/progress": "1.21.0", + "@zag-js/qr-code": "1.21.0", + "@zag-js/radio-group": "1.21.0", + "@zag-js/rating-group": "1.21.0", + "@zag-js/react": "1.21.0", + "@zag-js/select": "1.21.0", + "@zag-js/signature-pad": "1.21.0", + "@zag-js/slider": "1.21.0", + "@zag-js/splitter": "1.21.0", + "@zag-js/steps": "1.21.0", + "@zag-js/switch": "1.21.0", + "@zag-js/tabs": "1.21.0", + "@zag-js/tags-input": "1.21.0", + "@zag-js/time-picker": "1.21.0", + "@zag-js/timer": "1.21.0", + "@zag-js/toast": "1.21.0", + "@zag-js/toggle": "1.21.0", + "@zag-js/toggle-group": "1.21.0", + "@zag-js/tooltip": "1.21.0", + "@zag-js/tour": "1.21.0", + "@zag-js/tree-view": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" }, "peerDependencies": { "react": ">=18.0.0", @@ -163,26 +168,27 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", + "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/generator": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.5.tgz", - "integrity": "sha512-2caSP6fN9I7HOe6nqhtft7V4g7/V/gfDsC3Ag4W7kEzzvRGKqiv0pu0HogPiZ3KaVSoNDhUws6IJjDjpfmYIXw==", - "peer": true, + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.1.tgz", + "integrity": "sha512-UnJfnIpc/+JO0/+KRVQNGU+y5taA5vCbwN8+azkX6beii/ZF+enZJSOKo11ZSzGJjlNfJHfQtmQT8H+9TXPG2w==", + "license": "MIT", "dependencies": { - "@babel/parser": "^7.26.5", - "@babel/types": "^7.26.5", + "@babel/parser": "^7.27.1", + "@babel/types": "^7.27.1", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^3.0.2" @@ -191,46 +197,6 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz", - "integrity": "sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.24.7", - 
"resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz", - "integrity": "sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz", - "integrity": "sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-module-imports": { "version": "7.25.9", "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", @@ -244,42 +210,31 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz", - "integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz", - "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.1.tgz", + "integrity": "sha512-I0dZ3ZpCrJ1c04OqlNsQcKiZlsrXf/kkE4FXzID9rIOYICsAbA8mMDzhW/luRNAHdCNt7os/u8wenklZDlUVUQ==", "license": "MIT", "dependencies": { - "@babel/types": "^7.27.0" + "@babel/types": "^7.27.1" }, "bin": { "parser": "bin/babel-parser.js" @@ -289,9 +244,10 @@ } }, "node_modules/@babel/runtime": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", - "integrity": 
"sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.0.tgz", + "integrity": "sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==", + "license": "MIT", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -300,29 +256,30 @@ } }, "node_modules/@babel/template": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", - "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.1.tgz", + "integrity": "sha512-Fyo3ghWMqkHHpHQCoBs2VnYjR4iWFFjguTDEqA5WgZDOrFesVjMhMM2FSqTKSoUSDO1VQtavj8NFpdRBEvJTtg==", + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.25.9", - "@babel/parser": "^7.25.9", - "@babel/types": "^7.25.9" + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.1", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.5.tgz", - "integrity": "sha512-rkOSPOw+AXbgtwUga3U4u8RpoK9FEFWBNAlTpcnkLFjL5CT+oyHNuUUC/xx6XefEJ16r38r8Bc/lfp6rYuHeJQ==", - "peer": true, + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.1.tgz", + "integrity": "sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==", + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.5", - "@babel/parser": "^7.26.5", - "@babel/template": "^7.25.9", - "@babel/types": "^7.26.5", + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.27.1", + "@babel/parser": "^7.27.1", + "@babel/template": "^7.27.1", + "@babel/types": "^7.27.1", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -331,30 +288,30 @@ } }, "node_modules/@babel/types": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", - "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.1.tgz", + "integrity": "sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==", "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@chakra-ui/react": { - "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@chakra-ui/react/-/react-3.15.0.tgz", - "integrity": "sha512-U7mR9ru5Vhpat57nP04lenVDtaMzPKfKedhBDkesk5VUbzr5euWygjspa/tTO37ew7t7Q/pyUovXAizoWEzZ1g==", + "version": "3.24.0", + "resolved": "https://registry.npmjs.org/@chakra-ui/react/-/react-3.24.0.tgz", + "integrity": "sha512-fkKXtPJ2WVwgDAL50W2yHLzGrv8YAY6g09yrIMU8LuUkTa+xSlQJadAtuqARc0TqxTha+RL2rjPZkWvL/f0I6w==", "license": "MIT", "dependencies": { - "@ark-ui/react": "5.4.0", + "@ark-ui/react": "5.18.2", "@emotion/is-prop-valid": "1.3.1", "@emotion/serialize": "1.3.3", "@emotion/use-insertion-effect-with-fallbacks": "1.2.0", "@emotion/utils": "1.4.2", - "@pandacss/is-valid-prop": "0.41.0", + 
"@pandacss/is-valid-prop": "0.54.0", "csstype": "3.1.3", "fast-safe-stringify": "2.1.1" }, @@ -906,10 +863,11 @@ } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", - "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", "dev": true, + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^3.4.3" }, @@ -945,16 +903,15 @@ } }, "node_modules/@eslint/compat": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.2.8.tgz", - "integrity": "sha512-LqCYHdWL/QqKIJuZ/ucMAv8d4luKGs4oCPgpt8mWztQAtPrHfXKQ/XAUc8ljCHAfJCn6SvkpTcGt5Tsh8saowA==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.3.1.tgz", + "integrity": "sha512-k8MHony59I5EPic6EQTCNOuPoVBnoYXkP+20xvwFjN7t0qI3ImyvyBgg+hIVPwC8JaxVjjUZld+cLfBLFDLucg==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "peerDependencies": { - "eslint": "^9.10.0" + "eslint": "^8.40 || 9" }, "peerDependenciesMeta": { "eslint": { @@ -963,11 +920,10 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.19.2", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.2.tgz", - "integrity": "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==", + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", "dev": true, - "license": "Apache-2.0", "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", @@ -978,19 +934,18 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.0.tgz", - "integrity": "sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", + "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/core": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz", - "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==", + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1038,13 +993,16 @@ } }, "node_modules/@eslint/js": { - "version": "9.23.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.23.0.tgz", - "integrity": "sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw==", + "version": "9.32.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.32.0.tgz", + 
"integrity": "sha512-BBpRFZK3eX6uMLKz8WxFOBIFFcGFJ/g8XuwjTHCqHROSIsopI+ddn/d5Cfh36+7+e5edVS8dbSHnBNhrLEX0zg==", "dev": true, "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" } }, "node_modules/@eslint/object-schema": { @@ -1052,19 +1010,18 @@ "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.7.tgz", - "integrity": "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.4.tgz", + "integrity": "sha512-Ul5l+lHEcw3L5+k8POx6r74mxEYKG5kOb6Xpy2gCRW6zweT6TEhAf8vhxGgjhqrd/VO/Dirhsb+1hNpD1ue9hw==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.12.0", + "@eslint/core": "^0.15.1", "levn": "^0.4.1" }, "engines": { @@ -1072,28 +1029,28 @@ } }, "node_modules/@floating-ui/core": { - "version": "1.6.9", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.9.tgz", - "integrity": "sha512-uMXCuQ3BItDUbAMhIXw7UPXRfAlOAvZzdK9BWpE60MCn+Svt3aLn9jsPTi/WNGlRUu2uI0v5S7JiIUsbsvh3fw==", + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz", + "integrity": "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==", "license": "MIT", "dependencies": { - "@floating-ui/utils": "^0.2.9" + "@floating-ui/utils": "^0.2.10" } }, "node_modules/@floating-ui/dom": { - "version": "1.6.13", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.13.tgz", - "integrity": "sha512-umqzocjDgNRGTuO7Q8CU32dkHkECqI8ZdMZ5Swb6QAM0t5rnlrN3lGo1hdpscRd3WS8T6DKYK4ephgIH9iRh3w==", + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.2.tgz", + "integrity": "sha512-7cfaOQuCS27HD7DX+6ib2OrnW+b4ZBwDNnCcT0uTyidcmyWb03FnQqJybDBoCnpdxwBSfA94UAYlRCt7mV+TbA==", "license": "MIT", "dependencies": { - "@floating-ui/core": "^1.6.0", - "@floating-ui/utils": "^0.2.9" + "@floating-ui/core": "^1.7.2", + "@floating-ui/utils": "^0.2.10" } }, "node_modules/@floating-ui/utils": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.9.tgz", - "integrity": "sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==", + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", + "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", "license": "MIT" }, "node_modules/@hey-api/openapi-ts": { @@ -1181,18 +1138,18 @@ } }, "node_modules/@internationalized/date": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.7.0.tgz", - "integrity": "sha512-VJ5WS3fcVx0bejE/YHfbDKR/yawZgKqn/if+oEeLqNwBtPzVB06olkfcnojTmEMX+gTpH+FlQ69SHNitJ8/erQ==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.8.2.tgz", + "integrity": "sha512-/wENk7CbvLbkUvX1tu0mwq49CVkkWpkXubGel6birjRPyo6uQ4nQpnq5xZu823zRCwwn82zgHrvgF1vZyvmVgA==", "license": 
"Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } }, "node_modules/@internationalized/number": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@internationalized/number/-/number-3.6.0.tgz", - "integrity": "sha512-PtrRcJVy7nw++wn4W2OuePQQfTqDzfusSuY1QTtui4wa7r+rGVtR75pO8CyKvHvzyQYi3Q1uO5sY0AsB4e65Bw==", + "version": "3.6.3", + "resolved": "https://registry.npmjs.org/@internationalized/number/-/number-3.6.3.tgz", + "integrity": "sha512-p+Zh1sb6EfrfVaS86jlHGQ9HA66fJhV9x5LiE5vCbZtXEHAuhcmUZUdZ4WrFpUBfNalr2OkAJI5AcKEQF+Lebw==", "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" @@ -1301,9 +1258,9 @@ } }, "node_modules/@pandacss/is-valid-prop": { - "version": "0.41.0", - "resolved": "https://registry.npmjs.org/@pandacss/is-valid-prop/-/is-valid-prop-0.41.0.tgz", - "integrity": "sha512-BE6h6CsJk14ugIRrsazJtN3fcg+KDFRat1Bs93YFKH6jd4DOb1yUyVvC70jKqPVvg70zEcV8acZ7VdcU5TLu+w==" + "version": "0.54.0", + "resolved": "https://registry.npmjs.org/@pandacss/is-valid-prop/-/is-valid-prop-0.54.0.tgz", + "integrity": "sha512-UhRgg1k9VKRCBAHl+XUK3lvN0k9bYifzYGZOqajDid4L1DyU813A1L0ZwN4iV9WX5TX3PfUugqtgG9LnIeFGBQ==" }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", @@ -1317,289 +1274,330 @@ } }, "node_modules/@pkgr/core": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.0.tgz", - "integrity": "sha512-vsJDAkYR6qCPu+ioGScGiMYR7LvZYIXh/dlQeviqoTWNCVfKTLYD/LkNWH4Mxsv2a5vpIRc77FN5DnmK1eBggQ==", + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.7.tgz", + "integrity": "sha512-YLT9Zo3oNPJoBjBc4q8G2mjU4tqIbf5CEOORbUUr48dCD9q3umJ3IPlVqOqDakPfd2HuwccBaqlGhN4Gmr5OWg==", "dev": true, "license": "MIT", "engines": { "node": "^12.20.0 || ^14.18.0 || >=16.0.0" }, "funding": { - "url": "https://opencollective.com/unts" + "url": "https://opencollective.com/pkgr" } }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.30.1.tgz", - "integrity": "sha512-pSWY+EVt3rJ9fQ3IqlrEUtXh3cGqGtPDH1FQlNZehO2yYxCHEX1SPsz1M//NXwYfbTlcKr9WObLnJX9FsS9K1Q==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.40.1.tgz", + "integrity": "sha512-kxz0YeeCrRUHz3zyqvd7n+TVRlNyTifBsmnmNPtk3hQURUyG9eAB+usz6DAwagMusjx/zb3AjvDUvhFGDAexGw==", "cpu": [ "arm" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "android" ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.30.1.tgz", - "integrity": "sha512-/NA2qXxE3D/BRjOJM8wQblmArQq1YoBVJjrjoTSBS09jgUisq7bqxNHJ8kjCHeV21W/9WDGwJEWSN0KQ2mtD/w==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.40.1.tgz", + "integrity": "sha512-PPkxTOisoNC6TpnDKatjKkjRMsdaWIhyuMkA4UsBXT9WEZY4uHezBTjs6Vl4PbqQQeu6oION1w2voYZv9yquCw==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "android" ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.30.1", - 
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.30.1.tgz", - "integrity": "sha512-r7FQIXD7gB0WJ5mokTUgUWPl0eYIH0wnxqeSAhuIwvnnpjdVB8cRRClyKLQr7lgzjctkbp5KmswWszlwYln03Q==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.40.1.tgz", + "integrity": "sha512-VWXGISWFY18v/0JyNUy4A46KCFCb9NVsH+1100XP31lud+TzlezBbz24CYzbnA4x6w4hx+NYCXDfnvDVO6lcAA==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "darwin" ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.30.1.tgz", - "integrity": "sha512-x78BavIwSH6sqfP2xeI1hd1GpHL8J4W2BXcVM/5KYKoAD3nNsfitQhvWSw+TFtQTLZ9OmlF+FEInEHyubut2OA==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.40.1.tgz", + "integrity": "sha512-nIwkXafAI1/QCS7pxSpv/ZtFW6TXcNUEHAIA9EIyw5OzxJZQ1YDrX+CL6JAIQgZ33CInl1R6mHet9Y/UZTg2Bw==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "darwin" ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.30.1.tgz", - "integrity": "sha512-HYTlUAjbO1z8ywxsDFWADfTRfTIIy/oUlfIDmlHYmjUP2QRDTzBuWXc9O4CXM+bo9qfiCclmHk1x4ogBjOUpUQ==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.40.1.tgz", + "integrity": "sha512-BdrLJ2mHTrIYdaS2I99mriyJfGGenSaP+UwGi1kB9BLOCu9SR8ZpbkmmalKIALnRw24kM7qCN0IOm6L0S44iWw==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "freebsd" ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.30.1.tgz", - "integrity": "sha512-1MEdGqogQLccphhX5myCJqeGNYTNcmTyaic9S7CG3JhwuIByJ7J05vGbZxsizQthP1xpVx7kd3o31eOogfEirw==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.40.1.tgz", + "integrity": "sha512-VXeo/puqvCG8JBPNZXZf5Dqq7BzElNJzHRRw3vjBE27WujdzuOPecDPc/+1DcdcTptNBep3861jNq0mYkT8Z6Q==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "freebsd" ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.30.1.tgz", - "integrity": "sha512-PaMRNBSqCx7K3Wc9QZkFx5+CX27WFpAMxJNiYGAXfmMIKC7jstlr32UhTgK6T07OtqR+wYlWm9IxzennjnvdJg==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.40.1.tgz", + "integrity": "sha512-ehSKrewwsESPt1TgSE/na9nIhWCosfGSFqv7vwEtjyAqZcvbGIg4JAcV7ZEh2tfj/IlfBeZjgOXm35iOOjadcg==", "cpu": [ "arm" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.30.1.tgz", - "integrity": "sha512-B8Rcyj9AV7ZlEFqvB5BubG5iO6ANDsRKlhIxySXcF1axXYUyqwBok+XZPgIYGBgs7LDXfWfifxhw0Ik57T0Yug==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.40.1.tgz", + "integrity": 
"sha512-m39iO/aaurh5FVIu/F4/Zsl8xppd76S4qoID8E+dSRQvTyZTOI2gVk3T4oqzfq1PtcvOfAVlwLMK3KRQMaR8lg==", "cpu": [ "arm" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.30.1.tgz", - "integrity": "sha512-hqVyueGxAj3cBKrAI4aFHLV+h0Lv5VgWZs9CUGqr1z0fZtlADVV1YPOij6AhcK5An33EXaxnDLmJdQikcn5NEw==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.40.1.tgz", + "integrity": "sha512-Y+GHnGaku4aVLSgrT0uWe2o2Rq8te9hi+MwqGF9r9ORgXhmHK5Q71N757u0F8yU1OIwUIFy6YiJtKjtyktk5hg==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.30.1.tgz", - "integrity": "sha512-i4Ab2vnvS1AE1PyOIGp2kXni69gU2DAUVt6FSXeIqUCPIR3ZlheMW3oP2JkukDfu3PsexYRbOiJrY+yVNSk9oA==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.40.1.tgz", + "integrity": "sha512-jEwjn3jCA+tQGswK3aEWcD09/7M5wGwc6+flhva7dsQNRZZTe30vkalgIzV4tjkopsTS9Jd7Y1Bsj6a4lzz8gQ==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.30.1.tgz", - "integrity": "sha512-fARcF5g296snX0oLGkVxPmysetwUk2zmHcca+e9ObOovBR++9ZPOhqFUM61UUZ2EYpXVPN1redgqVoBB34nTpQ==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.40.1.tgz", + "integrity": "sha512-ySyWikVhNzv+BV/IDCsrraOAZ3UaC8SZB67FZlqVwXwnFhPihOso9rPOxzZbjp81suB1O2Topw+6Ug3JNegejQ==", "cpu": [ "loong64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.30.1.tgz", - "integrity": "sha512-GLrZraoO3wVT4uFXh67ElpwQY0DIygxdv0BNW9Hkm3X34wu+BkqrDrkcsIapAY+N2ATEbvak0XQ9gxZtCIA5Rw==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.40.1.tgz", + "integrity": "sha512-BvvA64QxZlh7WZWqDPPdt0GH4bznuL6uOO1pmgPnnv86rpUpc8ZxgZwcEgXvo02GRIZX1hQ0j0pAnhwkhwPqWg==", "cpu": [ "ppc64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.30.1.tgz", - "integrity": "sha512-0WKLaAUUHKBtll0wvOmh6yh3S0wSU9+yas923JIChfxOaaBarmb/lBKPF0w/+jTVozFnOXJeRGZ8NvOxvk/jcw==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.40.1.tgz", + "integrity": "sha512-EQSP+8+1VuSulm9RKSMKitTav89fKbHymTf25n5+Yr6gAPZxYWpj3DzAsQqoaHAk9YX2lwEyAf9S4W8F4l3VBQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.40.1", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.40.1.tgz", + "integrity": "sha512-n/vQ4xRZXKuIpqukkMXZt9RWdl+2zgGNx7Uda8NtmLJ06NL8jiHxUawbwC+hdSq1rrw/9CghCpEONor+l1e2gA==", "cpu": [ "riscv64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.30.1.tgz", - "integrity": "sha512-GWFs97Ruxo5Bt+cvVTQkOJ6TIx0xJDD/bMAOXWJg8TCSTEK8RnFeOeiFTxKniTc4vMIaWvCplMAFBt9miGxgkA==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.40.1.tgz", + "integrity": "sha512-h8d28xzYb98fMQKUz0w2fMc1XuGzLLjdyxVIbhbil4ELfk5/orZlSTpF/xdI9C8K0I8lCkq+1En2RJsawZekkg==", "cpu": [ "s390x" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.30.1.tgz", - "integrity": "sha512-UtgGb7QGgXDIO+tqqJ5oZRGHsDLO8SlpE4MhqpY9Llpzi5rJMvrK6ZGhsRCST2abZdBqIBeXW6WPD5fGK5SDwg==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.40.1.tgz", + "integrity": "sha512-XiK5z70PEFEFqcNj3/zRSz/qX4bp4QIraTy9QjwJAb/Z8GM7kVUsD0Uk8maIPeTyPCP03ChdI+VVmJriKYbRHQ==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.30.1.tgz", - "integrity": "sha512-V9U8Ey2UqmQsBT+xTOeMzPzwDzyXmnAoO4edZhL7INkwQcaW1Ckv3WJX3qrrp/VHaDkEWIBWhRwP47r8cdrOow==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.40.1.tgz", + "integrity": "sha512-2BRORitq5rQ4Da9blVovzNCMaUlyKrzMSvkVR0D4qPuOy/+pMCrh1d7o01RATwVy+6Fa1WBw+da7QPeLWU/1mQ==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.30.1.tgz", - "integrity": "sha512-WabtHWiPaFF47W3PkHnjbmWawnX/aE57K47ZDT1BXTS5GgrBUEpvOzq0FI0V/UYzQJgdb8XlhVNH8/fwV8xDjw==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.40.1.tgz", + "integrity": "sha512-b2bcNm9Kbde03H+q+Jjw9tSfhYkzrDUf2d5MAd1bOJuVplXvFhWz7tRtWvD8/ORZi7qSCy0idW6tf2HgxSXQSg==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "win32" ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.30.1.tgz", - "integrity": "sha512-pxHAU+Zv39hLUTdQQHUVHf4P+0C47y/ZloorHpzs2SXMRqeAWmGghzAhfOlzFHHwjvgokdFAhC4V+6kC1lRRfw==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.40.1.tgz", + "integrity": "sha512-DfcogW8N7Zg7llVEfpqWMZcaErKfsj9VvmfSyRjCyo4BI3wPEfrzTtJkZG6gKP/Z92wFm6rz2aDO7/JfiR/whA==", "cpu": [ "ia32" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "win32" ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": 
"4.30.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.30.1.tgz", - "integrity": "sha512-D6qjsXGcvhTjv0kI4fU8tUuBDF/Ueee4SVX79VfNDXZa64TfCW1Slkb6Z7O1p7vflqZjcmOVdZlqf8gvJxc6og==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.40.1.tgz", + "integrity": "sha512-ECyOuDeH3C1I8jH2MK1RtBJW+YPMvSfT0a5NN0nHfQYnDSJ6tUiZH3gzwVP5/Kfh/+Tt7tpWVF9LXNTnhTJ3kA==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "win32" ] }, "node_modules/@stylistic/eslint-plugin": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-2.13.0.tgz", - "integrity": "sha512-RnO1SaiCFHn666wNz2QfZEFxvmiNRqhzaMXHXxXXKt+MEP7aajlPxUSMIQpKAaJfverpovEYqjBOXDq6dDcaOQ==", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-5.2.2.tgz", + "integrity": "sha512-bE2DUjruqXlHYP3Q2Gpqiuj2bHq7/88FnuaS0FjeGGLCy+X6a07bGVuwtiOYnPSLHR6jmx5Bwdv+j7l8H+G97A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/utils": "^8.13.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/types": "^8.37.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", "estraverse": "^5.3.0", - "picomatch": "^4.0.2" + "picomatch": "^4.0.3" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "peerDependencies": { - "eslint": ">=8.40.0" + "eslint": ">=9.0.0" } }, "node_modules/@stylistic/eslint-plugin/node_modules/picomatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", - "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", "engines": { @@ -1610,15 +1608,15 @@ } }, "node_modules/@swc/core": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.11.12.tgz", - "integrity": "sha512-Jwx9JH1O6Vm7BS9AEPLlquJNSy6Lbt/kiJIlxSslDuBLeDJD13lXQfitvazqgRwGEHx1QmwEq8mc0OSristtRw==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.13.1.tgz", + "integrity": "sha512-jEKKErLC6uwSqA+p6bmZR08usZM5Fpc+HdEu5CAzvye0q43yf1si1kjhHEa9XMkz0A2SAaal3eKCg/YYmtOsCA==", "dev": true, "hasInstallScript": true, "license": "Apache-2.0", "dependencies": { "@swc/counter": "^0.1.3", - "@swc/types": "^0.1.19" + "@swc/types": "^0.1.23" }, "engines": { "node": ">=10" @@ -1628,19 +1626,19 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.11.12", - "@swc/core-darwin-x64": "1.11.12", - "@swc/core-linux-arm-gnueabihf": "1.11.12", - "@swc/core-linux-arm64-gnu": "1.11.12", - "@swc/core-linux-arm64-musl": "1.11.12", - "@swc/core-linux-x64-gnu": "1.11.12", - "@swc/core-linux-x64-musl": "1.11.12", - "@swc/core-win32-arm64-msvc": "1.11.12", - "@swc/core-win32-ia32-msvc": "1.11.12", - "@swc/core-win32-x64-msvc": "1.11.12" + "@swc/core-darwin-arm64": "1.13.1", + "@swc/core-darwin-x64": "1.13.1", + "@swc/core-linux-arm-gnueabihf": "1.13.1", + "@swc/core-linux-arm64-gnu": "1.13.1", + "@swc/core-linux-arm64-musl": "1.13.1", + "@swc/core-linux-x64-gnu": "1.13.1", + "@swc/core-linux-x64-musl": 
"1.13.1", + "@swc/core-win32-arm64-msvc": "1.13.1", + "@swc/core-win32-ia32-msvc": "1.13.1", + "@swc/core-win32-x64-msvc": "1.13.1" }, "peerDependencies": { - "@swc/helpers": "*" + "@swc/helpers": ">=0.5.17" }, "peerDependenciesMeta": { "@swc/helpers": { @@ -1649,9 +1647,9 @@ } }, "node_modules/@swc/core-darwin-arm64": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.11.12.tgz", - "integrity": "sha512-x+iljeyIaVq7VCAy9pM0rqAb9GKA1cqDkqCxgFDxH3rcH+ykZa12vkDlTwysgkfLV8pr0KhCRHkwY+iAqPbO9g==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.13.1.tgz", + "integrity": "sha512-zO6SW/jSMTUORPm6dUZFPUwf+EFWZsaXWMGXadRG6akCofYpoQb8pcY2QZkVr43z8TMka6BtXpyoD/DJ0iOPHQ==", "cpu": [ "arm64" ], @@ -1666,9 +1664,9 @@ } }, "node_modules/@swc/core-darwin-x64": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.11.12.tgz", - "integrity": "sha512-DwTXPdhJ/+scUR1iWttu3p0q8b5omF71xWFCw6UC99QBJQ4femmRtZNacgdiBkxZ5IbUlxd8m5UzMBc/+H5rWw==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.13.1.tgz", + "integrity": "sha512-8RjaTZYxrlYKE5PgzZYWSOT4mAsyhIuh30Nu4dnn/2r0Ef68iNCbvX4ynGnFMhOIhqunjQbJf+mJKpwTwdHXhw==", "cpu": [ "x64" ], @@ -1683,9 +1681,9 @@ } }, "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.11.12.tgz", - "integrity": "sha512-ls9b3lX2x3tnJKGn6zSDFK1ohdmdUkE6nwqrVmdzqAwr/Q5i2ij/dmkOFCloItc2PHNVtRGGsC4+FYSm1EBLjg==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.13.1.tgz", + "integrity": "sha512-jEqK6pECs2m4BpL2JA/4CCkq04p6iFOEtVNXTisO+lJ3zwmxlnIEm9UfJZG6VSu8GS9MHRKGB0ieZ1tEdN1qDA==", "cpu": [ "arm" ], @@ -1700,9 +1698,9 @@ } }, "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.11.12.tgz", - "integrity": "sha512-F0nMLl5kYbew5GjHq7B21poE5VOPgSsoQ0VEXd4Fji3rR0d0gLoK2r+JP92XmpRxAzdzpdak1DQczWMyf2BQAQ==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.13.1.tgz", + "integrity": "sha512-PbkuIOYXO/gQbWQ7NnYIwm59ygNqmUcF8LBeoKvxhx1VtOwE+9KiTfoplOikkPLhMiTzKsd8qentTslbITIg+Q==", "cpu": [ "arm64" ], @@ -1717,9 +1715,9 @@ } }, "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.11.12.tgz", - "integrity": "sha512-3dlHowBgYBgi23ZBSvFHe/tD3PowEhxfVAy08NckWBeaG/e4dyrYMhAiccfuy6jkDYXEF1L2DtpRtxGImxoaPg==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.13.1.tgz", + "integrity": "sha512-JaqFdBCarIBKiMu5bbAp+kWPMNGg97ej+7KzbKOzWP5pRptqKi86kCDZT3WmjPe8hNG6dvBwbm7Y8JNry5LebQ==", "cpu": [ "arm64" ], @@ -1734,9 +1732,9 @@ } }, "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.11.12.tgz", - "integrity": "sha512-ToEWzLA5lXlYCbGNzMow6+uy4zhpXKQyFb3RHM8AYVb0n4pNPWvwF+8ybWDimeGBBaHJLgRQsUMuJ4NV6urSrA==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.13.1.tgz", + 
"integrity": "sha512-t4cLkku10YECDaakWUH0452WJHIZtrLPRwezt6BdoMntVMwNjvXRX7C8bGuYcKC3YxRW7enZKFpozLhQIQ37oA==", "cpu": [ "x64" ], @@ -1751,9 +1749,9 @@ } }, "node_modules/@swc/core-linux-x64-musl": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.11.12.tgz", - "integrity": "sha512-N5xF+MDZr79e8gvVXX3YP1bMeaRL16Kst/R7bGUQvvCq1UGD86qMUtSr5KfCl0h5SNKP2YKtkN98HQLnGEikow==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.13.1.tgz", + "integrity": "sha512-fSMwZOaG+3ukUucbEbzz9GhzGhUhXoCPqHe9qW0/Vc2IZRp538xalygKyZynYweH5d9EHux1aj3+IO8/xBaoiA==", "cpu": [ "x64" ], @@ -1768,9 +1766,9 @@ } }, "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.11.12.tgz", - "integrity": "sha512-/PYiyYWSQRtMoOamMfhAfq0y3RWk9LpUZ49yetJn2XI85TRkL5u2DTLLNkTPvoTiCfo0eZOJF9t5b7Z6ly0iHQ==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.13.1.tgz", + "integrity": "sha512-tweCXK/79vAwj1NhAsYgICy8T1z2QEairmN2BFEBYFBFNMEB1iI1YlXwBkBtuihRvgZrTh1ORusKa4jLYzLCZA==", "cpu": [ "arm64" ], @@ -1785,9 +1783,9 @@ } }, "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.11.12.tgz", - "integrity": "sha512-Dxm6W4p0YVNIPnYh/Kf/9zPeaD6sVAGDQN+2c52l4m/4gR5aDgE+xg6k5lAt4ok7LDXInL3n1nwYEG7Tc4JcSQ==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.13.1.tgz", + "integrity": "sha512-zi7hO9D+2R2yQN9D7T10/CAI9KhuXkNkz8tcJOW6+dVPtAk/gsIC5NoGPELjgrAlLL9CS38ZQpLDslLfpP15ng==", "cpu": [ "ia32" ], @@ -1802,9 +1800,9 @@ } }, "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.11.12", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.11.12.tgz", - "integrity": "sha512-PP8RSJTcda5nUHJGkbKeQ20OC+L2LxcbjYpyha1OqIFyu/qWG9zMMYVaTLKJL7zsJ14pIM/mpS3u+CJARQ+Hzw==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.13.1.tgz", + "integrity": "sha512-KubYjzqs/nz3H69ncX/XHKsC8c1xqc7UvonQAj26BhbL22HBsqdAaVutZ+Obho6RMpd3F5qQ95ldavUTWskRrw==", "cpu": [ "x64" ], @@ -1826,18 +1824,18 @@ "license": "Apache-2.0" }, "node_modules/@swc/helpers": { - "version": "0.5.15", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", - "integrity": "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==", + "version": "0.5.17", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", + "integrity": "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.8.0" } }, "node_modules/@swc/types": { - "version": "0.1.19", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.19.tgz", - "integrity": "sha512-WkAZaAfj44kh/UFdAQcrMP1I0nwRqpt27u+08LMBYMqmQfwwMofYoMh/48NGkMMRfC4ynpfwRbJuu8ErfNloeA==", + "version": "0.1.23", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.23.tgz", + "integrity": "sha512-u1iIVZV9Q0jxY+yM2vw/hZGDNudsN85bBpTqzAQ9rzkxW9D+e3aEM4Han+ow518gSewkXgjmEK0BD79ZcNVgPw==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1845,9 +1843,9 @@ } }, 
"node_modules/@tanstack/query-core": { - "version": "5.71.5", - "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.71.5.tgz", - "integrity": "sha512-XOQ5SyjCdwhxyLksGKWSL5poqyEXYPDnsrZAzJm2LgrMm4Yh6VOrfC+IFosXreDw9HNqC11YAMY3HlfHjNzuaA==", + "version": "5.83.1", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.83.1.tgz", + "integrity": "sha512-OG69LQgT7jSp+5pPuCfzltq/+7l2xoweggjme9vlbCPa/d7D7zaqv5vN/S82SzSYZ4EDLTxNO1PWrv49RAS64Q==", "license": "MIT", "funding": { "type": "github", @@ -1855,12 +1853,12 @@ } }, "node_modules/@tanstack/react-query": { - "version": "5.71.5", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.71.5.tgz", - "integrity": "sha512-WpxZWy4fDASjY+iAaXB+aY+LC95PQ34W6EWVkjJ0hdzWWbczFnr9nHvHkVDpwdR18I1NO8igNGQJFrLrgyzI8Q==", + "version": "5.84.1", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.84.1.tgz", + "integrity": "sha512-zo7EUygcWJMQfFNWDSG7CBhy8irje/XY0RDVKKV4IQJAysb+ZJkkJPcnQi+KboyGUgT+SQebRFoTqLuTtfoDLw==", "license": "MIT", "dependencies": { - "@tanstack/query-core": "5.71.5" + "@tanstack/query-core": "5.83.1" }, "funding": { "type": "github", @@ -1891,17 +1889,18 @@ } }, "node_modules/@testing-library/jest-dom": { - "version": "6.6.3", - "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.6.3.tgz", - "integrity": "sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==", + "version": "6.6.4", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.6.4.tgz", + "integrity": "sha512-xDXgLjVunjHqczScfkCJ9iyjdNOVHvvCdqHSSxwM9L0l/wHkTRum67SDc020uAlCoqktJplgO2AAQeLP1wgqDQ==", "dev": true, + "license": "MIT", "dependencies": { "@adobe/css-tools": "^4.4.0", "aria-query": "^5.0.0", - "chalk": "^3.0.0", "css.escape": "^1.5.1", "dom-accessibility-api": "^0.6.3", "lodash": "^4.17.21", + "picocolors": "^1.1.1", "redent": "^3.0.0" }, "engines": { @@ -1910,19 +1909,6 @@ "yarn": ">=1" } }, - "node_modules/@testing-library/jest-dom/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz", @@ -1930,9 +1916,9 @@ "dev": true }, "node_modules/@testing-library/react": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.2.0.tgz", - "integrity": "sha512-2cSskAvA1QNtKc8Y9VJQRv0tm3hLVgxRGDB+KYhIaPQJ1I+RHbhIXcM+zClKXzMes/wshsMVzf4B9vS4IZpqDQ==", + "version": "16.3.0", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.0.tgz", + "integrity": "sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==", "dev": true, "license": "MIT", "dependencies": { @@ -1958,137 +1944,40 @@ } }, "node_modules/@trivago/prettier-plugin-sort-imports": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/@trivago/prettier-plugin-sort-imports/-/prettier-plugin-sort-imports-4.3.0.tgz", - "integrity": 
"sha512-r3n0onD3BTOVUNPhR4lhVK4/pABGpbA7bW3eumZnYdKaHkf1qEC+Mag6DPbGNuuh0eG8AaYj+YqmVHSiGslaTQ==", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@trivago/prettier-plugin-sort-imports/-/prettier-plugin-sort-imports-5.2.2.tgz", + "integrity": "sha512-fYDQA9e6yTNmA13TLVSA+WMQRc5Bn/c0EUBditUHNfMMxN7M82c38b1kEggVE3pLpZ0FwkwJkUEKMiOi52JXFA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@babel/generator": "7.17.7", - "@babel/parser": "^7.20.5", - "@babel/traverse": "7.23.2", - "@babel/types": "7.17.0", - "javascript-natural-sort": "0.7.1", + "@babel/generator": "^7.26.5", + "@babel/parser": "^7.26.7", + "@babel/traverse": "^7.26.7", + "@babel/types": "^7.26.7", + "javascript-natural-sort": "^0.7.1", "lodash": "^4.17.21" }, + "engines": { + "node": ">18.12" + }, "peerDependencies": { "@vue/compiler-sfc": "3.x", - "prettier": "2.x - 3.x" + "prettier": "2.x - 3.x", + "prettier-plugin-svelte": "3.x", + "svelte": "4.x || 5.x" }, "peerDependenciesMeta": { "@vue/compiler-sfc": { "optional": true + }, + "prettier-plugin-svelte": { + "optional": true + }, + "svelte": { + "optional": true } } }, - "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/@babel/generator": { - "version": "7.17.7", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.17.7.tgz", - "integrity": "sha512-oLcVCTeIFadUoArDTwpluncplrYBmTCCZZgXCbgNGvOBBiSDDK3eWO4b/+eOTli5tKv1lg+a5/NAXg+nTcei1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.17.0", - "jsesc": "^2.5.1", - "source-map": "^0.5.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/@babel/traverse": { - "version": "7.23.2", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz", - "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/generator": "^7.23.0", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.23.0", - "@babel/types": "^7.23.0", - "debug": "^4.1.0", - "globals": "^11.1.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/@babel/traverse/node_modules/@babel/generator": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.0.tgz", - "integrity": "sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.27.0", - "@babel/types": "^7.27.0", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/@babel/traverse/node_modules/@babel/types": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", - "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/@trivago/prettier-plugin-sort-imports/node_modules/@babel/traverse/node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/@babel/types": { - "version": "7.17.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.17.0.tgz", - "integrity": "sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.16.7", - "to-fast-properties": "^2.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/@ts-morph/common": { "version": "0.23.0", "resolved": "https://registry.npmjs.org/@ts-morph/common/-/common-0.23.0.tgz", @@ -2151,17 +2040,29 @@ "dev": true, "peer": true }, - "node_modules/@types/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", + "node_modules/@types/chai": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", + "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, "license": "MIT" }, "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", - "dev": true + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", + "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", + "dev": true, + "license": "MIT" }, "node_modules/@types/hoist-non-react-statics": { "version": "3.3.6", @@ -2179,12 +2080,15 @@ "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", "dev": true }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", - "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "node_modules/@types/node": { + "version": "20.19.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.1.tgz", + "integrity": 
"sha512-jJD50LtlD2dodAEO653i3YF04NWak6jN3ky+Ri3Em3mGR39/glWiboM/IePaRbgwSfqM1TpGXfAg8ohn/4dTgA==", "dev": true, - "license": "MIT" + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } }, "node_modules/@types/parse-json": { "version": "4.0.2", @@ -2192,48 +2096,41 @@ "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", "peer": true }, - "node_modules/@types/prop-types": { - "version": "15.7.14", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.14.tgz", - "integrity": "sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ==", - "license": "MIT" - }, "node_modules/@types/react": { - "version": "18.3.20", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.20.tgz", - "integrity": "sha512-IPaCZN7PShZK/3t6Q87pfTkRm6oLTd4vztyoj+cbHUF1g3FfVb2tFIL79uCRKEfv16AhqDMBywP2VW3KIZUvcg==", + "version": "19.1.9", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.9.tgz", + "integrity": "sha512-WmdoynAX8Stew/36uTSVMcLJJ1KRh6L3IZRx1PZ7qJtBqT3dYTgyDTx8H1qoRghErydW7xw9mSJ3wS//tCRpFA==", "license": "MIT", "dependencies": { - "@types/prop-types": "*", "csstype": "^3.0.2" } }, "node_modules/@types/react-dom": { - "version": "19.1.1", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.1.tgz", - "integrity": "sha512-jFf/woGTVTjUJsl2O7hcopJ1r0upqoq/vIOoCj0yLh3RIXxWcljlpuZ+vEBRXsymD1jhfeJrlyTy/S1UW+4y1w==", + "version": "19.1.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.7.tgz", + "integrity": "sha512-i5ZzwYpqjmrKenzkoLM2Ibzt6mAsM7pxB6BCIouEVVmgiqaMj1TjaK7hnA36hbW5aZv20kx7Lw6hWzPWg0Rurw==", "dev": true, "license": "MIT", "peerDependencies": { "@types/react": "^19.0.0" } }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.29.0.tgz", - "integrity": "sha512-PAIpk/U7NIS6H7TEtN45SPGLQaHNgB7wSjsQV/8+KYokAb2T/gloOA/Bee2yd4/yKVhPKe5LlaUGhAZk5zmSaQ==", + "node_modules/@types/whatwg-mimetype": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/whatwg-mimetype/-/whatwg-mimetype-3.0.2.tgz", + "integrity": "sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.38.0.tgz", + "integrity": "sha512-WJw3AVlFFcdT9Ri1xs/lg8LwDqgekWXWhH3iAF+1ZM+QPd7oxQ6jvtW/JPwzAScxitILUIFs0/AnQ/UWHzbATQ==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.29.0", - "@typescript-eslint/type-utils": "8.29.0", - "@typescript-eslint/utils": "8.29.0", - "@typescript-eslint/visitor-keys": "8.29.0", - "graphemer": "^1.4.0", - "ignore": "^5.3.1", - "natural-compare": "^1.4.0", - "ts-api-utils": "^2.0.1" + "@typescript-eslint/types": "8.38.0", + "@typescript-eslint/visitor-keys": "8.38.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2241,47 +2138,31 @@ "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typescript-eslint/parser": { - 
"version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.29.0.tgz", - "integrity": "sha512-8C0+jlNJOwQso2GapCVWWfW/rzaq7Lbme+vGUFKE31djwNncIpgXD7Cd4weEsDdkoZDjH0lwwr3QDQFuyrMg9g==", + "node_modules/@typescript-eslint/types": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.38.0.tgz", + "integrity": "sha512-wzkUfX3plUqij4YwWaJyqhiPE5UCRVlFpKn1oCRn2O1bJ592XxWJj8ROQ3JD5MYXLORW84063z3tZTb/cs4Tyw==", "dev": true, "license": "MIT", - "dependencies": { - "@typescript-eslint/scope-manager": "8.29.0", - "@typescript-eslint/types": "8.29.0", - "@typescript-eslint/typescript-estree": "8.29.0", - "@typescript-eslint/visitor-keys": "8.29.0", - "debug": "^4.3.4" - }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.29.0.tgz", - "integrity": "sha512-aO1PVsq7Gm+tcghabUpzEnVSFMCU4/nYIgC2GOatJcllvWfnhrgW0ZEbnTxm36QsikmCN1K/6ZgM7fok2I7xNw==", + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.38.0.tgz", + "integrity": "sha512-pWrTcoFNWuwHlA9CvlfSsGWs14JxfN1TH25zM5L7o0pRLhsoZkDnTsXfQRJBEWJoV5DL0jf+Z+sxiud+K0mq1g==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.29.0", - "@typescript-eslint/visitor-keys": "8.29.0" + "@typescript-eslint/types": "8.38.0", + "eslint-visitor-keys": "^4.2.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2291,176 +2172,45 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/type-utils": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.29.0.tgz", - "integrity": "sha512-ahaWQ42JAOx+NKEf5++WC/ua17q5l+j1GFrbbpVKzFL/tKVc0aYY8rVSYUpUvt2hUP1YBr7mwXzx+E/DfUWI9Q==", + "node_modules/@vitejs/plugin-react-swc": { + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.11.0.tgz", + "integrity": "sha512-YTJCGFdNMHCMfjODYtxRNVAYmTWQ1Lb8PulP/2/f/oEEtglw8oKxKIZmmRkyXrVrHfsKOaVkAc3NT9/dMutO5w==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/typescript-estree": "8.29.0", - "@typescript-eslint/utils": "8.29.0", - "debug": "^4.3.4", - "ts-api-utils": "^2.0.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "@rolldown/pluginutils": "1.0.0-beta.27", + "@swc/core": "^1.12.11" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" + "vite": "^4 || ^5 || ^6 || ^7" } }, - "node_modules/@typescript-eslint/types": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.29.0.tgz", - "integrity": "sha512-wcJL/+cOXV+RE3gjCyl/V2G877+2faqvlgtso/ZRbTCnZazh0gXhe+7gbAnfubzN2bNsBtZjDvlh7ero8uIbzg==", + "node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": 
"sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", "dev": true, "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "url": "https://opencollective.com/vitest" } }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.29.0.tgz", - "integrity": "sha512-yOfen3jE9ISZR/hHpU/bmNvTtBW1NjRbkSFdZOksL1N+ybPEE7UVGMwqvS6CP022Rp00Sb0tdiIkhSCe6NI8ow==", + "node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.29.0", - "@typescript-eslint/visitor-keys": "8.29.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^2.0.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@typescript-eslint/utils": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.29.0.tgz", - "integrity": "sha512-gX/A0Mz9Bskm8avSWFcK0gP7cZpbY4AIo6B0hWYFCaIsz750oaiWR4Jr2CI+PQhfW1CpcQr9OlfPS+kMFegjXA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.29.0", - "@typescript-eslint/types": "8.29.0", - "@typescript-eslint/typescript-estree": "8.29.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.29.0.tgz", - "integrity": "sha512-Sne/pVz8ryR03NFK21VpN88dZ2FdQXOlq3VIklbrTYEt8yXtRFr9tvUhqvCeKjqYk5FSim37sHbooT6vzBTZcg==", - "dev": true, - 
"license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.29.0", - "eslint-visitor-keys": "^4.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@vitejs/plugin-react-swc": { - "version": "3.8.1", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.8.1.tgz", - "integrity": "sha512-aEUPCckHDcFyxpwFm0AIkbtv6PpUp3xTb9wYGFjtABynXjCYKkWoxX0AOK9NT9XCrdk6mBBUOeHQS+RKdcNO1A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@swc/core": "^1.11.11" - }, - "peerDependencies": { - "vite": "^4 || ^5 || ^6" - } - }, - "node_modules/@vitest/expect": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.9.tgz", - "integrity": "sha512-5eCqRItYgIML7NNVgJj6TVCmdzE7ZVgJhruW0ziSQV4V7PvLkDL1bBkBdcTs/VuIz0IxPb5da1IDSqc1TR9eig==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/spy": "3.0.9", - "@vitest/utils": "3.0.9", - "chai": "^5.2.0", - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/mocker": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.9.tgz", - "integrity": "sha512-ryERPIBOnvevAkTq+L1lD+DTFBRcjueL9lOUfXsLfwP92h4e+Heb+PjiqS3/OURWPtywfafK0kj++yDFjWUmrA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/spy": "3.0.9", + "@vitest/spy": "3.2.4", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, @@ -2469,7 +2219,7 @@ }, "peerDependencies": { "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0" + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "peerDependenciesMeta": { "msw": { @@ -2481,9 +2231,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", - "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", "dev": true, "license": "MIT", "dependencies": { @@ -2494,14 +2244,15 @@ } }, "node_modules/@vitest/runner": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.9.tgz", - "integrity": "sha512-NX9oUXgF9HPfJSwl8tUZCMP1oGx2+Sf+ru6d05QjzQz4OwWg0psEzwY6VexP2tTHWdOkhKHUIZH+fS6nA7jfOw==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.9", - "pathe": "^2.0.3" + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" }, "funding": { "url": "https://opencollective.com/vitest" @@ -2515,13 +2266,13 @@ "license": "MIT" }, "node_modules/@vitest/snapshot": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.9.tgz", - "integrity": "sha512-AiLUiuZ0FuA+/8i19mTYd+re5jqjEc2jZbgJ2up0VY0Ddyyxg/uUtBDpIFAy4uzKaQxOW8gMgBdAJJ2ydhu39A==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": 
"sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.9", + "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", "pathe": "^2.0.3" }, @@ -2537,27 +2288,27 @@ "license": "MIT" }, "node_modules/@vitest/spy": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.9.tgz", - "integrity": "sha512-/CcK2UDl0aQ2wtkp3YVWldrpLRNCfVcIOFGlVGKO4R5eajsH393Z1yiXLVQ7vWsj26JOEjeZI0x5sm5P4OGUNQ==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", "dev": true, "license": "MIT", "dependencies": { - "tinyspy": "^3.0.2" + "tinyspy": "^4.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.9.tgz", - "integrity": "sha512-ilHM5fHhZ89MCp5aAaM9uhfl1c2JdxVxl3McqsdVyVNN6JffnEen8UMCdRTzOhGXNQGo5GNL9QugHrz727Wnng==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.9", - "loupe": "^3.1.3", + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" }, "funding": { @@ -2565,509 +2316,561 @@ } }, "node_modules/@zag-js/accordion": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/accordion/-/accordion-1.7.0.tgz", - "integrity": "sha512-LNJOjLTW2KwrToXBrXIbNIAiISA94n0AdWp14H8RrskdokywmEGiC0GgWTGEJ7DNA6TGP6Ae5o9rJ4fHSmCsDQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/accordion/-/accordion-1.21.0.tgz", + "integrity": "sha512-YuuQs72AmA52Hn30l3Q8KyFDb75g9glFV7AZkUq8V52vtUsdz2PfJye1FPD06M2dnnhHjEbdTQch6Qwwe5ApBA==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/anatomy": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/anatomy/-/anatomy-1.7.0.tgz", - "integrity": "sha512-fkRgH6vPCwykmRdV38uAJeTtJc8tayAnURfoovHAtB9bK0goagPbpdcYTNyGn8msul0h+KBloOtnw4obvX0nPw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/anatomy/-/anatomy-1.21.0.tgz", + "integrity": "sha512-wL5mmewTR8FJd91ZbfwiXpoMJbaQr1F1fFDel5BJgQukScNzd53HS5zhYb15eqJIOR6tlk/itPiJkxPp/+HdcQ==", "license": "MIT" }, + "node_modules/@zag-js/angle-slider": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/angle-slider/-/angle-slider-1.21.0.tgz", + "integrity": "sha512-1d4VgxYv4LQL8PtjkYqvPlx7DsZpG0CaB1woOhPZSva7jmo0WKvTAUZf2pbk9ajTm+iA4C3xHRbVRM6s2Vy/lg==", + "license": "MIT", + "dependencies": { + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/rect-utils": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" + } + }, "node_modules/@zag-js/aria-hidden": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/aria-hidden/-/aria-hidden-1.7.0.tgz", 
- "integrity": "sha512-YNbACFZoqw/1JymxCZXtuAFdeYZm7sK3E0jv3bPbqytPj7TziLa1dRDWDdx8cPcu0B4n4WrBMBSCGUjj/nWDCA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/aria-hidden/-/aria-hidden-1.21.0.tgz", + "integrity": "sha512-x78v+v/rNYoCFHeHK343kapdevywctNUEmPGdiH2BT3BI7uXZtv270WkD9OgdEOuEKuu18vbZ9TGYO9FGG8Ijw==", "license": "MIT" }, "node_modules/@zag-js/auto-resize": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/auto-resize/-/auto-resize-1.7.0.tgz", - "integrity": "sha512-ifWflzZc1fNJ+XUZaYpB220AiAr4l3Eczq8ELwj/ugg7T/10Wo0FkxTCVmCZfIiCMoqHuh/2oTX3PCTIwg6uxg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/auto-resize/-/auto-resize-1.21.0.tgz", + "integrity": "sha512-bQZUC5tP5SFdVcZ8vTA2tQy4B/YphwJaKCkG0Y6lHscpcPcZK7+kgBJaRj4XQuon7aKmgECLlD/da5PNNAdOJg==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0" + "@zag-js/dom-query": "1.21.0" } }, "node_modules/@zag-js/avatar": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/avatar/-/avatar-1.7.0.tgz", - "integrity": "sha512-vzMCMpYIM2BIvPvK34VaRMUsUSpg3jwoxCzA31k+QrCmjm3ti8pLoT4waE01XHiaQwNPcTFbMWUi/nIQQKG14A==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/avatar/-/avatar-1.21.0.tgz", + "integrity": "sha512-bRkEaoSbJ8Dae246cc0ShmXLBWDcJIcI1KoncST4ClYwCqyMIj4s/zgr1+XUlyz3imz6n1RhTeT2jKcBqFGC6Q==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/carousel": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/carousel/-/carousel-1.7.0.tgz", - "integrity": "sha512-bSbo00J7/4EhXKluQnCmH3dg+GjsI1dcogMNtY3Qe/hTUJI9F8ygXHWzkbEqe2iY8JkBucRm+IVdlAOGAjVARQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/carousel/-/carousel-1.21.0.tgz", + "integrity": "sha512-MpGLu6xVyPGDk5OupyTFywb85xrqCEs8qR0FpOH5eyNp3lvx/iLVNMcI+KTk5YTlZWQmDCyT86wBLMlf6SfTvw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/scroll-snap": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/scroll-snap": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/checkbox": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/checkbox/-/checkbox-1.7.0.tgz", - "integrity": "sha512-zhisqMrgKZNHyb5n4xN5JYdPU8P+duPJfy18SiHRMghi7rJrfnQZ/Ec+uEih1cGhu85juco5k9ud/AiT7bD6MA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/checkbox/-/checkbox-1.21.0.tgz", + "integrity": "sha512-lY9DYOvz0Cbdi3jxudv/nj9cpaGk784RiookL7QHr1u/Z/sUSNj5gUNpsIkSzZmT054Tu0t0jhtTt8vScq8DmQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/focus-visible": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-visible": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/clipboard": { - "version": 
"1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/clipboard/-/clipboard-1.7.0.tgz", - "integrity": "sha512-rPLoIE7zKBRiHwAzSu/hT21ICMP7TmSWZGvCPV0hjtAE/sFAf/rsEwcx2DT3uBhUtoFQR7tqNRn4CnIGWkr2Fg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/clipboard/-/clipboard-1.21.0.tgz", + "integrity": "sha512-hJl4o8itwvVW3Wz5Zd/OQjR2OhXKdjHqIUuvPGbKcKEWxk6X9SDISslmCH9FbKVGVDgM6q5UypaYwwJZ1SsONQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/collapsible": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/collapsible/-/collapsible-1.7.0.tgz", - "integrity": "sha512-W6+3tAC/ilU/ffCLhdJ2bMMTuZSgHnCaLMQemUUS4kMLKUyEdXTqxKzaTEqcBQfHotsYLQUfrK57hoiAKE/UgA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/collapsible/-/collapsible-1.21.0.tgz", + "integrity": "sha512-6vdZyZauYdiedlh6hcsYDF5Q5eC/vWstbP88PzeCFSxV5hKCJKxENOTd6d4OXJuYeWGkUABdgOl5MLIZVHrYCA==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/collection": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/collection/-/collection-1.7.0.tgz", - "integrity": "sha512-gH7I03ag2niEhCVgNpXBYybnIROGXmAkX+5e1rYQ60mOh2oQnK+5k9k3DRkca5rAKbu4uT6JjYFwnY9sA/NZfA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/collection/-/collection-1.21.0.tgz", + "integrity": "sha512-wJYmazXIFnr4/azWI9yeYrK3rB1d0KoaUMhOkrmGnwfp3c0U6rrUL54RuCMeyZ9WmzIUBhjZ5zc+385nsXwlPA==", "license": "MIT", "dependencies": { - "@zag-js/utils": "1.7.0" + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/color-picker": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/color-picker/-/color-picker-1.7.0.tgz", - "integrity": "sha512-t439DB6EUrcj4f+MsLOIpttr3hsP4j3OgznJwSlwWt7Wsyhu9uX7cyevA56w4L4nt7lD1AP7305eN6AnILakjg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/color-picker/-/color-picker-1.21.0.tgz", + "integrity": "sha512-vovzxNdINPloc5SCBBwZX1/qQnvpGAs++82GUDBGdrdai/ayBYUMkP6Hd0OiStkEDunECpfDv4Qff3kobUIgpg==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/color-utils": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/color-utils": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/color-utils": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/color-utils/-/color-utils-1.7.0.tgz", - "integrity": "sha512-OvBr4v0x7/Hkts4NFychApkSoV0kDuLhRdcjm1DcHbX5DBGlptnDqGZaswbs5KMYXXH23HDgnBRWmnvmfmGDkg==", + "version": "1.21.0", + "resolved": 
"https://registry.npmjs.org/@zag-js/color-utils/-/color-utils-1.21.0.tgz", + "integrity": "sha512-phUCKXeDvgnSUdLtjF6oE7HRmFEqNPkKOH2Nkhlnt9Hi8uxW9xhG3Haix7DaBhCN2DLRZqpsULpCA5eYV+S8IA==", "license": "MIT", "dependencies": { - "@zag-js/utils": "1.7.0" + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/combobox": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/combobox/-/combobox-1.7.0.tgz", - "integrity": "sha512-kaMvGoBZwiFC9KaUbHXNFkneg7grZmJlteVxk6kJXYd7JGDHhhYsFznPNIC0apvBCIEqwyBGVB/lCjK+BseZtw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/combobox/-/combobox-1.21.0.tgz", + "integrity": "sha512-aVEbcRk2JilDhGJjAmmO1YI4B8lNOeqgDxsbdWDDcgivHOzo1b5Rt+5kfyodXVOlzQAPkdq04b5/xLR9eurnJw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/aria-hidden": "1.7.0", - "@zag-js/collection": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/aria-hidden": "1.21.0", + "@zag-js/collection": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/core": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/core/-/core-1.7.0.tgz", - "integrity": "sha512-FyK1POPqgBp7DBpUIwvmBQH16+L52NaTaQJzg8iTI9mI/4m3AxZ5aN+8a8qzwGIkVI6rlDcrBkmuOcHDVIOEGA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/core/-/core-1.21.0.tgz", + "integrity": "sha512-ERQklS65W2wZD7Xvm/w/7u1nL5ZcTwK6Ppwat8EfAidBGGUB6YoZLW9Vu3I04g5SPhRmDmuIXhkTqKgIbXUUYg==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/dom-query": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/date-picker": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/date-picker/-/date-picker-1.7.0.tgz", - "integrity": "sha512-64UEmdN74I4aOPS1+7zNSl0VHzUIVLDfgXw0QZ24miMM+SYVcZ1+KSVI4yeS4SETwGpdm9YkvN4z3guCtwcS+w==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/date-picker/-/date-picker-1.21.0.tgz", + "integrity": "sha512-pfZXvjuF89NfV6CTc4BayPEAujysJ5vRSVFArsDbz5oKB8j5PCRtvHEHo0WWwgF7Jr40CTmiG68wzuDMCdXq3A==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/date-utils": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/live-region": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/date-utils": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/live-region": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" }, "peerDependencies": { "@internationalized/date": ">=3.0.0" } }, "node_modules/@zag-js/date-utils": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/date-utils/-/date-utils-1.7.0.tgz", - "integrity": "sha512-zZHFx3ZuIHB38qTQzG9/frj9nFLE3JUwMkiueIVdPEgaRl7Tx5VZ3NcDKXQn9ebmXi/Zk9YOAUBr7aGXBBOAcA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/date-utils/-/date-utils-1.21.0.tgz", + "integrity": 
"sha512-4H0Z/zQFfpTL45rUZg3tH4lJQmsV6PDTml/ptj9I8/1Mxel5eOwBdmDfQ7owm47H7MjgUvm7CqvYT9987b0KXA==", "license": "MIT", "peerDependencies": { "@internationalized/date": ">=3.0.0" } }, "node_modules/@zag-js/dialog": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/dialog/-/dialog-1.7.0.tgz", - "integrity": "sha512-gx/CtKsPg/Y+2d+HtP3tjEdl7KM+x6lUDttjDDBn9rvXFs2REW69AlcJtRzs6B22CxDPmxssGPr1oi3zaU1AUA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/dialog/-/dialog-1.21.0.tgz", + "integrity": "sha512-nAKoCnpd40UeprYl2JazDZVL3r5uHD1L4dUEeY9GlO4CINYBvt7jntVJn1xLGm1tyc4S+kFUSgI1y1DXlS+8KQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/aria-hidden": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/focus-trap": "1.7.0", - "@zag-js/remove-scroll": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/aria-hidden": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-trap": "1.21.0", + "@zag-js/remove-scroll": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/dismissable": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/dismissable/-/dismissable-1.7.0.tgz", - "integrity": "sha512-o6S++e7iaBmizIgsvLt5RwY7gn2OQGeG2etet+oaUAMtNhi/1+uGG+rTZgOMj/MGg9BYpPld5tXfk/RrlShh9Q==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/dismissable/-/dismissable-1.21.0.tgz", + "integrity": "sha512-+BewcHUJvNCRWZ4lbUqABW6EwJRM2hxf65OPcN9XCMFCAoHbezdqHXYgtU7LRvYUJyxbvLPNeUrww3D6vcyhmA==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0", - "@zag-js/interact-outside": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/dom-query": "1.21.0", + "@zag-js/interact-outside": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/dom-query": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/dom-query/-/dom-query-1.7.0.tgz", - "integrity": "sha512-cj+mKB7Sj7mqAepHMsbV4bGvDJfUYCt4d4ruYw0dVpDa1Z9N38TtztTznfrm9kuqOYcJkgE0q3Rn/kPLi8rK8g==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/dom-query/-/dom-query-1.21.0.tgz", + "integrity": "sha512-P7Aeb1hfd5GtmTO1u0HkyVUrhFYgm94NxJhqufF2W+xByz/XspDcdy0l5pHFGsK9Urvh69S4tCx5YVh0MhZYgQ==", "license": "MIT", "dependencies": { - "@zag-js/types": "1.7.0" + "@zag-js/types": "1.21.0" } }, "node_modules/@zag-js/editable": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/editable/-/editable-1.7.0.tgz", - "integrity": "sha512-tNRDr95B+mFLk6Z8Fh0+BiDiCWsUt1iR0pIjFy88Y4YjGYd8Q71yNt1SLNKTD3DZnDGmlbRUB/4CaP+jso4aYQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/editable/-/editable-1.21.0.tgz", + "integrity": "sha512-28QivG0KU8OCgsldxi6rVLuqr36cNiuy1vTEzcoc61Ue6B1D4rCBAQaAJedl5r1ki+Vzrjl3uP1ApoUwV3S/JA==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/interact-outside": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/interact-outside": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, - "node_modules/@zag-js/element-rect": { - "version": "1.7.0", - "resolved": 
"https://registry.npmjs.org/@zag-js/element-rect/-/element-rect-1.7.0.tgz", - "integrity": "sha512-j0h1+DASUI5urwBCELdjfk4oekLQ0D2v3a1wQJopGh+ITRVAC1gE1YFx3O+vnP2HwqANxG4+RQHwoQBM2bMBCQ==", - "license": "MIT" - }, - "node_modules/@zag-js/element-size": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/element-size/-/element-size-1.7.0.tgz", - "integrity": "sha512-Nq+HxG64Ts1QvaJPeDuy8zo/RqcbE95RPNVuHBwuxK3sbXOt7umgIrxQMp8uH+1xeJlp7F8/ydKOPyKOTtgiJg==", - "license": "MIT" - }, "node_modules/@zag-js/file-upload": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/file-upload/-/file-upload-1.7.0.tgz", - "integrity": "sha512-6yJhUDLYsqbd0YBO70PzMDNVJJv8OdC0ZWrf51GMUSugGfSpvQZNDfpAW5Zkzqd4B5nkJDw5KiTSR5NYQlO7VA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/file-upload/-/file-upload-1.21.0.tgz", + "integrity": "sha512-uH55bwFKcftpUYACyHT/8xB2bJdDqe3NM3JNCEYplxvn4scvDEzr2jpyVEmqUeOfrdNnyTuthNnL2hJjm4e+4A==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/file-utils": "1.7.0", - "@zag-js/i18n-utils": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/file-utils": "1.21.0", + "@zag-js/i18n-utils": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/file-utils": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/file-utils/-/file-utils-1.7.0.tgz", - "integrity": "sha512-Wb1VoI7UquG1ckJPMFPnmgLg351NI55SXjsEq+CrqgKQCo0httYFLPlkOpp4AbGsoUFZxXRxEXDEVzq5kpPFzQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/file-utils/-/file-utils-1.21.0.tgz", + "integrity": "sha512-gEWmz2ryuJMyAq3kg13TTmh5wR4Ft7d4Lb81ZeHiPpI/IwW67QrpBN0AKw3FBTmAuYBpK/dEc5iyETNPPrPTvg==", + "license": "MIT", + "dependencies": { + "@zag-js/i18n-utils": "1.21.0" + } + }, + "node_modules/@zag-js/floating-panel": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/floating-panel/-/floating-panel-1.21.0.tgz", + "integrity": "sha512-PVszFoJ53Iqmx+JD7WQFydRpp6spZFP1bCuBaHSoI044Z57UJ+rAkSlOGpoRHwpSROO9FPIpeqoTgy/kOCNmOA==", "license": "MIT", "dependencies": { - "@zag-js/i18n-utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/rect-utils": "1.21.0", + "@zag-js/store": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/focus-trap": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/focus-trap/-/focus-trap-1.7.0.tgz", - "integrity": "sha512-JHMZAfiL1aoxMAQGolx+iDMgqOMy067yffaLr1tMX55NGZPfEyXEjgxmPXRPf728/7IOShLkWLX17yacmW/w/Q==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/focus-trap/-/focus-trap-1.21.0.tgz", + "integrity": "sha512-O00KOYOVPWWv/eATfeZxRTEvUTLv+eHJH6ynqOAvQ7RXmsECst4QlL9UJwStrTKn/r2gxhj+UZMwHMEwTGNeVg==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0" + "@zag-js/dom-query": "1.21.0" } }, "node_modules/@zag-js/focus-visible": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/focus-visible/-/focus-visible-1.7.0.tgz", - "integrity": "sha512-ycrO6VetctoA7aaw83rnp3erDmQe2Zsyobzp4fzpMbOBTNWzMklt4Kz54xa1ntkia8CpSWVfoauORLlaZoDiAw==", + "version": "1.21.0", + "resolved": 
"https://registry.npmjs.org/@zag-js/focus-visible/-/focus-visible-1.21.0.tgz", + "integrity": "sha512-FNA7H4hyoQRBKpDkJWlBrFeyJpVphATgjvjhNXatCrrfa4F7VZiGnu3RGhEcnaw4b3bNkFnYLdRd+9XX7JHuoA==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0" + "@zag-js/dom-query": "1.21.0" } }, "node_modules/@zag-js/highlight-word": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/highlight-word/-/highlight-word-1.7.0.tgz", - "integrity": "sha512-dRw9GbMTh+CKKA4dH6n2TEmaayH2cB5Otnaawm+o+q3gkioVij8V/owWFbMZrszW6ajJX/TTdsVJ5IBdPvKhKg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/highlight-word/-/highlight-word-1.21.0.tgz", + "integrity": "sha512-bJIwPtcAMfEP6c5R/a3ZQG1V5FvYBP9onMVwKranAWPqOUj1/Y6lQ2gV/K4s7sw3VnpoXmy+5VxwfOPU/QWU5Q==", "license": "MIT" }, "node_modules/@zag-js/hover-card": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/hover-card/-/hover-card-1.7.0.tgz", - "integrity": "sha512-MqrLet1qaJfc2MEvHUWGLQ1OxgTz73gAD7oWXxnxks2Q/BXow2jU3+fVdseg3G63bmUbHXSdOkyGNo0mpHCV3Q==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/hover-card/-/hover-card-1.21.0.tgz", + "integrity": "sha512-G4+/lnc4ATU7BVHlnQ77fNC1b2k9dcbIeaBPMcdnc+g+CtqNhNTBM+rMb2OpSE9IOuFwqld5EK1v4tW8+6qOwQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/i18n-utils": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/i18n-utils/-/i18n-utils-1.7.0.tgz", - "integrity": "sha512-CcDXxfobG2LlOU1m3xPzV5pXpCe0tSE9u+drtKMz7F/HOZkR3V0rpCCi/zKySPNa3uLC7G8efz1fGQXiOVKONw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/i18n-utils/-/i18n-utils-1.21.0.tgz", + "integrity": "sha512-5E+vVsL6zcfaLlSGSnB3olXIEzmZ4C5L53+jSnx8LqmIcuTEc8I8mvBhcpTiDVHKrH6jG3jHE+6BvdyJ9SWQiA==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0" + "@zag-js/dom-query": "1.21.0" } }, "node_modules/@zag-js/interact-outside": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/interact-outside/-/interact-outside-1.7.0.tgz", - "integrity": "sha512-tmsVQmcH2N2X2mG2/8/+WRIo9WbRVvLe1OZa3lzFYV4Mu5i+tNK1CHMESpoAd/RdjJ6AyTR2zYiH05WZe76gMw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/interact-outside/-/interact-outside-1.21.0.tgz", + "integrity": "sha512-Yo4lojJYJZ4fjavOz+VbdpZlcDFAOlrOX+rKss3BNKfaffmhCklx/8Zej7WFStPCAv8AOzZ+fE4EhH/w+uPXEw==", + "license": "MIT", + "dependencies": { + "@zag-js/dom-query": "1.21.0", + "@zag-js/utils": "1.21.0" + } + }, + "node_modules/@zag-js/json-tree-utils": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/json-tree-utils/-/json-tree-utils-1.21.0.tgz", + "integrity": "sha512-OSyIxdWUVWD44hCvSgR+hP0q9nJOejS1VI9P4dbphQfcLNVvntAfwrb1os0DUR++UKBHyhAYwKVuVdThYbkJYQ==", + "license": "MIT" + }, + "node_modules/@zag-js/listbox": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/listbox/-/listbox-1.21.0.tgz", + "integrity": "sha512-XByByVOj4MA/ELcHgtkiS+jP5b2C2wXHmpCeCUp2jYKx3ZiL8al9y7yYLVBEDHRXsAR44UAQuJPIjDsCgtgkJg==", "license": "MIT", 
"dependencies": { - "@zag-js/dom-query": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/collection": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-visible": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/live-region": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/live-region/-/live-region-1.7.0.tgz", - "integrity": "sha512-u2bYIAnBIY+GZqfPqxn2ZylOqE2blUVW7Yc2Z4Ey05K4JXSH2gKR3xPmJCS9/u8tcFKQz5L4KQ/98ntgBG2fGQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/live-region/-/live-region-1.21.0.tgz", + "integrity": "sha512-buHwgHkW95c8gYtk53AEmjS8r72AtDFRfD3l3OgMsBE/dnYYgM3bfpiZL3pP0IBK+WPKDJxS8TMj7Q7pBiQebQ==", "license": "MIT" }, "node_modules/@zag-js/menu": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/menu/-/menu-1.7.0.tgz", - "integrity": "sha512-F2XbPC0cWrmj7nLrs1/is2osaPYX9blhEiZuEcGSrWG00w6xWyPb7bFpccW2nbq87JEc58xzW1pnTzPnaAnwSQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/menu/-/menu-1.21.0.tgz", + "integrity": "sha512-usD3MQTobKlzplY3j9IZxiq6cGHUZ/N8qmmi+EKvo0xpsEimhyE+FHr9XHqmFfGsxcH/yvyuFkvEjaUrF3qsqQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/rect-utils": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/rect-utils": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/number-input": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/number-input/-/number-input-1.7.0.tgz", - "integrity": "sha512-zmStn38lscmSsX/P6hZQzan35nEstVmEGC6M3m5G+bzDRC+IR3h19yr1Ma+xXDkT1Vi21GaV0+rytf9WsYJg6Q==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/number-input/-/number-input-1.21.0.tgz", + "integrity": "sha512-77Z2tTI+PcOCaoxNoteXfLaZA0zxObrOxqAjTgwapM88kn9oGNU4Ln6AYMJqdIDZJtQWdLBGjJwi3R8h8irpNQ==", "license": "MIT", "dependencies": { - "@internationalized/number": "3.6.0", - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@internationalized/number": "3.6.3", + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/pagination": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/pagination/-/pagination-1.7.0.tgz", - "integrity": "sha512-gIbJe1fIYlQCpXqWssET9CCmMWLvcz8OCCw7W3ASeLYRvUW3IzhkMAht5pEsvJEZ9tIWaab5fZ7OLqcgCTgVQw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/pagination/-/pagination-1.21.0.tgz", + "integrity": "sha512-d3zXD17CTSsA3o+5oJB1CujEoYNph58/DHFwVFDRgH5lB5K1vBxgas+JxJ2++uhouI8BH5fz7w7X3Wr6kXEHIw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" + } + }, + 
"node_modules/@zag-js/password-input": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/password-input/-/password-input-1.21.0.tgz", + "integrity": "sha512-paiZbGEBlkoas08qwrpQVUuZXG8efgti/u464eZR6x7drv6PVc9igWxfqFJXL378I/cEUjj5MvYdk9yMbLJcHg==", + "license": "MIT", + "dependencies": { + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/pin-input": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/pin-input/-/pin-input-1.7.0.tgz", - "integrity": "sha512-iQfUNfbtq28zPzFjmzDs7otRbFr+kC6luQM33wALZpmmVBNXb7yi9W6R14V6NJI3to6cAaHzRzn3ixxfQJEB3w==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/pin-input/-/pin-input-1.21.0.tgz", + "integrity": "sha512-Ut3tZ4rDhjopTTdMcNm3BIpTlAu3NR1Uw1w+WM5NTh5C7Vn+GZAL5dP1dahB/t29yqhTZY4ssMxZfDofBpfMHw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/popover": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/popover/-/popover-1.7.0.tgz", - "integrity": "sha512-Nf9grOVBWlnwQL+AR6X2hAy5bTNQng9xG2Cfo4E8rD2G/CJLKtUGCHHkG8xeQ969HT4urbOrgrZ5UpAhkpNlmw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/popover/-/popover-1.21.0.tgz", + "integrity": "sha512-crDELtzKZo0hSXA1N8LFrleq/9QlSGRlUNNb0DoUW0/gFFBG3wsrLayn2gWHweeM9HBG60ZnZnBW//pXaS32sg==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/aria-hidden": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/focus-trap": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/remove-scroll": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/aria-hidden": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-trap": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/remove-scroll": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/popper": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/popper/-/popper-1.7.0.tgz", - "integrity": "sha512-1Tr9ZBS2VPeZ/zeAR5uEBYLkWn4VcycbaDDkvWxa44fi6LxknDf064cP+ql9AfUp/eUGD2hN9OSEhyxB/JXjKQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/popper/-/popper-1.21.0.tgz", + "integrity": "sha512-PWLF6kY4f88CBM+nGebPJMB3DsXcj8NDuiLdljrGL4j1x18t1dhNY1IIdNDBueJCF0VL0uJrGwcxMZg6FGReSA==", "license": "MIT", "dependencies": { - "@floating-ui/dom": "1.6.13", - "@zag-js/dom-query": "1.7.0", - "@zag-js/utils": "1.7.0" + "@floating-ui/dom": "1.7.2", + "@zag-js/dom-query": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/presence": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/presence/-/presence-1.7.0.tgz", - "integrity": "sha512-00YcVn3J0zwQ/DSEnbtbCx6UMokHXTiMF+CjNryPaaAOlLk/5s4ogEdrdguFvWxQ6zszQ3UxBh3H9pim+k7jVQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/presence/-/presence-1.21.0.tgz", + "integrity": 
"sha512-Fz7nhaoYbfbV6c8ovCnv75HaCD5yvU7NUxtR20wUYBPPx5nvdOViUsU+4ih/HXUcBHsQUW6teIfkf9Gb7xbCgQ==", "license": "MIT", "dependencies": { - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0" + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0" } }, "node_modules/@zag-js/progress": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/progress/-/progress-1.7.0.tgz", - "integrity": "sha512-dfjPtUGRZW0pURBalm55ACoN083EJ90cDT1RRRF72JhqlRJu/vSXngjSUFtYuG1WADGS3D7F5XIFMo+PAGynFg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/progress/-/progress-1.21.0.tgz", + "integrity": "sha512-AMZsoURX2jotI2KrODE4jw7e9FPslKIZCO/guh11D6A9gvSM3ECRe2gKdAcLjP+UKxayS8MkNPhD51bAYCfkbQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/qr-code": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/qr-code/-/qr-code-1.7.0.tgz", - "integrity": "sha512-fg/hI2Py6D4E2cvh2BJ4PunYyyivkkRga76K9VDvq+hq1OezB6SzchLjFkIXn6431VK+xrU1HqcSR67KAn8IWA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/qr-code/-/qr-code-1.21.0.tgz", + "integrity": "sha512-mCe8qp+F9ZKS9Py/CkXmfAGMc9h86UM9NkXOWwU880az885Y0Ld8UaHmyWO3AAJDWPYBkTJKq+tEqNTCKx1dyw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0", + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0", "proxy-memoize": "3.0.1", "uqr": "0.1.2" } }, "node_modules/@zag-js/radio-group": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/radio-group/-/radio-group-1.7.0.tgz", - "integrity": "sha512-9NlI5fTh8ZVX5nXm7nU/ZheQLZpHwrHZeKRjomVQQALEWuMZ5YJtVXZaUT5xsCRTk+LEQVSaKp10+aD/5cIMlA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/radio-group/-/radio-group-1.21.0.tgz", + "integrity": "sha512-TCb3RjiNhgFWzwHUns9S+z6rNyXng2kexFPmD1ycyEO1efHAb83J5aZv5ShGX/05YCZpwVMf3WsyGEV8p8c/1g==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/element-rect": "1.7.0", - "@zag-js/focus-visible": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-visible": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/rating-group": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/rating-group/-/rating-group-1.7.0.tgz", - "integrity": "sha512-jDr8M+2fXTxB9l8qm8ktA362eM6Xt6FzIz0dKlV1JsYr5KamhsZ70Y8MPB6i3b45FGdDdj02a2aaWGLRUaRnrw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/rating-group/-/rating-group-1.21.0.tgz", + "integrity": "sha512-TBjSGfHT06Ehj3lBACVB3pOnxmb+jvJQgBQUZtFYFMae+gtuKItwx9qleH24vuyqKT/DI3amQhbvpi+bUK9CVA==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - 
"@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/react": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/react/-/react-1.7.0.tgz", - "integrity": "sha512-phr7WMVJcwfOkiLwtobGWkdzVGdZmVQYvF7w8awloW0j1+YF2OdMYDZK8RauHwmg+sEVmqtGeZPr40hZNnKhVQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/react/-/react-1.21.0.tgz", + "integrity": "sha512-yTqpMJ2c6Sf/KqXmyq3yJg1W/VZhYn1YNBRKWYJYT/kUDnoOpyqIBbmwka0dZi/hnWdhK1pzV0UUa7oV4IWa/A==", "license": "MIT", "dependencies": { - "@zag-js/core": "1.7.0", - "@zag-js/store": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/core": "1.21.0", + "@zag-js/store": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" }, "peerDependencies": { "react": ">=18.0.0", @@ -3075,290 +2878,289 @@ } }, "node_modules/@zag-js/rect-utils": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/rect-utils/-/rect-utils-1.7.0.tgz", - "integrity": "sha512-VvpqanvSrD/a5Gf5VHCM9yhkaBFWWsYTRNNQBtguNDrOh/tFvQBFAwes/BxvT+4fG4xbBL/fbSZIyhZ77Q7L2w==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/rect-utils/-/rect-utils-1.21.0.tgz", + "integrity": "sha512-ulzlyupj7QnM5NdAHSy2uKscVanjApxcC5/FRu+ooUZRaK1A8BMqep6r7lsVB8qTz0l1ssjLqCJPGNzP3PB3ug==", "license": "MIT" }, "node_modules/@zag-js/remove-scroll": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/remove-scroll/-/remove-scroll-1.7.0.tgz", - "integrity": "sha512-sjuBT/iHUZKoDaIdEa5fn0Ii6qjPbp/xO5g/2n2gI3RhRPjcc9jmrTxuvjKftB+ZoBy4GO8MbeaPKdQLIreufg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/remove-scroll/-/remove-scroll-1.21.0.tgz", + "integrity": "sha512-wsXEM7rUJnJrTmcCHsahtLfxaas/enHOakAB98n5YZelcoFFbE+iR91brb1yUbccfryvepozOac+EIWuO8/2aw==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0" + "@zag-js/dom-query": "1.21.0" } }, "node_modules/@zag-js/scroll-snap": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/scroll-snap/-/scroll-snap-1.7.0.tgz", - "integrity": "sha512-dvRmgdnT0AR2g0RtjgVXGJG6Si4gd+os56u1x3VKzAzlMZWYiFd0iyoKFCr/SCBEEMN/Y3ppkQoZjWOlnpah2g==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/scroll-snap/-/scroll-snap-1.21.0.tgz", + "integrity": "sha512-H/8bQql4DjYFVpBG6j/EyUsdboCxyGjRzOg9SN8bA2aXNDBPh+/oLwnCWCqagd4A1VO6JxmuFmbcM2wW9Khmhw==", "license": "MIT", "dependencies": { - "@zag-js/dom-query": "1.7.0" + "@zag-js/dom-query": "1.21.0" } }, "node_modules/@zag-js/select": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/select/-/select-1.7.0.tgz", - "integrity": "sha512-DmKIfoJFO42NgZOspEww5i6j71OqHgUCCodxR0zCmMoISxi1VYYKdjjeeSqivUYoH2mk9+z+lAJF+qdCo45Mzg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/select/-/select-1.21.0.tgz", + "integrity": "sha512-wVxPzw9lmtCDWTPP0h6P8r7QL93VsyajwV0EPFKoa8HH4XWzl5QBuShXIzmD8dxbHA5HIdAZNYAC5BQCSW37Xw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/collection": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/collection": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + 
"@zag-js/dom-query": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/signature-pad": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/signature-pad/-/signature-pad-1.7.0.tgz", - "integrity": "sha512-m81iwLl0TKsFPRnPLadVIM53q6b7NJJ6fgRH8Z+TImarorV4QcA0IXr2wcj1MLlIa4CPNiXoQrmOnOdIOFHvWA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/signature-pad/-/signature-pad-1.21.0.tgz", + "integrity": "sha512-LUXHsMPXLNSaWBJ4WWY+ZSFpAbbPHfUAGOVh22bOIJWMRchcs4Cch42tFgg/sB8cREfc3G/CS5e2gIBqMigcEQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0", + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0", "perfect-freehand": "^1.2.2" } }, "node_modules/@zag-js/slider": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/slider/-/slider-1.7.0.tgz", - "integrity": "sha512-0h9ejtOWa4XjxApcCFyGt7By22kd6gG4PdUZgXiKlPCQFgYrxWXZqMlwH6ZtyD4VYUuRPJ05CezDU5KlmZD/3A==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/slider/-/slider-1.21.0.tgz", + "integrity": "sha512-dmH2j8Iu079UZf36TzfPBOYb2jGbvXHcV8x3zYiRWs4ccJDaSNBZieCWCY0/Nm5wI8l+ue/Buc1kcbpIytuWHQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/element-size": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/splitter": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/splitter/-/splitter-1.7.0.tgz", - "integrity": "sha512-iJiKgRqIp/gbzjTajLIjpoc8dVBhjrTGauwVFj2yfKlkM30lgBRBHPtnrtsVox2A5ZyTikuj2ZtMCFXJAL8BDA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/splitter/-/splitter-1.21.0.tgz", + "integrity": "sha512-blsSe3UrhEYieLF2fuO7UM0t2rQxFTeLYMSjuxFspdYZz47VnEKtVypgQUZnQX5dyttyV49vl1g7+AbBBlk6bA==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/steps": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/steps/-/steps-1.7.0.tgz", - "integrity": "sha512-niYlKAy4j7yariPVbPJwBgzWhEsE82d7JIxD4yQW1nyyM6+xAgZrJaTG6WY1ogiBLCDj5kZw1rJv1uBBF6I5EA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/steps/-/steps-1.21.0.tgz", + "integrity": "sha512-w0nzJBgYe/A04pNZN1mv1hRT44MVwwRf9VvlBFIS1CxVpUOGkDoVrzRb/CX1zpOhMdtF8w7+FfgT6Q3/oVJ4+A==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/store": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/store/-/store-1.7.0.tgz", - 
"integrity": "sha512-3n+AGo3Y3d1+SkEjY/6QPcDU5kfGu4DEA9qMxJgnnOlYT07SEWByMQD2uoEji9M9psHcVvxm86OnF3Y6UuTsuA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/store/-/store-1.21.0.tgz", + "integrity": "sha512-UCAuYWui3+VYfp8KdECXuM+L8tKzQYyNz+7KrRPHyZ37wgHjz4M+QNj/QP5GgDStLJaF3UgbuLYwbXSQ/3WcWw==", "license": "MIT", "dependencies": { "proxy-compare": "3.0.1" } }, "node_modules/@zag-js/switch": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/switch/-/switch-1.7.0.tgz", - "integrity": "sha512-sz3whYMAD949fJ5v9DegU43SrpUNKhoPOum4LOpoSrh364ePfm7ShsTIgJnqPrdMknr+17ljLx54tXPS1SsMTw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/switch/-/switch-1.21.0.tgz", + "integrity": "sha512-erQ05qU9UUTOKkq77X+fTBOnng75ZFugcbcx4HWkACs9aUQmh9JoRF/1+HzFvRf8SyfuEdiSP25Q+ozmiOUmXQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/focus-visible": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-visible": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/tabs": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/tabs/-/tabs-1.7.0.tgz", - "integrity": "sha512-bAMp7Vhyis5j3BSKs4m0OwsbchRLLzFf6Yaf54CNraAUdKRwLQckznrajQLPI5F+BrHkGzMXvj/lt9jlGiKDcw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/tabs/-/tabs-1.21.0.tgz", + "integrity": "sha512-ecRS8F5M6QCAln4ob8waySRmSPozbOZ5dq1GGmaVExBwbrOA4C3ZbrHU3Dhmmx8vUji+rOSRifyhHwCTY0PTqQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/element-rect": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/tags-input": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/tags-input/-/tags-input-1.7.0.tgz", - "integrity": "sha512-ME/KwP1yrPHX0bP0EqkHI30IQgrE2cAkREoRluM5ScpG3Uiug98x6+zts0YS9j1OB3pyTl0d4alECBruxN8cPA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/tags-input/-/tags-input-1.21.0.tgz", + "integrity": "sha512-i/3PvNMhUloVi2DO+CRAEHtosu/Xmjcuj7Q3wY1acTORkoyXJrynmKmUcjF2D5ySHuey+Q07ADztlpa9ZHjr8Q==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/auto-resize": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/interact-outside": "1.7.0", - "@zag-js/live-region": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/auto-resize": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/interact-outside": "1.21.0", + "@zag-js/live-region": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/time-picker": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/time-picker/-/time-picker-1.7.0.tgz", - "integrity": "sha512-oeJ/2cHUD/iNF9LVWeFZ0ZrUDpMcSjb1lScqmrDdSuBpt9Hv5NLwjKFVeCtcE7VP3ijgN1VHY5FJzqQyynK9tw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/time-picker/-/time-picker-1.21.0.tgz", + "integrity": 
"sha512-GIBgfHfo2pYnl9MD0fVNaJ6UE63dOs+T0DFPhBf3DazNR9r4qhK0QXQLRQyH57KD+kcjKiJNgMGRKsKbX88aEw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" }, "peerDependencies": { "@internationalized/date": ">=3.0.0" } }, "node_modules/@zag-js/timer": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/timer/-/timer-1.7.0.tgz", - "integrity": "sha512-IpFkbuyBPJl/1idCchljtpZ0PirJWHLpvoFrEnyXQ7clyIeeLuYjyMMfwG+BVWZ7BeYby9A+b0+UNksvoJLtvQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/timer/-/timer-1.21.0.tgz", + "integrity": "sha512-vFohY91xnJVV6iSkT6tESLIrFssZsE02LbnXjHEnEVajC0jXLExvIu70t+5CWmP08e2yfp7E+G9WI1cDyzS/SQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/toast": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/toast/-/toast-1.7.0.tgz", - "integrity": "sha512-tvEO1vpC9QZ0oYJOKay2dvcq5lAPn4MT7ahnALs89iVjhWyguXAs5kzoq/Devlbuhi+bUY1YxvtrMDJjYVFhaA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/toast/-/toast-1.21.0.tgz", + "integrity": "sha512-DMvdLMQFGGwNxRjnzEsszocBWreQ+4spvQTrolra9pp7PuklodnIIuxRNNQ7bQVd1wH/pQPkEwXTbusb4NMBgw==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/toggle": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/toggle/-/toggle-1.7.0.tgz", - "integrity": "sha512-94TEthfGXjNmPcIiaOlwwEm73SSI2rRVn6FPviatzQU/OcDaaiAxuvGMIkW7Ov4+1sniAElGP24LTnyz0QuQpg==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/toggle/-/toggle-1.21.0.tgz", + "integrity": "sha512-+toPS8gviWYDAatyuFOWooHts5LP368UYsubedxZAgyz+qE6Mo8j282k2iGvmzrM22WcplRXVzgZ0JYUFVPtbQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/toggle-group": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/toggle-group/-/toggle-group-1.7.0.tgz", - "integrity": "sha512-qf8S66MUSw95S65BFH+PUtPs6GCLd39MWHJqzvZSXS+UWCLNXQlK8ayrNYh6CQgtgNeyljMqc2pFGWmp+M987w==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/toggle-group/-/toggle-group-1.21.0.tgz", + "integrity": 
"sha512-zUxLj0sXCUixI3C7lMEekQc8jQlFd0Y70a3/MO5xC/sem3pucPS30rulcvp7b3d9TLJk8YVofpvAjdRPDyb9XA==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/tooltip": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/tooltip/-/tooltip-1.7.0.tgz", - "integrity": "sha512-ehZOewcxYZL4+ND5QMeDlQQrckssMTzxcReRCOVFXrRZb5X1jX6+ale9MSG+cJYMpQUqT2J5VtzMJH+GNj/jfw==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/tooltip/-/tooltip-1.21.0.tgz", + "integrity": "sha512-X7t93MPvB0T82HT9QRlfh+Ts8QwAeouSDmaCCrF5/tdIsMTuzEzGqWtaPbXTDfMGrsG2umlIiIVSraWDe6aAIQ==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/focus-visible": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/store": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-visible": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/store": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/tour": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/tour/-/tour-1.7.0.tgz", - "integrity": "sha512-P8wYE0OpW1GtopvQ7ELdF2SuTMI64iBSr4UYGRCt2WkbrjP0vkFp35iUEbFmE44cRKIF8jGU6gznSPCGnGjz9A==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/tour/-/tour-1.21.0.tgz", + "integrity": "sha512-441Az3byK0vP2zL67p4z5m7s/0B7uHicLdvS0rKjoI+2gZ9Qd8yGuzTSfMJY2lWn+407iswN/koY7Kz5K0srFg==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dismissable": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/focus-trap": "1.7.0", - "@zag-js/interact-outside": "1.7.0", - "@zag-js/popper": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dismissable": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/focus-trap": "1.21.0", + "@zag-js/interact-outside": "1.21.0", + "@zag-js/popper": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/tree-view": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/tree-view/-/tree-view-1.7.0.tgz", - "integrity": "sha512-ULjbcLG3PqYV5BKNW8Z9Ikh+67GblYhEscgfBN4X3BLv9KOG6J0Gp4JQkxkWBTeRpUCTnoBgZ1ZbeOFgNJbcfQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/tree-view/-/tree-view-1.21.0.tgz", + "integrity": "sha512-gMjmy+sdZsLm75pwLH8M5qCOnsXA2KIGt0lKcfL/qAhYqDVaXm6xnx43JhJxSvVvqPqDuP1W8R5vUkBtEXV5Ig==", "license": "MIT", "dependencies": { - "@zag-js/anatomy": "1.7.0", - "@zag-js/collection": "1.7.0", - "@zag-js/core": "1.7.0", - "@zag-js/dom-query": "1.7.0", - "@zag-js/types": "1.7.0", - "@zag-js/utils": "1.7.0" + "@zag-js/anatomy": "1.21.0", + "@zag-js/collection": "1.21.0", + "@zag-js/core": "1.21.0", + "@zag-js/dom-query": "1.21.0", + "@zag-js/types": "1.21.0", + "@zag-js/utils": "1.21.0" } }, "node_modules/@zag-js/types": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/types/-/types-1.7.0.tgz", - "integrity": 
"sha512-rmPonVc8EBOGIEJYjzWIBQ6LJwUMc3LnipRREECO+n7LNlUQUliCOFbHw1UOGP+4ZkCKmxjGFR3jLtjY8aN4gQ==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/types/-/types-1.21.0.tgz", + "integrity": "sha512-ozT8aTeqCKsPYQDqIgkjkJnXBEADvV8nj8ZuXUzm7RhIN9EqeqpQyOdA7GdYrrDY5bgmdzyzmJu+e/2PbWg/ng==", "license": "MIT", "dependencies": { "csstype": "3.1.3" } }, "node_modules/@zag-js/utils": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@zag-js/utils/-/utils-1.7.0.tgz", - "integrity": "sha512-yIxvH5V27a1WuLgCxHX7qpdtFo8vTJaZLafBpSNfVYG4B8FaxTE+P7JAcpmAzs3UyXura/WfAY2eVWWVBpk9ZA==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@zag-js/utils/-/utils-1.21.0.tgz", + "integrity": "sha512-yI/CZizbk387TdkDCy9Uc4l53uaeQuWAIJESrmAwwq6yMNbHZ2dm5+1NHdZr/guES5TgyJa/BYJsNJeCsCfesg==", "license": "MIT" }, "node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -3641,13 +3443,13 @@ } }, "node_modules/axios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz", - "integrity": "sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==", + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", + "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", + "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, @@ -3718,9 +3520,9 @@ } }, "node_modules/browserslist": { - "version": "4.24.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", - "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "version": "4.25.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", + "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", "dev": true, "funding": [ { @@ -3738,10 +3540,10 @@ ], "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001688", - "electron-to-chromium": "^1.5.73", + "caniuse-lite": "^1.0.30001726", + "electron-to-chromium": "^1.5.173", "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.1" + "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" @@ -3751,13 +3553,13 @@ } }, "node_modules/builtin-modules": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", - "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-5.0.0.tgz", + "integrity": "sha512-bkXY9WsVpY7CvMhKSR6pZilZu9Ln5WDrKVBUXf2S443etkmEO4V58heTecXcUIsNsi4Rx8JUO4NfX1IcQl4deg==", "dev": true, "license": "MIT", "engines": { - "node": ">=6" + "node": ">=18.20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -3824,7 +3626,6 @@ "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -3872,9 +3673,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001709", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001709.tgz", - "integrity": "sha512-NgL3vUTnDrPCZ3zTahp4fsugQ4dc7EKTSzwQDPEel6DMoMnfH2jhry9n2Zm8onbSR+f/QtKHFOA+iAQu4kbtWA==", + "version": "1.0.30001727", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz", + "integrity": "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==", "dev": true, "funding": [ { @@ -3925,6 +3726,13 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/change-case": { + "version": "5.4.4", + "resolved": "https://registry.npmjs.org/change-case/-/change-case-5.4.4.tgz", + "integrity": "sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==", + "dev": true, + "license": "MIT" + }, "node_modules/check-error": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", @@ -3981,9 +3789,9 @@ } }, "node_modules/ci-info": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.2.0.tgz", - "integrity": "sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.0.tgz", + "integrity": "sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==", "dev": true, "funding": [ { @@ -4111,13 +3919,13 @@ } }, "node_modules/core-js-compat": { - "version": "3.41.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.41.0.tgz", - "integrity": "sha512-RFsU9LySVue9RTwdDVX/T0e2Y6jRYWXERKElIjpuEOEnxaXffI0X7RUwVzfYLfzuLXSNJDYoRYUAmRUcyln20A==", + "version": "3.44.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.44.0.tgz", + "integrity": "sha512-JepmAj2zfl6ogy34qfWtcE7nHKAJnKsQFRn++scjVS2bZFllwptzw61BZcZFYBPpUznLfAvh0LGhxKppk04ClA==", "dev": true, "license": "MIT", "dependencies": { - "browserslist": "^4.24.4" + "browserslist": "^4.25.1" }, "funding": { "type": "opencollective", @@ -4237,9 +4045,10 @@ } }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", "dependencies": { "ms": "^2.1.3" }, @@ -4369,7 +4178,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.1", @@ -4388,9 +4196,9 @@ "peer": true }, "node_modules/electron-to-chromium": { - "version": "1.5.130", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.130.tgz", - "integrity": 
"sha512-Ou2u7L9j2XLZbhqzyX0jWDj6gA8D3jIfVzt4rikLf3cGBa0VdReuFimBKS9tQJA4+XpeCxj1NoWlfBXzbMa9IA==", + "version": "1.5.189", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.189.tgz", + "integrity": "sha512-y9D1ntS1ruO/pZ/V2FtLE+JXLQe28XoRpZ7QCCo0T8LdQladzdcOVQZH/IWLVJvCw12OGMb6hYOeOAjntCmJRQ==", "dev": true, "license": "ISC" }, @@ -4404,6 +4212,7 @@ "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "peer": true, "dependencies": { "is-arrayish": "^0.2.1" } @@ -4478,7 +4287,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -4488,7 +4296,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -4523,9 +4330,9 @@ } }, "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", "dev": true, "license": "MIT" }, @@ -4533,7 +4340,6 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0" @@ -4546,7 +4352,6 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -4652,20 +4457,20 @@ } }, "node_modules/eslint": { - "version": "9.23.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.23.0.tgz", - "integrity": "sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw==", + "version": "9.32.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.32.0.tgz", + "integrity": "sha512-LSehfdpgMeWcTZkWZVIJl+tkZ2nuSkyyB9C27MZqFWXuph7DvaowgcTvKqxvpLW1JZIk8PN7hFY3Rj9LQ7m7lg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.2", - "@eslint/config-helpers": "^0.2.0", - "@eslint/core": "^0.12.0", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.0", + "@eslint/core": "^0.15.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.23.0", - "@eslint/plugin-kit": "^0.2.7", + "@eslint/js": "9.32.0", + "@eslint/plugin-kit": "^0.3.4", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", @@ -4676,9 +4481,9 @@ "cross-spawn": "^7.0.6", "debug": 
"^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.3.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -4713,14 +4518,17 @@ } }, "node_modules/eslint-config-prettier": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.1.tgz", - "integrity": "sha512-4EQQr6wXwS+ZJSzaR5ZCrYgLxqvUjdXctaEtBqHcbkW944B1NQyO4qpdHQbXBONfwxXdkAY81HH4+LUfrg+zPw==", + "version": "10.1.8", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.8.tgz", + "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", "dev": true, "license": "MIT", "bin": { "eslint-config-prettier": "bin/cli.js" }, + "funding": { + "url": "https://opencollective.com/eslint-config-prettier" + }, "peerDependencies": { "eslint": ">=7.0.0" } @@ -4766,14 +4574,14 @@ } }, "node_modules/eslint-plugin-perfectionist": { - "version": "4.11.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-perfectionist/-/eslint-plugin-perfectionist-4.11.0.tgz", - "integrity": "sha512-5s+ehXydnLPQpLDj5mJ0CnYj2fQe6v6gKA3tS+FZVBLzwMOh8skH+l+1Gni08rG0SdEcNhJyjQp/mEkDYK8czw==", + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-perfectionist/-/eslint-plugin-perfectionist-4.15.0.tgz", + "integrity": "sha512-pC7PgoXyDnEXe14xvRUhBII8A3zRgggKqJFx2a82fjrItDs1BSI7zdZnQtM2yQvcyod6/ujmzb7ejKPx8lZTnw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "^8.29.0", - "@typescript-eslint/utils": "^8.29.0", + "@typescript-eslint/types": "^8.34.1", + "@typescript-eslint/utils": "^8.34.1", "natural-orderby": "^5.0.0" }, "engines": { @@ -4783,87 +4591,220 @@ "eslint": ">=8.45.0" } }, - "node_modules/eslint-plugin-prettier": { - "version": "5.2.6", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.6.tgz", - "integrity": "sha512-mUcf7QG2Tjk7H055Jk0lGBjbgDnfrvqjhXh9t2xLMSCjZVcw9Rb1V6sVNXO0th3jgeO7zllWPTNRil3JW94TnQ==", + "node_modules/eslint-plugin-perfectionist/node_modules/@typescript-eslint/project-service": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.38.0.tgz", + "integrity": "sha512-dbK7Jvqcb8c9QfH01YB6pORpqX1mn5gDZc9n63Ak/+jD67oWXn3Gs0M6vddAN+eDXBCS5EmNWzbSxsn9SzFWWg==", "dev": true, "license": "MIT", "dependencies": { - "prettier-linter-helpers": "^1.0.0", - "synckit": "^0.11.0" + "@typescript-eslint/tsconfig-utils": "^8.38.0", + "@typescript-eslint/types": "^8.38.0", + "debug": "^4.3.4" }, "engines": { - "node": "^14.18.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { - "url": "https://opencollective.com/eslint-plugin-prettier" + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@types/eslint": ">=8.0.0", - "eslint": ">=8.0.0", - "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", - "prettier": ">=3.0.0" - }, - "peerDependenciesMeta": { - "@types/eslint": { - "optional": true - }, - "eslint-config-prettier": { - "optional": true - } + "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/eslint-plugin-react": { - "version": "7.37.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.4.tgz", - "integrity": 
"sha512-BGP0jRmfYyvOyvMoRX/uoUeW+GqNj9y16bPQzqAHf3AYII/tDs+jMN0dBVkl88/OZwNGwrVFxE7riHsXVfy/LQ==", + "node_modules/eslint-plugin-perfectionist/node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.38.0.tgz", + "integrity": "sha512-Lum9RtSE3EroKk/bYns+sPOodqb2Fv50XOl/gMviMKNvanETUuUcC9ObRbzrJ4VSd2JalPqgSAavwrPiPvnAiQ==", "dev": true, "license": "MIT", - "dependencies": { - "array-includes": "^3.1.8", - "array.prototype.findlast": "^1.2.5", - "array.prototype.flatmap": "^1.3.3", - "array.prototype.tosorted": "^1.1.4", - "doctrine": "^2.1.0", - "es-iterator-helpers": "^1.2.1", - "estraverse": "^5.3.0", - "hasown": "^2.0.2", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.8", - "object.fromentries": "^2.0.8", - "object.values": "^1.2.1", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.5", - "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.12", - "string.prototype.repeat": "^1.0.0" - }, "engines": { - "node": ">=4" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" + "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/eslint-plugin-react-hooks": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz", - "integrity": "sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==", + "node_modules/eslint-plugin-perfectionist/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.38.0.tgz", + "integrity": "sha512-fooELKcAKzxux6fA6pxOflpNS0jc+nOQEEOipXFNjSlBS6fqrJOVY/whSn70SScHrcJ2LDsxWrneFoWYSVfqhQ==", "dev": true, "license": "MIT", - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" + "dependencies": { + "@typescript-eslint/project-service": "8.38.0", + "@typescript-eslint/tsconfig-utils": "8.38.0", + "@typescript-eslint/types": "8.38.0", + "@typescript-eslint/visitor-keys": "8.38.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/eslint-plugin-perfectionist/node_modules/@typescript-eslint/utils": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.38.0.tgz", + "integrity": "sha512-hHcMA86Hgt+ijJlrD8fX0j1j8w4C92zue/8LOPAFioIno+W0+L7KqE8QZKCcPGc/92Vs9x36w/4MPTJhqXdyvg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.38.0", + "@typescript-eslint/types": "8.38.0", + "@typescript-eslint/typescript-estree": "8.38.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": 
">=4.8.4 <5.9.0" + } + }, + "node_modules/eslint-plugin-perfectionist/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/eslint-plugin-perfectionist/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/eslint-plugin-perfectionist/node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.5.3.tgz", + "integrity": "sha512-NAdMYww51ehKfDyDhv59/eIItUVzU0Io9H2E8nHNGKEeeqlnci+1gCvrHib6EmZdf6GxF+LCV5K7UC65Ezvw7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.11.7" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-plugin-prettier" + }, + "peerDependencies": { + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", + "prettier": ">=3.0.0" + }, + "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-react": { + "version": "7.37.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.5.tgz", + "integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.8", + "array.prototype.findlast": "^1.2.5", + "array.prototype.flatmap": "^1.3.3", + "array.prototype.tosorted": "^1.1.4", + "doctrine": "^2.1.0", + "es-iterator-helpers": "^1.2.1", + "estraverse": "^5.3.0", + "hasown": "^2.0.2", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.9", + "object.fromentries": "^2.0.8", + "object.values": "^1.2.1", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.12", + "string.prototype.repeat": "^1.0.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.2.0.tgz", + "integrity": "sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.19.tgz", - "integrity": "sha512-eyy8pcr/YxSYjBoqIFSrlbn9i/xvxUFa8CjzAYo9cFjgGXqq1hyjihcpZvxRLalpaWmueWR81xn7vuKmAFijDQ==", + "version": "0.4.20", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.20.tgz", + "integrity": "sha512-XpbHQ2q5gUF8BGOX4dHe+71qoirYMhApEPZ7sfhF/dNnOF1UXnCMGZf79SFTBO7Bz5YEIT4TMieSlJBWhP9WBA==", "dev": true, "license": "MIT", "peerDependencies": { @@ -4899,43 +4840,45 @@ } }, "node_modules/eslint-plugin-unicorn": { - "version": "55.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-55.0.0.tgz", - "integrity": "sha512-n3AKiVpY2/uDcGrS3+QsYDkjPfaOrNrsfQxU9nt5nitd9KuvVXrfAvgCO9DYPSfap+Gqjw9EOrXIsBp5tlHZjA==", + "version": "60.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-60.0.0.tgz", + "integrity": "sha512-QUzTefvP8stfSXsqKQ+vBQSEsXIlAiCduS/V1Em+FKgL9c21U/IIm20/e3MFy1jyCf14tHAhqC1sX8OTy6VUCg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.24.5", - "@eslint-community/eslint-utils": "^4.4.0", - "ci-info": "^4.0.0", + "@babel/helper-validator-identifier": "^7.27.1", + "@eslint-community/eslint-utils": "^4.7.0", + "@eslint/plugin-kit": "^0.3.3", + "change-case": "^5.4.4", + "ci-info": "^4.3.0", "clean-regexp": "^1.0.0", - "core-js-compat": "^3.37.0", - "esquery": "^1.5.0", - "globals": "^15.7.0", - "indent-string": "^4.0.0", - "is-builtin-module": "^3.2.1", - "jsesc": "^3.0.2", + "core-js-compat": "^3.44.0", + "esquery": "^1.6.0", + "find-up-simple": "^1.0.1", + "globals": "^16.3.0", + "indent-string": "^5.0.0", + "is-builtin-module": "^5.0.0", + "jsesc": "^3.1.0", "pluralize": "^8.0.0", - "read-pkg-up": "^7.0.1", "regexp-tree": "^0.1.27", - "regjsparser": "^0.10.0", - "semver": "^7.6.1", - "strip-indent": "^3.0.0" + "regjsparser": "^0.12.0", + "semver": "^7.7.2", + "strip-indent": "^4.0.0" }, "engines": { - "node": ">=18.18" + "node": "^20.10.0 || >=21.0.0" }, "funding": { "url": "https://github.com/sindresorhus/eslint-plugin-unicorn?sponsor=1" }, "peerDependencies": { - "eslint": ">=8.56.0" + "eslint": ">=9.29.0" } }, "node_modules/eslint-plugin-unicorn/node_modules/globals": { - "version": "15.15.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-15.15.0.tgz", - "integrity": "sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==", + "version": "16.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.3.0.tgz", + "integrity": "sha512-bqWEnJ1Nt3neqx2q5SFfGS8r/ahumIakg3HcwtNlrVlwXIeNumWn/c7Pn/wKzGhf6SaW6H6uWXLqC30STCMchQ==", "dev": true, "license": "MIT", "engines": { @@ -4945,10 +4888,39 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/eslint-plugin-unicorn/node_modules/indent-string": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", + "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/eslint-plugin-unicorn/node_modules/strip-indent": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", + "integrity": "sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-scope": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", - "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -4963,9 +4935,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -4976,15 +4948,15 @@ } }, "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "acorn": "^8.14.0", + "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" + "eslint-visitor-keys": "^4.2.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -5070,10 +5042,11 @@ } }, "node_modules/expect-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.1.0.tgz", - "integrity": "sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.1.tgz", + "integrity": "sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=12.0.0" } @@ -5194,6 +5167,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/find-up-simple": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/find-up-simple/-/find-up-simple-1.0.1.tgz", + "integrity": "sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/flat-cache": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", @@ -5266,12 +5252,15 @@ } }, "node_modules/form-data": { - "version": 
"4.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", - "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -5359,7 +5348,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.2", @@ -5384,7 +5372,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dev": true, "license": "MIT", "dependencies": { "dunder-proto": "^1.0.1", @@ -5531,7 +5518,6 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -5578,17 +5564,18 @@ } }, "node_modules/happy-dom": { - "version": "17.4.4", - "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-17.4.4.tgz", - "integrity": "sha512-/Pb0ctk3HTZ5xEL3BZ0hK1AqDSAUuRQitOmROPHhfUYEWpmTImwfD8vFDGADmMAX0JYgbcgxWoLFKtsWhcpuVA==", + "version": "18.0.1", + "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-18.0.1.tgz", + "integrity": "sha512-qn+rKOW7KWpVTtgIUi6RVmTBZJSe2k0Db0vh1f7CWrWclkkc7/Q+FrOfkZIb2eiErLyqu5AXEzE7XthO9JVxRA==", "dev": true, "license": "MIT", "dependencies": { - "webidl-conversions": "^7.0.0", + "@types/node": "^20.0.0", + "@types/whatwg-mimetype": "^3.0.2", "whatwg-mimetype": "^3.0.0" }, "engines": { - "node": ">=18.0.0" + "node": ">=20.0.0" } }, "node_modules/has-bigints": { @@ -5646,7 +5633,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -5659,7 +5645,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": true, "license": "MIT", "dependencies": { "has-symbols": "^1.0.3" @@ -5690,13 +5675,6 @@ "react-is": "^16.7.0" } }, - "node_modules/hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true, - "license": "ISC" - }, "node_modules/human-signals": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", @@ -5785,7 +5763,8 @@ "node_modules/is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": 
"sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==" + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "peer": true }, "node_modules/is-async-function": { "version": "2.1.1", @@ -5853,16 +5832,16 @@ } }, "node_modules/is-builtin-module": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", - "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-5.0.0.tgz", + "integrity": "sha512-f4RqJKBUe5rQkJ2eJEJBXSticB3hGbN9j0yxxMQFqIW89Jp9WYFtzfTcRlstDKVUTRzSOTLKRfO9vIztenwtxA==", "dev": true, "license": "MIT", "dependencies": { - "builtin-modules": "^3.3.0" + "builtin-modules": "^5.0.0" }, "engines": { - "node": ">=6" + "node": ">=18.20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -6292,7 +6271,8 @@ "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "peer": true }, "node_modules/json-schema-traverse": { "version": "0.4.1", @@ -6368,7 +6348,8 @@ "node_modules/lines-and-columns": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "peer": true }, "node_modules/locate-path": { "version": "6.0.0", @@ -6411,9 +6392,9 @@ } }, "node_modules/loupe": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", - "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.4.tgz", + "integrity": "sha512-wJzkKwJrheKtknCOKNEtDK4iqg/MxmZheEMtSTYvnzRdEYaZzmgH976nenp8WdJRdx5Vc1X/9MO0Oszl6ezeXg==", "dev": true, "license": "MIT" }, @@ -6448,7 +6429,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -6615,9 +6595,9 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/nanoid": { - "version": "3.3.9", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.9.tgz", - "integrity": "sha512-SppoicMGpZvbF1l3z4x7No3OlIjP7QJvC9XR7AhZr1kL133KHnKPztkKDc+Ir4aJ/1VhTySrtKhrsycmrMQfvg==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true, "funding": [ { @@ -6678,29 +6658,6 @@ "dev": true, "license": "MIT" }, - "node_modules/normalize-package-data": { - "version": "2.5.0", - "resolved": 
"https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/normalize-package-data/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver" - } - }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -6951,16 +6908,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/package-json-from-dist": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", @@ -6983,6 +6930,7 @@ "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "peer": true, "dependencies": { "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", @@ -7135,9 +7083,9 @@ } }, "node_modules/postcss": { - "version": "8.5.3", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", - "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -7155,7 +7103,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.8", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -7173,11 +7121,10 @@ } }, "node_modules/prettier": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", - "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", "dev": true, - "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" }, @@ -7309,9 +7256,9 @@ } }, "node_modules/react": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/react/-/react-19.0.0.tgz", - "integrity": "sha512-V8AVnmPIICiWpGfm6GLzCR/W5FXLchHop40W4nXBmdlEceh16rCN8O8LNWm5bh5XUX91fh7KpA+W0TgMKmgTpQ==", + "version": "19.1.1", + "resolved": "https://registry.npmjs.org/react/-/react-19.1.1.tgz", + "integrity": "sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==", "license": "MIT", "engines": { "node": ">=0.10.0" @@ -7332,21 +7279,22 @@ } }, "node_modules/react-dom": { - "version": "19.0.0", - 
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.0.0.tgz", - "integrity": "sha512-4GV5sHFG0e/0AD4X+ySy6UJd3jVl1iNsNHdpad0qhABJ11twS3TTBnseqsKurKcsNqCEFeGL3uLpVChpIO3QfQ==", + "version": "19.1.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.1.tgz", + "integrity": "sha512-Dlq/5LAZgF0Gaz6yiqZCf6VCcZs1ghAJyrsu84Q/GT0gV+mCxbfmKNoGRKBYMJ8IEdGPqu49YWXD02GCknEDkw==", "license": "MIT", "dependencies": { - "scheduler": "^0.25.0" + "scheduler": "^0.26.0" }, "peerDependencies": { - "react": "^19.0.0" + "react": "^19.1.1" } }, "node_modules/react-hook-form": { - "version": "7.54.2", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.54.2.tgz", - "integrity": "sha512-eHpAUgUjWbZocoQYUHposymRb4ZP6d0uwUnooL2uOybA9/3tPUvoAKqEWK1WaSiTxxOfTpffNZP7QwlnM3/gEg==", + "version": "7.61.1", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.61.1.tgz", + "integrity": "sha512-2vbXUFDYgqEgM2RcXcAT2PwDW/80QARi+PKmHy5q2KhuKvOlG8iIYgf7eIlIANR5trW9fJbP4r5aub3a4egsew==", + "license": "MIT", "engines": { "node": ">=18.0.0" }, @@ -7364,15 +7312,13 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-router": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.4.0.tgz", - "integrity": "sha512-Y2g5ObjkvX3VFeVt+0CIPuYd9PpgqCslG7ASSIdN73LwA1nNWzcMLaoMRJfP3prZFI92svxFwbn7XkLJ+UPQ6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.7.1.tgz", + "integrity": "sha512-jVKHXoWRIsD/qS6lvGveckwb862EekvapdHJN/cGmzw40KnJH5gg53ujOJ4qX6EKIK9LSBfFed/xiQ5yeXNrUA==", "license": "MIT", "dependencies": { - "@types/cookie": "^0.6.0", "cookie": "^1.0.1", - "set-cookie-parser": "^2.6.0", - "turbo-stream": "2.4.0" + "set-cookie-parser": "^2.6.0" }, "engines": { "node": ">=20.0.0" @@ -7388,12 +7334,12 @@ } }, "node_modules/react-router-dom": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.4.0.tgz", - "integrity": "sha512-VlksBPf3n2bijPvnA7nkTsXxMAKOj+bWp4R9c3i+bnwlSOFAGOkJkKhzy/OsRkWaBMICqcAl1JDzh9ZSOze9CA==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.7.1.tgz", + "integrity": "sha512-bavdk2BA5r3MYalGKZ01u8PGuDBloQmzpBZVhDLrOOv1N943Wq6dcM9GhB3x8b7AbqPMEezauv4PeGkAJfy7FQ==", "license": "MIT", "dependencies": { - "react-router": "7.4.0" + "react-router": "7.7.1" }, "engines": { "node": ">=20.0.0" @@ -7403,152 +7349,52 @@ "react-dom": ">=18" } }, - "node_modules/read-pkg": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", - "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dev": true, - "license": "MIT", "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^2.5.0", - "parse-json": "^5.0.0", - "type-fest": "^0.6.0" + "picomatch": "^2.2.1" }, "engines": { - "node": ">=8" + "node": ">=8.10.0" } }, - "node_modules/read-pkg-up": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", - "integrity": 
"sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", "dev": true, - "license": "MIT", "dependencies": { - "find-up": "^4.1.0", - "read-pkg": "^5.2.0", - "type-fest": "^0.8.1" + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" }, "engines": { "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/read-pkg-up/node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", "dev": true, "license": "MIT", "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" }, "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg-up/node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg-up/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up/node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg/node_modules/type-fest": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", - "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=8" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/redent": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", - "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", - "dev": true, - "dependencies": { - "indent-string": "^4.0.0", - "strip-indent": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/reflect.getprototypeof": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", - "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.9", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "get-intrinsic": "^1.2.7", - "get-proto": "^1.0.1", - "which-builtin-type": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/regenerator-runtime": { @@ -7588,31 +7434,36 @@ } }, "node_modules/regjsparser": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.10.0.tgz", - "integrity": "sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA==", + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.12.0.tgz", + "integrity": "sha512-cnE+y8bz4NhMjISKbgeVJtqNbtf5QpjZP+Bslo+UqkIt9QPnX9q095eiRRASJG1/tz6dlNr6Z5NsBiWYokp6EQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "jsesc": "~0.5.0" + "jsesc": "~3.0.2" }, "bin": { "regjsparser": "bin/parser" } }, "node_modules/regjsparser/node_modules/jsesc": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", - "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", "dev": true, + "license": "MIT", "bin": { "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" } }, "node_modules/resolve": { "version": "1.22.10", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "peer": true, "dependencies": { "is-core-module": "^2.16.0", "path-parse": "^1.0.7", @@ -7647,12 +7498,13 @@ } }, "node_modules/rollup": { - "version": "4.30.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.30.1.tgz", - "integrity": "sha512-mlJ4glW020fPuLi7DkM/lN97mYEZGWeqBnrljzN0gs7GLctqX3lNWxKQ7Gl712UAX+6fog/L3jh4gb7R6aVi3w==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.40.1.tgz", + "integrity": "sha512-C5VvvgCCyfyotVITIAv+4efVytl5F7wt+/I2i9q9GZcEXW9BP52YYOXC58igUi+LFZVHukErIIqQSWwv/M3WRw==", "dev": true, + "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.7" }, "bin": { "rollup": "dist/bin/rollup" @@ -7662,25 +7514,26 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.30.1", - "@rollup/rollup-android-arm64": "4.30.1", - "@rollup/rollup-darwin-arm64": "4.30.1", - "@rollup/rollup-darwin-x64": "4.30.1", - "@rollup/rollup-freebsd-arm64": "4.30.1", - 
"@rollup/rollup-freebsd-x64": "4.30.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.30.1", - "@rollup/rollup-linux-arm-musleabihf": "4.30.1", - "@rollup/rollup-linux-arm64-gnu": "4.30.1", - "@rollup/rollup-linux-arm64-musl": "4.30.1", - "@rollup/rollup-linux-loongarch64-gnu": "4.30.1", - "@rollup/rollup-linux-powerpc64le-gnu": "4.30.1", - "@rollup/rollup-linux-riscv64-gnu": "4.30.1", - "@rollup/rollup-linux-s390x-gnu": "4.30.1", - "@rollup/rollup-linux-x64-gnu": "4.30.1", - "@rollup/rollup-linux-x64-musl": "4.30.1", - "@rollup/rollup-win32-arm64-msvc": "4.30.1", - "@rollup/rollup-win32-ia32-msvc": "4.30.1", - "@rollup/rollup-win32-x64-msvc": "4.30.1", + "@rollup/rollup-android-arm-eabi": "4.40.1", + "@rollup/rollup-android-arm64": "4.40.1", + "@rollup/rollup-darwin-arm64": "4.40.1", + "@rollup/rollup-darwin-x64": "4.40.1", + "@rollup/rollup-freebsd-arm64": "4.40.1", + "@rollup/rollup-freebsd-x64": "4.40.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.40.1", + "@rollup/rollup-linux-arm-musleabihf": "4.40.1", + "@rollup/rollup-linux-arm64-gnu": "4.40.1", + "@rollup/rollup-linux-arm64-musl": "4.40.1", + "@rollup/rollup-linux-loongarch64-gnu": "4.40.1", + "@rollup/rollup-linux-powerpc64le-gnu": "4.40.1", + "@rollup/rollup-linux-riscv64-gnu": "4.40.1", + "@rollup/rollup-linux-riscv64-musl": "4.40.1", + "@rollup/rollup-linux-s390x-gnu": "4.40.1", + "@rollup/rollup-linux-x64-gnu": "4.40.1", + "@rollup/rollup-linux-x64-musl": "4.40.1", + "@rollup/rollup-win32-arm64-msvc": "4.40.1", + "@rollup/rollup-win32-ia32-msvc": "4.40.1", + "@rollup/rollup-win32-x64-msvc": "4.40.1", "fsevents": "~2.3.2" } }, @@ -7763,15 +7616,15 @@ } }, "node_modules/scheduler": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.25.0.tgz", - "integrity": "sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA==", + "version": "0.26.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", + "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", "license": "MIT" }, "node_modules/semver": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", - "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, "license": "ISC", "bin": { @@ -7955,6 +7808,7 @@ "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -7969,42 +7823,6 @@ "node": ">=0.10.0" } }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": 
"sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "dev": true, - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.21", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz", - "integrity": "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==", - "dev": true, - "license": "CC0-1.0" - }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -8012,10 +7830,11 @@ "dev": true }, "node_modules/std-env": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.0.tgz", - "integrity": "sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==", - "dev": true + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", + "dev": true, + "license": "MIT" }, "node_modules/string-width": { "version": "5.1.2", @@ -8264,6 +8083,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strip-literal": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/stylis": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz", @@ -8294,14 +8133,13 @@ } }, "node_modules/synckit": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.1.tgz", - "integrity": "sha512-fWZqNBZNNFp/7mTUy1fSsydhKsAKJ+u90Nk7kOK5Gcq9vObaqLBLjWFDBkyVU9Vvc6Y71VbOevMuGhqv02bT+Q==", + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.8.tgz", + "integrity": "sha512-+XZ+r1XGIJGeQk3VvXhT6xx/VpbHsRzsTkGgF6E5RX9TTXD0118l87puaEBZ566FhqblC6U0d4XnubznJDm30A==", "dev": true, "license": "MIT", "dependencies": { - "@pkgr/core": "^0.2.0", - "tslib": "^2.8.1" + "@pkgr/core": "^0.2.4" }, "engines": { "node": "^14.18.0 || >=16.0.0" @@ -8348,11 +8186,57 @@ "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", "dev": true }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tinypool": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", - "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", "dev": true, + "license": "MIT", "engines": { "node": "^18.0.0 || >=20.0.0" } @@ -8368,25 +8252,15 @@ } }, "node_modules/tinyspy": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", "dev": true, "license": "MIT", "engines": { "node": ">=14.0.0" } }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -8429,12 +8303,6 @@ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, - "node_modules/turbo-stream": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/turbo-stream/-/turbo-stream-2.4.0.tgz", - "integrity": "sha512-FHncC10WpBd2eOmGwpmQsWLDoK4cqsA/UT/GqNoaKOQnT8uzhtCbg3EoUDMvqpOSAI0S26mr0rkjzbOO6S3v1g==", - "license": "ISC" - }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -8447,16 +8315,6 @@ "node": ">= 0.8.0" } }, - "node_modules/type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=8" - } - }, "node_modules/typed-array-buffer": { "version": "1.0.3", 
"resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", @@ -8536,9 +8394,9 @@ } }, "node_modules/typescript": { - "version": "5.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", - "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", + "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", "dev": true, "license": "Apache-2.0", "bin": { @@ -8550,15 +8408,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.29.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.29.0.tgz", - "integrity": "sha512-ep9rVd9B4kQsZ7ZnWCVxUE/xDLUUUsRzE0poAeNu+4CkFErLfuvPt/qtm2EpnSyfvsR0S6QzDFSrPCFBwf64fg==", + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.38.0.tgz", + "integrity": "sha512-FsZlrYK6bPDGoLeZRuvx2v6qrM03I0U0SnfCLPs/XCCPCFD80xU9Pg09H/K+XFa68uJuZo7l/Xhs+eDRg2l3hg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.29.0", - "@typescript-eslint/parser": "8.29.0", - "@typescript-eslint/utils": "8.29.0" + "@typescript-eslint/eslint-plugin": "8.38.0", + "@typescript-eslint/parser": "8.38.0", + "@typescript-eslint/typescript-estree": "8.38.0", + "@typescript-eslint/utils": "8.38.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -8572,6 +8431,214 @@ "typescript": ">=4.8.4 <5.9.0" } }, + "node_modules/typescript-eslint/node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.38.0.tgz", + "integrity": "sha512-CPoznzpuAnIOl4nhj4tRr4gIPj5AfKgkiJmGQDaq+fQnRJTYlcBjbX3wbciGmpoPf8DREufuPRe1tNMZnGdanA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.38.0", + "@typescript-eslint/type-utils": "8.38.0", + "@typescript-eslint/utils": "8.38.0", + "@typescript-eslint/visitor-keys": "8.38.0", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.38.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/typescript-eslint/node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/type-utils": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.38.0.tgz", + "integrity": "sha512-c7jAvGEZVf0ao2z+nnz8BUaHZD09Agbh+DY7qvBQqLiz8uJzRgVPj5YvOh8I8uEiH8oIUGIfHzMwUcGVco/SJg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.38.0", + "@typescript-eslint/typescript-estree": "8.38.0", + "@typescript-eslint/utils": "8.38.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + 
"node_modules/typescript-eslint/node_modules/@typescript-eslint/parser": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.38.0.tgz", + "integrity": "sha512-Zhy8HCvBUEfBECzIl1PKqF4p11+d0aUJS1GeUiuqK9WmOug8YCmC4h4bjyBvMyAMI9sbRczmrYL5lKg/YMbrcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.38.0", + "@typescript-eslint/types": "8.38.0", + "@typescript-eslint/typescript-estree": "8.38.0", + "@typescript-eslint/visitor-keys": "8.38.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/typescript-eslint/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.38.0.tgz", + "integrity": "sha512-fooELKcAKzxux6fA6pxOflpNS0jc+nOQEEOipXFNjSlBS6fqrJOVY/whSn70SScHrcJ2LDsxWrneFoWYSVfqhQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.38.0", + "@typescript-eslint/tsconfig-utils": "8.38.0", + "@typescript-eslint/types": "8.38.0", + "@typescript-eslint/visitor-keys": "8.38.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/typescript-eslint/node_modules/@typescript-eslint/typescript-estree/node_modules/@typescript-eslint/project-service": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.38.0.tgz", + "integrity": "sha512-dbK7Jvqcb8c9QfH01YB6pORpqX1mn5gDZc9n63Ak/+jD67oWXn3Gs0M6vddAN+eDXBCS5EmNWzbSxsn9SzFWWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.38.0", + "@typescript-eslint/types": "^8.38.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/typescript-eslint/node_modules/@typescript-eslint/typescript-estree/node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.38.0.tgz", + "integrity": "sha512-Lum9RtSE3EroKk/bYns+sPOodqb2Fv50XOl/gMviMKNvanETUuUcC9ObRbzrJ4VSd2JalPqgSAavwrPiPvnAiQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/typescript-eslint/node_modules/@typescript-eslint/utils": { + "version": "8.38.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.38.0.tgz", + "integrity": "sha512-hHcMA86Hgt+ijJlrD8fX0j1j8w4C92zue/8LOPAFioIno+W0+L7KqE8QZKCcPGc/92Vs9x36w/4MPTJhqXdyvg==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.38.0", + "@typescript-eslint/types": "8.38.0", + "@typescript-eslint/typescript-estree": "8.38.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/typescript-eslint/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/typescript-eslint/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/typescript-eslint/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/ufo": { "version": "1.5.4", "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.4.tgz", @@ -8610,6 +8677,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, "node_modules/universal-cookie": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/universal-cookie/-/universal-cookie-8.0.1.tgz", @@ -8666,33 +8740,25 @@ "punycode": "^2.1.0" } }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, "node_modules/vite": { - "version": "6.2.6", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.6.tgz", - "integrity": "sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.0.6.tgz", + "integrity": "sha512-MHFiOENNBd+Bd9uvc8GEsIzdkn1JxMmEeYX35tI3fv0sJBUTfW5tQsoaOwuY4KhBI09A3dUJ/DXf2yxPVPUceg==", "dev": true, "license": "MIT", "dependencies": { "esbuild": "^0.25.0", - "postcss": "^8.5.3", - "rollup": "^4.30.1" + "fdir": "^6.4.6", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.40.0", + "tinyglobby": "^0.2.14" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + 
"node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -8701,14 +8767,14 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", - "less": "*", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" @@ -8750,17 +8816,17 @@ } }, "node_modules/vite-node": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.9.tgz", - "integrity": "sha512-w3Gdx7jDcuT9cNn9jExXgOyKmf5UOTb6WMHz8LGAm54eS1Elf5OuBhCxl6zJxGhEeIkgsE1WbHuoL0mj/UXqXg==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", "dev": true, "license": "MIT", "dependencies": { "cac": "^6.7.14", - "debug": "^4.4.0", - "es-module-lexer": "^1.6.0", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0" + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" @@ -8788,32 +8854,63 @@ "vite": ">2.0.0-0" } }, + "node_modules/vite/node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/vitest": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz", - "integrity": "sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "3.0.9", - "@vitest/mocker": "3.0.9", - "@vitest/pretty-format": "^3.0.9", - "@vitest/runner": "3.0.9", - "@vitest/snapshot": "3.0.9", - "@vitest/spy": "3.0.9", - "@vitest/utils": "3.0.9", + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", "chai": "^5.2.0", - "debug": "^4.4.0", - "expect-type": "^1.1.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", "magic-string": "^0.30.17", "pathe": "^2.0.3", - "std-env": "^3.8.0", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", - "tinypool": "^1.0.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || 
^6.0.0", - "vite-node": "3.0.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", "why-is-node-running": "^2.3.0" }, "bin": { @@ -8829,8 +8926,8 @@ "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.0.9", - "@vitest/ui": "3.0.9", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", "happy-dom": "*", "jsdom": "*" }, @@ -8865,13 +8962,17 @@ "dev": true, "license": "MIT" }, - "node_modules/webidl-conversions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", - "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "node_modules/vitest/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, "node_modules/whatwg-mimetype": { diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json index 6c3a49a18a635..923e39abaae1f 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json @@ -4,53 +4,53 @@ "version": "0.0.0", "type": "module", "scripts": { - "dev": "vite --port 5174", + "dev": "vite --port 5174 --strictPort", "build": "vite build", "lint": "eslint --quiet && tsc --p tsconfig.app.json", "lint:fix": "eslint --fix && tsc --p tsconfig.app.json", "format": "pnpm prettier --write .", "preview": "vite preview", - "codegen": "openapi-rq -i \"../openapi/v1-simple-auth-manager-generated.yaml\" -c axios --format prettier -o openapi-gen --operationId", + "codegen": "openapi-rq -i \"../openapi/v2-simple-auth-manager-generated.yaml\" -c axios --format prettier -o openapi-gen --operationId", "test": "vitest run", "coverage": "vitest run --coverage" }, "dependencies": { - "@chakra-ui/react": "^3.14.2", - "@tanstack/react-query": "^5.70.0", - "axios": "^1.8.4", + "@chakra-ui/react": "^3.24.0", + "@tanstack/react-query": "^5.84.1", + "axios": "^1.11.0", "next-themes": "^0.4.6", - "react": "^19.0.0", + "react": "^19.1.1", "react-cookie": "^8.0.1", - "react-dom": "^19.0.0", - "react-hook-form": "^7.54.2", - "react-router-dom": "^7.4.0" + "react-dom": "^19.1.1", + "react-hook-form": "^7.61.1", + "react-router-dom": "^7.7.1" }, "devDependencies": { "@7nohe/openapi-react-query-codegen": "^1.6.2", - "@eslint/compat": "^1.2.7", - "@eslint/js": "^9.23.0", - "@stylistic/eslint-plugin": "^2.13.0", - "@testing-library/jest-dom": "^6.6.3", - "@testing-library/react": "^16.2.0", - "@trivago/prettier-plugin-sort-imports": "^4.3.0", - "@types/react": "^18.3.19", - "@types/react-dom": "^19.0.0", - "@vitejs/plugin-react-swc": "^3.8.1", - "eslint": "^9.23.0", - "eslint-config-prettier": "^10.1.1", + "@eslint/compat": "^1.3.1", + "@eslint/js": "^9.32.0", + "@stylistic/eslint-plugin": "^5.2.2", + "@testing-library/jest-dom": "^6.6.4", + "@testing-library/react": "^16.3.0", + "@trivago/prettier-plugin-sort-imports": "^5.2.2", + "@types/react": "^19.1.9", + "@types/react-dom": "^19.1.7", + "@vitejs/plugin-react-swc": "^3.11.0", + "eslint": "^9.32.0", + "eslint-config-prettier": "^10.1.8", 
"eslint-plugin-jsx-a11y": "^6.10.2", - "eslint-plugin-perfectionist": "^4.10.1", - "eslint-plugin-prettier": "^5.2.5", - "eslint-plugin-react": "^7.37.4", - "eslint-plugin-react-hooks": "^4.6.2", - "eslint-plugin-react-refresh": "^0.4.19", - "eslint-plugin-unicorn": "^55.0.0", - "happy-dom": "^17.4.4", - "prettier": "^3.5.3", - "typescript": "~5.5.4", - "typescript-eslint": "^8.27.0", - "vite": "^6.2.6", + "eslint-plugin-perfectionist": "^4.15.0", + "eslint-plugin-prettier": "^5.5.3", + "eslint-plugin-react": "^7.37.5", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.20", + "eslint-plugin-unicorn": "^60.0.0", + "happy-dom": "^18.0.1", + "prettier": "^3.6.2", + "typescript": "~5.8.3", + "typescript-eslint": "^8.38.0", + "vite": "^7.0.6", "vite-plugin-css-injected-by-js": "^3.5.2", - "vitest": "^3.0.9" + "vitest": "^3.2.4" } } diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml index 87f498791a11d..9d15032256f1e 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml @@ -9,111 +9,111 @@ importers: .: dependencies: '@chakra-ui/react': - specifier: ^3.14.2 - version: 3.15.0(@emotion/react@11.14.0(@types/react@18.3.20)(react@19.0.0))(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + specifier: ^3.24.0 + version: 3.24.0(@emotion/react@11.14.0(@types/react@19.1.9)(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@tanstack/react-query': - specifier: ^5.70.0 - version: 5.71.1(react@19.0.0) + specifier: ^5.84.1 + version: 5.84.1(react@19.1.1) axios: - specifier: ^1.8.4 - version: 1.8.4 + specifier: ^1.11.0 + version: 1.11.0 next-themes: specifier: ^0.4.6 - version: 0.4.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + version: 0.4.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react: - specifier: ^19.0.0 - version: 19.0.0 + specifier: ^19.1.1 + version: 19.1.1 react-cookie: specifier: ^8.0.1 - version: 8.0.1(react@19.0.0) + version: 8.0.1(react@19.1.1) react-dom: - specifier: ^19.0.0 - version: 19.0.0(react@19.0.0) + specifier: ^19.1.1 + version: 19.1.1(react@19.1.1) react-hook-form: - specifier: ^7.54.2 - version: 7.54.2(react@19.0.0) + specifier: ^7.61.1 + version: 7.61.1(react@19.1.1) react-router-dom: - specifier: ^7.4.0 - version: 7.4.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + specifier: ^7.7.1 + version: 7.7.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1) devDependencies: '@7nohe/openapi-react-query-codegen': specifier: ^1.6.2 - version: 1.6.2(commander@12.1.0)(glob@10.4.5)(magicast@0.3.5)(ts-morph@22.0.0)(typescript@5.5.4) + version: 1.6.2(commander@12.1.0)(glob@10.4.5)(magicast@0.3.5)(ts-morph@22.0.0)(typescript@5.8.3) '@eslint/compat': - specifier: ^1.2.7 - version: 1.2.7(eslint@9.23.0(jiti@1.21.7)) + specifier: ^1.3.1 + version: 1.3.1(eslint@9.32.0(jiti@1.21.7)) '@eslint/js': - specifier: ^9.23.0 - version: 9.23.0 + specifier: ^9.32.0 + version: 9.32.0 '@stylistic/eslint-plugin': - specifier: ^2.13.0 - version: 2.13.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) + specifier: ^5.2.2 + version: 5.2.2(eslint@9.32.0(jiti@1.21.7)) '@testing-library/jest-dom': - specifier: ^6.6.3 - version: 6.6.3 + specifier: ^6.6.4 + version: 6.6.4 '@testing-library/react': - specifier: ^16.2.0 - version: 16.2.0(@testing-library/dom@10.4.0)(@types/react-dom@19.0.0)(@types/react@18.3.20)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + 
specifier: ^16.3.0 + version: 16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@19.1.7(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@trivago/prettier-plugin-sort-imports': - specifier: ^4.3.0 - version: 4.3.0(prettier@3.5.3) + specifier: ^5.2.2 + version: 5.2.2(prettier@3.6.2) '@types/react': - specifier: ^18.3.19 - version: 18.3.20 + specifier: ^19.1.9 + version: 19.1.9 '@types/react-dom': - specifier: ^19.0.0 - version: 19.0.0 + specifier: ^19.1.7 + version: 19.1.7(@types/react@19.1.9) '@vitejs/plugin-react-swc': - specifier: ^3.8.1 - version: 3.8.1(@swc/helpers@0.5.15)(vite@6.2.6(jiti@1.21.7)) + specifier: ^3.11.0 + version: 3.11.0(@swc/helpers@0.5.17)(vite@7.0.6(@types/node@20.19.1)(jiti@1.21.7)) eslint: - specifier: ^9.23.0 - version: 9.23.0(jiti@1.21.7) + specifier: ^9.32.0 + version: 9.32.0(jiti@1.21.7) eslint-config-prettier: - specifier: ^10.1.1 - version: 10.1.1(eslint@9.23.0(jiti@1.21.7)) + specifier: ^10.1.8 + version: 10.1.8(eslint@9.32.0(jiti@1.21.7)) eslint-plugin-jsx-a11y: specifier: ^6.10.2 - version: 6.10.2(eslint@9.23.0(jiti@1.21.7)) + version: 6.10.2(eslint@9.32.0(jiti@1.21.7)) eslint-plugin-perfectionist: - specifier: ^4.10.1 - version: 4.10.1(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) + specifier: ^4.15.0 + version: 4.15.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) eslint-plugin-prettier: - specifier: ^5.2.5 - version: 5.2.5(eslint-config-prettier@10.1.1(eslint@9.23.0(jiti@1.21.7)))(eslint@9.23.0(jiti@1.21.7))(prettier@3.5.3) + specifier: ^5.5.3 + version: 5.5.3(eslint-config-prettier@10.1.8(eslint@9.32.0(jiti@1.21.7)))(eslint@9.32.0(jiti@1.21.7))(prettier@3.6.2) eslint-plugin-react: - specifier: ^7.37.4 - version: 7.37.4(eslint@9.23.0(jiti@1.21.7)) + specifier: ^7.37.5 + version: 7.37.5(eslint@9.32.0(jiti@1.21.7)) eslint-plugin-react-hooks: - specifier: ^4.6.2 - version: 4.6.2(eslint@9.23.0(jiti@1.21.7)) + specifier: ^5.2.0 + version: 5.2.0(eslint@9.32.0(jiti@1.21.7)) eslint-plugin-react-refresh: - specifier: ^0.4.19 - version: 0.4.19(eslint@9.23.0(jiti@1.21.7)) + specifier: ^0.4.20 + version: 0.4.20(eslint@9.32.0(jiti@1.21.7)) eslint-plugin-unicorn: - specifier: ^55.0.0 - version: 55.0.0(eslint@9.23.0(jiti@1.21.7)) + specifier: ^60.0.0 + version: 60.0.0(eslint@9.32.0(jiti@1.21.7)) happy-dom: - specifier: ^17.4.4 - version: 17.4.4 + specifier: ^18.0.1 + version: 18.0.1 prettier: - specifier: ^3.5.3 - version: 3.5.3 + specifier: ^3.6.2 + version: 3.6.2 typescript: - specifier: ~5.5.4 - version: 5.5.4 + specifier: ~5.8.3 + version: 5.8.3 typescript-eslint: - specifier: ^8.27.0 - version: 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) + specifier: ^8.38.0 + version: 8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) vite: - specifier: ^6.2.6 - version: 6.2.6(jiti@1.21.7) + specifier: ^7.0.6 + version: 7.0.6(@types/node@20.19.1)(jiti@1.21.7) vite-plugin-css-injected-by-js: specifier: ^3.5.2 - version: 3.5.2(vite@6.2.6(jiti@1.21.7)) + version: 3.5.2(vite@7.0.6(@types/node@20.19.1)(jiti@1.21.7)) vitest: - specifier: ^3.0.9 - version: 3.0.9(happy-dom@17.4.4)(jiti@1.21.7) + specifier: ^3.2.4 + version: 3.2.4(@types/node@20.19.1)(happy-dom@18.0.1)(jiti@1.21.7) packages: @@ -127,94 +127,91 @@ packages: ts-morph: 22.x typescript: 5.x - '@adobe/css-tools@4.4.2': - resolution: {integrity: sha512-baYZExFpsdkBNuvGKTKWCwKH57HRZLVtycZS05WTQNVOiXVSeAki3nU35zlRbToeMW8aHlJfyS+1C4BOv27q0A==} + '@adobe/css-tools@4.4.3': + resolution: {integrity: 
sha512-VQKMkwriZbaOgVCby1UDY/LDk5fIjhQicCvVPFqfe+69fWaPWydbWJ3wRt59/YzIwda1I81loas3oCoHxnqvdA==} '@apidevtools/json-schema-ref-parser@11.6.4': resolution: {integrity: sha512-9K6xOqeevacvweLGik6LnZCb1fBtCOSIWQs8d096XGeqoLKC33UVMGz9+77Gw44KvbH4pKcQPWo4ZpxkXYj05w==} engines: {node: '>= 16'} - '@ark-ui/react@5.4.0': - resolution: {integrity: sha512-TatFGOb6zKx4a363jg3McQY+2/wEcUZgTHZTomueFMR+JgqHR98aAFnCPvi2L5UF+326qXEWHxHIPlQLwFUb1A==} + '@ark-ui/react@5.18.2': + resolution: {integrity: sha512-vM2cuKSIe4mCDfqMc4RggsmiulXbicTjpZLf1IUXSHcUluMVn+z2k1minKI4X+Z7XSoKH0To7asxS0nJ1UPODA==} peerDependencies: react: '>=18.0.0' react-dom: '>=18.0.0' - '@babel/code-frame@7.26.2': - resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==} + '@babel/code-frame@7.27.1': + resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} engines: {node: '>=6.9.0'} - '@babel/generator@7.17.7': - resolution: {integrity: sha512-oLcVCTeIFadUoArDTwpluncplrYBmTCCZZgXCbgNGvOBBiSDDK3eWO4b/+eOTli5tKv1lg+a5/NAXg+nTcei1w==} + '@babel/generator@7.27.1': + resolution: {integrity: sha512-UnJfnIpc/+JO0/+KRVQNGU+y5taA5vCbwN8+azkX6beii/ZF+enZJSOKo11ZSzGJjlNfJHfQtmQT8H+9TXPG2w==} engines: {node: '>=6.9.0'} - '@babel/generator@7.27.0': - resolution: {integrity: sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==} + '@babel/generator@7.28.0': + resolution: {integrity: sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==} engines: {node: '>=6.9.0'} - '@babel/helper-environment-visitor@7.24.7': - resolution: {integrity: sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==} + '@babel/helper-globals@7.28.0': + resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} engines: {node: '>=6.9.0'} - '@babel/helper-function-name@7.24.7': - resolution: {integrity: sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA==} + '@babel/helper-module-imports@7.27.1': + resolution: {integrity: sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==} engines: {node: '>=6.9.0'} - '@babel/helper-hoist-variables@7.24.7': - resolution: {integrity: sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==} + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} engines: {node: '>=6.9.0'} - '@babel/helper-module-imports@7.25.9': - resolution: {integrity: sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==} + '@babel/helper-validator-identifier@7.27.1': + resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} engines: {node: '>=6.9.0'} - '@babel/helper-split-export-declaration@7.24.7': - resolution: {integrity: sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==} - engines: {node: '>=6.9.0'} - - '@babel/helper-string-parser@7.25.9': - resolution: {integrity: sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==} - engines: {node: '>=6.9.0'} - - '@babel/helper-validator-identifier@7.25.9': - resolution: {integrity: 
sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==} - engines: {node: '>=6.9.0'} + '@babel/parser@7.27.1': + resolution: {integrity: sha512-I0dZ3ZpCrJ1c04OqlNsQcKiZlsrXf/kkE4FXzID9rIOYICsAbA8mMDzhW/luRNAHdCNt7os/u8wenklZDlUVUQ==} + engines: {node: '>=6.0.0'} + hasBin: true - '@babel/parser@7.27.0': - resolution: {integrity: sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==} + '@babel/parser@7.28.0': + resolution: {integrity: sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==} engines: {node: '>=6.0.0'} hasBin: true - '@babel/runtime@7.26.10': - resolution: {integrity: sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==} + '@babel/runtime@7.27.1': + resolution: {integrity: sha512-1x3D2xEk2fRo3PAhwQwu5UubzgiVWSXTBfWpVd2Mx2AzRqJuDJCsgaDVZ7HB5iGzDW1Hl1sWN2mFyKjmR9uAog==} + engines: {node: '>=6.9.0'} + + '@babel/runtime@7.28.2': + resolution: {integrity: sha512-KHp2IflsnGywDjBWDkR9iEqiWSpc8GIi0lgTT3mOElT0PP1tG26P4tmFI2YvAdzgq9RGyoHZQEIEdZy6Ec5xCA==} engines: {node: '>=6.9.0'} - '@babel/runtime@7.27.0': - resolution: {integrity: sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==} + '@babel/template@7.27.1': + resolution: {integrity: sha512-Fyo3ghWMqkHHpHQCoBs2VnYjR4iWFFjguTDEqA5WgZDOrFesVjMhMM2FSqTKSoUSDO1VQtavj8NFpdRBEvJTtg==} engines: {node: '>=6.9.0'} - '@babel/template@7.27.0': - resolution: {integrity: sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==} + '@babel/template@7.27.2': + resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} engines: {node: '>=6.9.0'} - '@babel/traverse@7.23.2': - resolution: {integrity: sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==} + '@babel/traverse@7.27.1': + resolution: {integrity: sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==} engines: {node: '>=6.9.0'} - '@babel/traverse@7.27.0': - resolution: {integrity: sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==} + '@babel/traverse@7.28.0': + resolution: {integrity: sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==} engines: {node: '>=6.9.0'} - '@babel/types@7.17.0': - resolution: {integrity: sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw==} + '@babel/types@7.27.1': + resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==} engines: {node: '>=6.9.0'} - '@babel/types@7.27.0': - resolution: {integrity: sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==} + '@babel/types@7.28.2': + resolution: {integrity: sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==} engines: {node: '>=6.9.0'} - '@chakra-ui/react@3.15.0': - resolution: {integrity: sha512-U7mR9ru5Vhpat57nP04lenVDtaMzPKfKedhBDkesk5VUbzr5euWygjspa/tTO37ew7t7Q/pyUovXAizoWEzZ1g==} + '@chakra-ui/react@3.24.0': + resolution: {integrity: sha512-fkKXtPJ2WVwgDAL50W2yHLzGrv8YAY6g09yrIMU8LuUkTa+xSlQJadAtuqARc0TqxTha+RL2rjPZkWvL/f0I6w==} peerDependencies: '@emotion/react': '>=11' react: '>=18' @@ -264,158 +261,164 @@ packages: '@emotion/weak-memoize@0.4.0': 
resolution: {integrity: sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg==} - '@esbuild/aix-ppc64@0.25.2': - resolution: {integrity: sha512-wCIboOL2yXZym2cgm6mlA742s9QeJ8DjGVaL39dLN4rRwrOgOyYSnOaFPhKZGLb2ngj4EyfAFjsNJwPXZvseag==} + '@esbuild/aix-ppc64@0.25.8': + resolution: {integrity: sha512-urAvrUedIqEiFR3FYSLTWQgLu5tb+m0qZw0NBEasUeo6wuqatkMDaRT+1uABiGXEu5vqgPd7FGE1BhsAIy9QVA==} engines: {node: '>=18'} cpu: [ppc64] os: [aix] - '@esbuild/android-arm64@0.25.2': - resolution: {integrity: sha512-5ZAX5xOmTligeBaeNEPnPaeEuah53Id2tX4c2CVP3JaROTH+j4fnfHCkr1PjXMd78hMst+TlkfKcW/DlTq0i4w==} + '@esbuild/android-arm64@0.25.8': + resolution: {integrity: sha512-OD3p7LYzWpLhZEyATcTSJ67qB5D+20vbtr6vHlHWSQYhKtzUYrETuWThmzFpZtFsBIxRvhO07+UgVA9m0i/O1w==} engines: {node: '>=18'} cpu: [arm64] os: [android] - '@esbuild/android-arm@0.25.2': - resolution: {integrity: sha512-NQhH7jFstVY5x8CKbcfa166GoV0EFkaPkCKBQkdPJFvo5u+nGXLEH/ooniLb3QI8Fk58YAx7nsPLozUWfCBOJA==} + '@esbuild/android-arm@0.25.8': + resolution: {integrity: sha512-RONsAvGCz5oWyePVnLdZY/HHwA++nxYWIX1atInlaW6SEkwq6XkP3+cb825EUcRs5Vss/lGh/2YxAb5xqc07Uw==} engines: {node: '>=18'} cpu: [arm] os: [android] - '@esbuild/android-x64@0.25.2': - resolution: {integrity: sha512-Ffcx+nnma8Sge4jzddPHCZVRvIfQ0kMsUsCMcJRHkGJ1cDmhe4SsrYIjLUKn1xpHZybmOqCWwB0zQvsjdEHtkg==} + '@esbuild/android-x64@0.25.8': + resolution: {integrity: sha512-yJAVPklM5+4+9dTeKwHOaA+LQkmrKFX96BM0A/2zQrbS6ENCmxc4OVoBs5dPkCCak2roAD+jKCdnmOqKszPkjA==} engines: {node: '>=18'} cpu: [x64] os: [android] - '@esbuild/darwin-arm64@0.25.2': - resolution: {integrity: sha512-MpM6LUVTXAzOvN4KbjzU/q5smzryuoNjlriAIx+06RpecwCkL9JpenNzpKd2YMzLJFOdPqBpuub6eVRP5IgiSA==} + '@esbuild/darwin-arm64@0.25.8': + resolution: {integrity: sha512-Jw0mxgIaYX6R8ODrdkLLPwBqHTtYHJSmzzd+QeytSugzQ0Vg4c5rDky5VgkoowbZQahCbsv1rT1KW72MPIkevw==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] - '@esbuild/darwin-x64@0.25.2': - resolution: {integrity: sha512-5eRPrTX7wFyuWe8FqEFPG2cU0+butQQVNcT4sVipqjLYQjjh8a8+vUTfgBKM88ObB85ahsnTwF7PSIt6PG+QkA==} + '@esbuild/darwin-x64@0.25.8': + resolution: {integrity: sha512-Vh2gLxxHnuoQ+GjPNvDSDRpoBCUzY4Pu0kBqMBDlK4fuWbKgGtmDIeEC081xi26PPjn+1tct+Bh8FjyLlw1Zlg==} engines: {node: '>=18'} cpu: [x64] os: [darwin] - '@esbuild/freebsd-arm64@0.25.2': - resolution: {integrity: sha512-mLwm4vXKiQ2UTSX4+ImyiPdiHjiZhIaE9QvC7sw0tZ6HoNMjYAqQpGyui5VRIi5sGd+uWq940gdCbY3VLvsO1w==} + '@esbuild/freebsd-arm64@0.25.8': + resolution: {integrity: sha512-YPJ7hDQ9DnNe5vxOm6jaie9QsTwcKedPvizTVlqWG9GBSq+BuyWEDazlGaDTC5NGU4QJd666V0yqCBL2oWKPfA==} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-x64@0.25.2': - resolution: {integrity: sha512-6qyyn6TjayJSwGpm8J9QYYGQcRgc90nmfdUb0O7pp1s4lTY+9D0H9O02v5JqGApUyiHOtkz6+1hZNvNtEhbwRQ==} + '@esbuild/freebsd-x64@0.25.8': + resolution: {integrity: sha512-MmaEXxQRdXNFsRN/KcIimLnSJrk2r5H8v+WVafRWz5xdSVmWLoITZQXcgehI2ZE6gioE6HirAEToM/RvFBeuhw==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] - '@esbuild/linux-arm64@0.25.2': - resolution: {integrity: sha512-gq/sjLsOyMT19I8obBISvhoYiZIAaGF8JpeXu1u8yPv8BE5HlWYobmlsfijFIZ9hIVGYkbdFhEqC0NvM4kNO0g==} + '@esbuild/linux-arm64@0.25.8': + resolution: {integrity: sha512-WIgg00ARWv/uYLU7lsuDK00d/hHSfES5BzdWAdAig1ioV5kaFNrtK8EqGcUBJhYqotlUByUKz5Qo6u8tt7iD/w==} engines: {node: '>=18'} cpu: [arm64] os: [linux] - '@esbuild/linux-arm@0.25.2': - resolution: {integrity: sha512-UHBRgJcmjJv5oeQF8EpTRZs/1knq6loLxTsjc3nxO9eXAPDLcWW55flrMVc97qFPbmZP31ta1AZVUKQzKTzb0g==} + 
'@esbuild/linux-arm@0.25.8': + resolution: {integrity: sha512-FuzEP9BixzZohl1kLf76KEVOsxtIBFwCaLupVuk4eFVnOZfU+Wsn+x5Ryam7nILV2pkq2TqQM9EZPsOBuMC+kg==} engines: {node: '>=18'} cpu: [arm] os: [linux] - '@esbuild/linux-ia32@0.25.2': - resolution: {integrity: sha512-bBYCv9obgW2cBP+2ZWfjYTU+f5cxRoGGQ5SeDbYdFCAZpYWrfjjfYwvUpP8MlKbP0nwZ5gyOU/0aUzZ5HWPuvQ==} + '@esbuild/linux-ia32@0.25.8': + resolution: {integrity: sha512-A1D9YzRX1i+1AJZuFFUMP1E9fMaYY+GnSQil9Tlw05utlE86EKTUA7RjwHDkEitmLYiFsRd9HwKBPEftNdBfjg==} engines: {node: '>=18'} cpu: [ia32] os: [linux] - '@esbuild/linux-loong64@0.25.2': - resolution: {integrity: sha512-SHNGiKtvnU2dBlM5D8CXRFdd+6etgZ9dXfaPCeJtz+37PIUlixvlIhI23L5khKXs3DIzAn9V8v+qb1TRKrgT5w==} + '@esbuild/linux-loong64@0.25.8': + resolution: {integrity: sha512-O7k1J/dwHkY1RMVvglFHl1HzutGEFFZ3kNiDMSOyUrB7WcoHGf96Sh+64nTRT26l3GMbCW01Ekh/ThKM5iI7hQ==} engines: {node: '>=18'} cpu: [loong64] os: [linux] - '@esbuild/linux-mips64el@0.25.2': - resolution: {integrity: sha512-hDDRlzE6rPeoj+5fsADqdUZl1OzqDYow4TB4Y/3PlKBD0ph1e6uPHzIQcv2Z65u2K0kpeByIyAjCmjn1hJgG0Q==} + '@esbuild/linux-mips64el@0.25.8': + resolution: {integrity: sha512-uv+dqfRazte3BzfMp8PAQXmdGHQt2oC/y2ovwpTteqrMx2lwaksiFZ/bdkXJC19ttTvNXBuWH53zy/aTj1FgGw==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] - '@esbuild/linux-ppc64@0.25.2': - resolution: {integrity: sha512-tsHu2RRSWzipmUi9UBDEzc0nLc4HtpZEI5Ba+Omms5456x5WaNuiG3u7xh5AO6sipnJ9r4cRWQB2tUjPyIkc6g==} + '@esbuild/linux-ppc64@0.25.8': + resolution: {integrity: sha512-GyG0KcMi1GBavP5JgAkkstMGyMholMDybAf8wF5A70CALlDM2p/f7YFE7H92eDeH/VBtFJA5MT4nRPDGg4JuzQ==} engines: {node: '>=18'} cpu: [ppc64] os: [linux] - '@esbuild/linux-riscv64@0.25.2': - resolution: {integrity: sha512-k4LtpgV7NJQOml/10uPU0s4SAXGnowi5qBSjaLWMojNCUICNu7TshqHLAEbkBdAszL5TabfvQ48kK84hyFzjnw==} + '@esbuild/linux-riscv64@0.25.8': + resolution: {integrity: sha512-rAqDYFv3yzMrq7GIcen3XP7TUEG/4LK86LUPMIz6RT8A6pRIDn0sDcvjudVZBiiTcZCY9y2SgYX2lgK3AF+1eg==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] - '@esbuild/linux-s390x@0.25.2': - resolution: {integrity: sha512-GRa4IshOdvKY7M/rDpRR3gkiTNp34M0eLTaC1a08gNrh4u488aPhuZOCpkF6+2wl3zAN7L7XIpOFBhnaE3/Q8Q==} + '@esbuild/linux-s390x@0.25.8': + resolution: {integrity: sha512-Xutvh6VjlbcHpsIIbwY8GVRbwoviWT19tFhgdA7DlenLGC/mbc3lBoVb7jxj9Z+eyGqvcnSyIltYUrkKzWqSvg==} engines: {node: '>=18'} cpu: [s390x] os: [linux] - '@esbuild/linux-x64@0.25.2': - resolution: {integrity: sha512-QInHERlqpTTZ4FRB0fROQWXcYRD64lAoiegezDunLpalZMjcUcld3YzZmVJ2H/Cp0wJRZ8Xtjtj0cEHhYc/uUg==} + '@esbuild/linux-x64@0.25.8': + resolution: {integrity: sha512-ASFQhgY4ElXh3nDcOMTkQero4b1lgubskNlhIfJrsH5OKZXDpUAKBlNS0Kx81jwOBp+HCeZqmoJuihTv57/jvQ==} engines: {node: '>=18'} cpu: [x64] os: [linux] - '@esbuild/netbsd-arm64@0.25.2': - resolution: {integrity: sha512-talAIBoY5M8vHc6EeI2WW9d/CkiO9MQJ0IOWX8hrLhxGbro/vBXJvaQXefW2cP0z0nQVTdQ/eNyGFV1GSKrxfw==} + '@esbuild/netbsd-arm64@0.25.8': + resolution: {integrity: sha512-d1KfruIeohqAi6SA+gENMuObDbEjn22olAR7egqnkCD9DGBG0wsEARotkLgXDu6c4ncgWTZJtN5vcgxzWRMzcw==} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-x64@0.25.2': - resolution: {integrity: sha512-voZT9Z+tpOxrvfKFyfDYPc4DO4rk06qamv1a/fkuzHpiVBMOhpjK+vBmWM8J1eiB3OLSMFYNaOaBNLXGChf5tg==} + '@esbuild/netbsd-x64@0.25.8': + resolution: {integrity: sha512-nVDCkrvx2ua+XQNyfrujIG38+YGyuy2Ru9kKVNyh5jAys6n+l44tTtToqHjino2My8VAY6Lw9H7RI73XFi66Cg==} engines: {node: '>=18'} cpu: [x64] os: [netbsd] - '@esbuild/openbsd-arm64@0.25.2': - resolution: {integrity: 
sha512-dcXYOC6NXOqcykeDlwId9kB6OkPUxOEqU+rkrYVqJbK2hagWOMrsTGsMr8+rW02M+d5Op5NNlgMmjzecaRf7Tg==} + '@esbuild/openbsd-arm64@0.25.8': + resolution: {integrity: sha512-j8HgrDuSJFAujkivSMSfPQSAa5Fxbvk4rgNAS5i3K+r8s1X0p1uOO2Hl2xNsGFppOeHOLAVgYwDVlmxhq5h+SQ==} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-x64@0.25.2': - resolution: {integrity: sha512-t/TkWwahkH0Tsgoq1Ju7QfgGhArkGLkF1uYz8nQS/PPFlXbP5YgRpqQR3ARRiC2iXoLTWFxc6DJMSK10dVXluw==} + '@esbuild/openbsd-x64@0.25.8': + resolution: {integrity: sha512-1h8MUAwa0VhNCDp6Af0HToI2TJFAn1uqT9Al6DJVzdIBAd21m/G0Yfc77KDM3uF3T/YaOgQq3qTJHPbTOInaIQ==} engines: {node: '>=18'} cpu: [x64] os: [openbsd] - '@esbuild/sunos-x64@0.25.2': - resolution: {integrity: sha512-cfZH1co2+imVdWCjd+D1gf9NjkchVhhdpgb1q5y6Hcv9TP6Zi9ZG/beI3ig8TvwT9lH9dlxLq5MQBBgwuj4xvA==} + '@esbuild/openharmony-arm64@0.25.8': + resolution: {integrity: sha512-r2nVa5SIK9tSWd0kJd9HCffnDHKchTGikb//9c7HX+r+wHYCpQrSgxhlY6KWV1nFo1l4KFbsMlHk+L6fekLsUg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.25.8': + resolution: {integrity: sha512-zUlaP2S12YhQ2UzUfcCuMDHQFJyKABkAjvO5YSndMiIkMimPmxA+BYSBikWgsRpvyxuRnow4nS5NPnf9fpv41w==} engines: {node: '>=18'} cpu: [x64] os: [sunos] - '@esbuild/win32-arm64@0.25.2': - resolution: {integrity: sha512-7Loyjh+D/Nx/sOTzV8vfbB3GJuHdOQyrOryFdZvPHLf42Tk9ivBU5Aedi7iyX+x6rbn2Mh68T4qq1SDqJBQO5Q==} + '@esbuild/win32-arm64@0.25.8': + resolution: {integrity: sha512-YEGFFWESlPva8hGL+zvj2z/SaK+pH0SwOM0Nc/d+rVnW7GSTFlLBGzZkuSU9kFIGIo8q9X3ucpZhu8PDN5A2sQ==} engines: {node: '>=18'} cpu: [arm64] os: [win32] - '@esbuild/win32-ia32@0.25.2': - resolution: {integrity: sha512-WRJgsz9un0nqZJ4MfhabxaD9Ft8KioqU3JMinOTvobbX6MOSUigSBlogP8QB3uxpJDsFS6yN+3FDBdqE5lg9kg==} + '@esbuild/win32-ia32@0.25.8': + resolution: {integrity: sha512-hiGgGC6KZ5LZz58OL/+qVVoZiuZlUYlYHNAmczOm7bs2oE1XriPFi5ZHHrS8ACpV5EjySrnoCKmcbQMN+ojnHg==} engines: {node: '>=18'} cpu: [ia32] os: [win32] - '@esbuild/win32-x64@0.25.2': - resolution: {integrity: sha512-kM3HKb16VIXZyIeVrM1ygYmZBKybX8N4p754bw390wGO3Tf2j4L2/WYL+4suWujpgf6GBYs3jv7TyUivdd05JA==} + '@esbuild/win32-x64@0.25.8': + resolution: {integrity: sha512-cn3Yr7+OaaZq1c+2pe+8yxC8E144SReCQjN6/2ynubzYjvyqZjTXfQJpAcQpsdJq3My7XADANiYGHoFC69pLQw==} engines: {node: '>=18'} cpu: [x64] os: [win32] - '@eslint-community/eslint-utils@4.5.1': - resolution: {integrity: sha512-soEIOALTfTK6EjmKMMoLugwaP0rzkad90iIWd1hMO9ARkSAyjfMfkRRhLvD5qH7vvM0Cg72pieUfR6yh6XxC4w==} + '@eslint-community/eslint-utils@4.7.0': + resolution: {integrity: sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 @@ -424,51 +427,51 @@ packages: resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - '@eslint/compat@1.2.7': - resolution: {integrity: sha512-xvv7hJE32yhegJ8xNAnb62ggiAwTYHBpUCWhRxEj/ksvgDJuSXfoDkBcRYaYNFiJ+jH0IE3K16hd+xXzhBgNbg==} + '@eslint/compat@1.3.1': + resolution: {integrity: sha512-k8MHony59I5EPic6EQTCNOuPoVBnoYXkP+20xvwFjN7t0qI3ImyvyBgg+hIVPwC8JaxVjjUZld+cLfBLFDLucg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - eslint: ^9.10.0 + eslint: ^8.40 || 9 peerDependenciesMeta: eslint: optional: true - '@eslint/config-array@0.19.2': - resolution: {integrity: 
sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==} + '@eslint/config-array@0.21.0': + resolution: {integrity: sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/config-helpers@0.2.0': - resolution: {integrity: sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ==} + '@eslint/config-helpers@0.3.0': + resolution: {integrity: sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@0.12.0': - resolution: {integrity: sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==} + '@eslint/core@0.15.1': + resolution: {integrity: sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/eslintrc@3.3.1': resolution: {integrity: sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.23.0': - resolution: {integrity: sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw==} + '@eslint/js@9.32.0': + resolution: {integrity: sha512-BBpRFZK3eX6uMLKz8WxFOBIFFcGFJ/g8XuwjTHCqHROSIsopI+ddn/d5Cfh36+7+e5edVS8dbSHnBNhrLEX0zg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/object-schema@2.1.6': resolution: {integrity: sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/plugin-kit@0.2.7': - resolution: {integrity: sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==} + '@eslint/plugin-kit@0.3.4': + resolution: {integrity: sha512-Ul5l+lHEcw3L5+k8POx6r74mxEYKG5kOb6Xpy2gCRW6zweT6TEhAf8vhxGgjhqrd/VO/Dirhsb+1hNpD1ue9hw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@floating-ui/core@1.6.9': - resolution: {integrity: sha512-uMXCuQ3BItDUbAMhIXw7UPXRfAlOAvZzdK9BWpE60MCn+Svt3aLn9jsPTi/WNGlRUu2uI0v5S7JiIUsbsvh3fw==} + '@floating-ui/core@1.7.3': + resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==} - '@floating-ui/dom@1.6.13': - resolution: {integrity: sha512-umqzocjDgNRGTuO7Q8CU32dkHkECqI8ZdMZ5Swb6QAM0t5rnlrN3lGo1hdpscRd3WS8T6DKYK4ephgIH9iRh3w==} + '@floating-ui/dom@1.7.2': + resolution: {integrity: sha512-7cfaOQuCS27HD7DX+6ib2OrnW+b4ZBwDNnCcT0uTyidcmyWb03FnQqJybDBoCnpdxwBSfA94UAYlRCt7mV+TbA==} - '@floating-ui/utils@0.2.9': - resolution: {integrity: sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==} + '@floating-ui/utils@0.2.10': + resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==} '@hey-api/openapi-ts@0.52.0': resolution: {integrity: sha512-DA3Zf5ONxMK1PUkK88lAuYbXMgn5BvU5sjJdTAO2YOn6Eu/9ovilBztMzvu8pyY44PmL3n4ex4+f+XIwvgfhvw==} @@ -493,20 +496,23 @@ packages: resolution: {integrity: sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==} engines: {node: '>=18.18'} - '@humanwhocodes/retry@0.4.2': - resolution: {integrity: sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ==} + '@humanwhocodes/retry@0.4.3': + resolution: 
{integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} engines: {node: '>=18.18'} - '@internationalized/date@3.7.0': - resolution: {integrity: sha512-VJ5WS3fcVx0bejE/YHfbDKR/yawZgKqn/if+oEeLqNwBtPzVB06olkfcnojTmEMX+gTpH+FlQ69SHNitJ8/erQ==} + '@internationalized/date@3.8.2': + resolution: {integrity: sha512-/wENk7CbvLbkUvX1tu0mwq49CVkkWpkXubGel6birjRPyo6uQ4nQpnq5xZu823zRCwwn82zgHrvgF1vZyvmVgA==} - '@internationalized/number@3.6.0': - resolution: {integrity: sha512-PtrRcJVy7nw++wn4W2OuePQQfTqDzfusSuY1QTtui4wa7r+rGVtR75pO8CyKvHvzyQYi3Q1uO5sY0AsB4e65Bw==} + '@internationalized/number@3.6.3': + resolution: {integrity: sha512-p+Zh1sb6EfrfVaS86jlHGQ9HA66fJhV9x5LiE5vCbZtXEHAuhcmUZUdZ4WrFpUBfNalr2OkAJI5AcKEQF+Lebw==} '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} + '@jridgewell/gen-mapping@0.3.12': + resolution: {integrity: sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==} + '@jridgewell/gen-mapping@0.3.8': resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==} engines: {node: '>=6.0.0'} @@ -522,9 +528,15 @@ packages: '@jridgewell/sourcemap-codec@1.5.0': resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} + '@jridgewell/sourcemap-codec@1.5.4': + resolution: {integrity: sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==} + '@jridgewell/trace-mapping@0.3.25': resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + '@jridgewell/trace-mapping@0.3.29': + resolution: {integrity: sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==} + '@jsdevtools/ono@7.1.3': resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} @@ -540,188 +552,191 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} - '@pandacss/is-valid-prop@0.41.0': - resolution: {integrity: sha512-BE6h6CsJk14ugIRrsazJtN3fcg+KDFRat1Bs93YFKH6jd4DOb1yUyVvC70jKqPVvg70zEcV8acZ7VdcU5TLu+w==} + '@pandacss/is-valid-prop@0.54.0': + resolution: {integrity: sha512-UhRgg1k9VKRCBAHl+XUK3lvN0k9bYifzYGZOqajDid4L1DyU813A1L0ZwN4iV9WX5TX3PfUugqtgG9LnIeFGBQ==} '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@pkgr/core@0.2.0': - resolution: {integrity: sha512-vsJDAkYR6qCPu+ioGScGiMYR7LvZYIXh/dlQeviqoTWNCVfKTLYD/LkNWH4Mxsv2a5vpIRc77FN5DnmK1eBggQ==} + '@pkgr/core@0.2.9': + resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@rollup/rollup-android-arm-eabi@4.39.0': - resolution: {integrity: sha512-lGVys55Qb00Wvh8DMAocp5kIcaNzEFTmGhfFd88LfaogYTRKrdxgtlO5H6S49v2Nd8R2C6wLOal0qv6/kCkOwA==} + '@rolldown/pluginutils@1.0.0-beta.27': + resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==} + + '@rollup/rollup-android-arm-eabi@4.46.2': + resolution: {integrity: 
sha512-Zj3Hl6sN34xJtMv7Anwb5Gu01yujyE/cLBDB2gnHTAHaWS1Z38L7kuSG+oAh0giZMqG060f/YBStXtMH6FvPMA==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.39.0': - resolution: {integrity: sha512-It9+M1zE31KWfqh/0cJLrrsCPiF72PoJjIChLX+rEcujVRCb4NLQ5QzFkzIZW8Kn8FTbvGQBY5TkKBau3S8cCQ==} + '@rollup/rollup-android-arm64@4.46.2': + resolution: {integrity: sha512-nTeCWY83kN64oQ5MGz3CgtPx8NSOhC5lWtsjTs+8JAJNLcP3QbLCtDDgUKQc/Ro/frpMq4SHUaHN6AMltcEoLQ==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.39.0': - resolution: {integrity: sha512-lXQnhpFDOKDXiGxsU9/l8UEGGM65comrQuZ+lDcGUx+9YQ9dKpF3rSEGepyeR5AHZ0b5RgiligsBhWZfSSQh8Q==} + '@rollup/rollup-darwin-arm64@4.46.2': + resolution: {integrity: sha512-HV7bW2Fb/F5KPdM/9bApunQh68YVDU8sO8BvcW9OngQVN3HHHkw99wFupuUJfGR9pYLLAjcAOA6iO+evsbBaPQ==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.39.0': - resolution: {integrity: sha512-mKXpNZLvtEbgu6WCkNij7CGycdw9cJi2k9v0noMb++Vab12GZjFgUXD69ilAbBh034Zwn95c2PNSz9xM7KYEAQ==} + '@rollup/rollup-darwin-x64@4.46.2': + resolution: {integrity: sha512-SSj8TlYV5nJixSsm/y3QXfhspSiLYP11zpfwp6G/YDXctf3Xkdnk4woJIF5VQe0of2OjzTt8EsxnJDCdHd2xMA==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.39.0': - resolution: {integrity: sha512-jivRRlh2Lod/KvDZx2zUR+I4iBfHcu2V/BA2vasUtdtTN2Uk3jfcZczLa81ESHZHPHy4ih3T/W5rPFZ/hX7RtQ==} + '@rollup/rollup-freebsd-arm64@4.46.2': + resolution: {integrity: sha512-ZyrsG4TIT9xnOlLsSSi9w/X29tCbK1yegE49RYm3tu3wF1L/B6LVMqnEWyDB26d9Ecx9zrmXCiPmIabVuLmNSg==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.39.0': - resolution: {integrity: sha512-8RXIWvYIRK9nO+bhVz8DwLBepcptw633gv/QT4015CpJ0Ht8punmoHU/DuEd3iw9Hr8UwUV+t+VNNuZIWYeY7Q==} + '@rollup/rollup-freebsd-x64@4.46.2': + resolution: {integrity: sha512-pCgHFoOECwVCJ5GFq8+gR8SBKnMO+xe5UEqbemxBpCKYQddRQMgomv1104RnLSg7nNvgKy05sLsY51+OVRyiVw==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.39.0': - resolution: {integrity: sha512-mz5POx5Zu58f2xAG5RaRRhp3IZDK7zXGk5sdEDj4o96HeaXhlUwmLFzNlc4hCQi5sGdR12VDgEUqVSHer0lI9g==} + '@rollup/rollup-linux-arm-gnueabihf@4.46.2': + resolution: {integrity: sha512-EtP8aquZ0xQg0ETFcxUbU71MZlHaw9MChwrQzatiE8U/bvi5uv/oChExXC4mWhjiqK7azGJBqU0tt5H123SzVA==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.39.0': - resolution: {integrity: sha512-+YDwhM6gUAyakl0CD+bMFpdmwIoRDzZYaTWV3SDRBGkMU/VpIBYXXEvkEcTagw/7VVkL2vA29zU4UVy1mP0/Yw==} + '@rollup/rollup-linux-arm-musleabihf@4.46.2': + resolution: {integrity: sha512-qO7F7U3u1nfxYRPM8HqFtLd+raev2K137dsV08q/LRKRLEc7RsiDWihUnrINdsWQxPR9jqZ8DIIZ1zJJAm5PjQ==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.39.0': - resolution: {integrity: sha512-EKf7iF7aK36eEChvlgxGnk7pdJfzfQbNvGV/+l98iiMwU23MwvmV0Ty3pJ0p5WQfm3JRHOytSIqD9LB7Bq7xdQ==} + '@rollup/rollup-linux-arm64-gnu@4.46.2': + resolution: {integrity: sha512-3dRaqLfcOXYsfvw5xMrxAk9Lb1f395gkoBYzSFcc/scgRFptRXL9DOaDpMiehf9CO8ZDRJW2z45b6fpU5nwjng==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.39.0': - resolution: {integrity: sha512-vYanR6MtqC7Z2SNr8gzVnzUul09Wi1kZqJaek3KcIlI/wq5Xtq4ZPIZ0Mr/st/sv/NnaPwy/D4yXg5x0B3aUUA==} + '@rollup/rollup-linux-arm64-musl@4.46.2': + resolution: {integrity: sha512-fhHFTutA7SM+IrR6lIfiHskxmpmPTJUXpWIsBXpeEwNgZzZZSg/q4i6FU4J8qOGyJ0TR+wXBwx/L7Ho9z0+uDg==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loongarch64-gnu@4.39.0': - resolution: {integrity: sha512-NMRUT40+h0FBa5fb+cpxtZoGAggRem16ocVKIv5gDB5uLDgBIwrIsXlGqYbLwW8YyO3WVTk1FkFDjMETYlDqiw==} + '@rollup/rollup-linux-loongarch64-gnu@4.46.2': + 
resolution: {integrity: sha512-i7wfGFXu8x4+FRqPymzjD+Hyav8l95UIZ773j7J7zRYc3Xsxy2wIn4x+llpunexXe6laaO72iEjeeGyUFmjKeA==} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-powerpc64le-gnu@4.39.0': - resolution: {integrity: sha512-0pCNnmxgduJ3YRt+D+kJ6Ai/r+TaePu9ZLENl+ZDV/CdVczXl95CbIiwwswu4L+K7uOIGf6tMo2vm8uadRaICQ==} + '@rollup/rollup-linux-ppc64-gnu@4.46.2': + resolution: {integrity: sha512-B/l0dFcHVUnqcGZWKcWBSV2PF01YUt0Rvlurci5P+neqY/yMKchGU8ullZvIv5e8Y1C6wOn+U03mrDylP5q9Yw==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.39.0': - resolution: {integrity: sha512-t7j5Zhr7S4bBtksT73bO6c3Qa2AV/HqiGlj9+KB3gNF5upcVkx+HLgxTm8DK4OkzsOYqbdqbLKwvGMhylJCPhQ==} + '@rollup/rollup-linux-riscv64-gnu@4.46.2': + resolution: {integrity: sha512-32k4ENb5ygtkMwPMucAb8MtV8olkPT03oiTxJbgkJa7lJ7dZMr0GCFJlyvy+K8iq7F/iuOr41ZdUHaOiqyR3iQ==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-riscv64-musl@4.39.0': - resolution: {integrity: sha512-m6cwI86IvQ7M93MQ2RF5SP8tUjD39Y7rjb1qjHgYh28uAPVU8+k/xYWvxRO3/tBN2pZkSMa5RjnPuUIbrwVxeA==} + '@rollup/rollup-linux-riscv64-musl@4.46.2': + resolution: {integrity: sha512-t5B2loThlFEauloaQkZg9gxV05BYeITLvLkWOkRXogP4qHXLkWSbSHKM9S6H1schf/0YGP/qNKtiISlxvfmmZw==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.39.0': - resolution: {integrity: sha512-iRDJd2ebMunnk2rsSBYlsptCyuINvxUfGwOUldjv5M4tpa93K8tFMeYGpNk2+Nxl+OBJnBzy2/JCscGeO507kA==} + '@rollup/rollup-linux-s390x-gnu@4.46.2': + resolution: {integrity: sha512-YKjekwTEKgbB7n17gmODSmJVUIvj8CX7q5442/CK80L8nqOUbMtf8b01QkG3jOqyr1rotrAnW6B/qiHwfcuWQA==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.39.0': - resolution: {integrity: sha512-t9jqYw27R6Lx0XKfEFe5vUeEJ5pF3SGIM6gTfONSMb7DuG6z6wfj2yjcoZxHg129veTqU7+wOhY6GX8wmf90dA==} + '@rollup/rollup-linux-x64-gnu@4.46.2': + resolution: {integrity: sha512-Jj5a9RUoe5ra+MEyERkDKLwTXVu6s3aACP51nkfnK9wJTraCC8IMe3snOfALkrjTYd2G1ViE1hICj0fZ7ALBPA==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.39.0': - resolution: {integrity: sha512-ThFdkrFDP55AIsIZDKSBWEt/JcWlCzydbZHinZ0F/r1h83qbGeenCt/G/wG2O0reuENDD2tawfAj2s8VK7Bugg==} + '@rollup/rollup-linux-x64-musl@4.46.2': + resolution: {integrity: sha512-7kX69DIrBeD7yNp4A5b81izs8BqoZkCIaxQaOpumcJ1S/kmqNFjPhDu1LHeVXv0SexfHQv5cqHsxLOjETuqDuA==} cpu: [x64] os: [linux] - '@rollup/rollup-win32-arm64-msvc@4.39.0': - resolution: {integrity: sha512-jDrLm6yUtbOg2TYB3sBF3acUnAwsIksEYjLeHL+TJv9jg+TmTwdyjnDex27jqEMakNKf3RwwPahDIt7QXCSqRQ==} + '@rollup/rollup-win32-arm64-msvc@4.46.2': + resolution: {integrity: sha512-wiJWMIpeaak/jsbaq2HMh/rzZxHVW1rU6coyeNNpMwk5isiPjSTx0a4YLSlYDwBH/WBvLz+EtsNqQScZTLJy3g==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.39.0': - resolution: {integrity: sha512-6w9uMuza+LbLCVoNKL5FSLE7yvYkq9laSd09bwS0tMjkwXrmib/4KmoJcrKhLWHvw19mwU+33ndC69T7weNNjQ==} + '@rollup/rollup-win32-ia32-msvc@4.46.2': + resolution: {integrity: sha512-gBgaUDESVzMgWZhcyjfs9QFK16D8K6QZpwAaVNJxYDLHWayOta4ZMjGm/vsAEy3hvlS2GosVFlBlP9/Wb85DqQ==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.39.0': - resolution: {integrity: sha512-yAkUOkIKZlK5dl7u6dg897doBgLXmUHhIINM2c+sND3DZwnrdQkkSiDh7N75Ll4mM4dxSkYfXqU9fW3lLkMFug==} + '@rollup/rollup-win32-x64-msvc@4.46.2': + resolution: {integrity: sha512-CvUo2ixeIQGtF6WvuB87XWqPQkoFAFqW+HUo/WzHwuHDvIwZCtjdWXoYCcr06iKGydiqTclC4jU/TNObC/xKZg==} cpu: [x64] os: [win32] - '@stylistic/eslint-plugin@2.13.0': - resolution: {integrity: sha512-RnO1SaiCFHn666wNz2QfZEFxvmiNRqhzaMXHXxXXKt+MEP7aajlPxUSMIQpKAaJfverpovEYqjBOXDq6dDcaOQ==} + 
'@stylistic/eslint-plugin@5.2.2': + resolution: {integrity: sha512-bE2DUjruqXlHYP3Q2Gpqiuj2bHq7/88FnuaS0FjeGGLCy+X6a07bGVuwtiOYnPSLHR6jmx5Bwdv+j7l8H+G97A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - eslint: '>=8.40.0' + eslint: '>=9.0.0' - '@swc/core-darwin-arm64@1.11.12': - resolution: {integrity: sha512-x+iljeyIaVq7VCAy9pM0rqAb9GKA1cqDkqCxgFDxH3rcH+ykZa12vkDlTwysgkfLV8pr0KhCRHkwY+iAqPbO9g==} + '@swc/core-darwin-arm64@1.13.1': + resolution: {integrity: sha512-zO6SW/jSMTUORPm6dUZFPUwf+EFWZsaXWMGXadRG6akCofYpoQb8pcY2QZkVr43z8TMka6BtXpyoD/DJ0iOPHQ==} engines: {node: '>=10'} cpu: [arm64] os: [darwin] - '@swc/core-darwin-x64@1.11.12': - resolution: {integrity: sha512-DwTXPdhJ/+scUR1iWttu3p0q8b5omF71xWFCw6UC99QBJQ4femmRtZNacgdiBkxZ5IbUlxd8m5UzMBc/+H5rWw==} + '@swc/core-darwin-x64@1.13.1': + resolution: {integrity: sha512-8RjaTZYxrlYKE5PgzZYWSOT4mAsyhIuh30Nu4dnn/2r0Ef68iNCbvX4ynGnFMhOIhqunjQbJf+mJKpwTwdHXhw==} engines: {node: '>=10'} cpu: [x64] os: [darwin] - '@swc/core-linux-arm-gnueabihf@1.11.12': - resolution: {integrity: sha512-ls9b3lX2x3tnJKGn6zSDFK1ohdmdUkE6nwqrVmdzqAwr/Q5i2ij/dmkOFCloItc2PHNVtRGGsC4+FYSm1EBLjg==} + '@swc/core-linux-arm-gnueabihf@1.13.1': + resolution: {integrity: sha512-jEqK6pECs2m4BpL2JA/4CCkq04p6iFOEtVNXTisO+lJ3zwmxlnIEm9UfJZG6VSu8GS9MHRKGB0ieZ1tEdN1qDA==} engines: {node: '>=10'} cpu: [arm] os: [linux] - '@swc/core-linux-arm64-gnu@1.11.12': - resolution: {integrity: sha512-F0nMLl5kYbew5GjHq7B21poE5VOPgSsoQ0VEXd4Fji3rR0d0gLoK2r+JP92XmpRxAzdzpdak1DQczWMyf2BQAQ==} + '@swc/core-linux-arm64-gnu@1.13.1': + resolution: {integrity: sha512-PbkuIOYXO/gQbWQ7NnYIwm59ygNqmUcF8LBeoKvxhx1VtOwE+9KiTfoplOikkPLhMiTzKsd8qentTslbITIg+Q==} engines: {node: '>=10'} cpu: [arm64] os: [linux] - '@swc/core-linux-arm64-musl@1.11.12': - resolution: {integrity: sha512-3dlHowBgYBgi23ZBSvFHe/tD3PowEhxfVAy08NckWBeaG/e4dyrYMhAiccfuy6jkDYXEF1L2DtpRtxGImxoaPg==} + '@swc/core-linux-arm64-musl@1.13.1': + resolution: {integrity: sha512-JaqFdBCarIBKiMu5bbAp+kWPMNGg97ej+7KzbKOzWP5pRptqKi86kCDZT3WmjPe8hNG6dvBwbm7Y8JNry5LebQ==} engines: {node: '>=10'} cpu: [arm64] os: [linux] - '@swc/core-linux-x64-gnu@1.11.12': - resolution: {integrity: sha512-ToEWzLA5lXlYCbGNzMow6+uy4zhpXKQyFb3RHM8AYVb0n4pNPWvwF+8ybWDimeGBBaHJLgRQsUMuJ4NV6urSrA==} + '@swc/core-linux-x64-gnu@1.13.1': + resolution: {integrity: sha512-t4cLkku10YECDaakWUH0452WJHIZtrLPRwezt6BdoMntVMwNjvXRX7C8bGuYcKC3YxRW7enZKFpozLhQIQ37oA==} engines: {node: '>=10'} cpu: [x64] os: [linux] - '@swc/core-linux-x64-musl@1.11.12': - resolution: {integrity: sha512-N5xF+MDZr79e8gvVXX3YP1bMeaRL16Kst/R7bGUQvvCq1UGD86qMUtSr5KfCl0h5SNKP2YKtkN98HQLnGEikow==} + '@swc/core-linux-x64-musl@1.13.1': + resolution: {integrity: sha512-fSMwZOaG+3ukUucbEbzz9GhzGhUhXoCPqHe9qW0/Vc2IZRp538xalygKyZynYweH5d9EHux1aj3+IO8/xBaoiA==} engines: {node: '>=10'} cpu: [x64] os: [linux] - '@swc/core-win32-arm64-msvc@1.11.12': - resolution: {integrity: sha512-/PYiyYWSQRtMoOamMfhAfq0y3RWk9LpUZ49yetJn2XI85TRkL5u2DTLLNkTPvoTiCfo0eZOJF9t5b7Z6ly0iHQ==} + '@swc/core-win32-arm64-msvc@1.13.1': + resolution: {integrity: sha512-tweCXK/79vAwj1NhAsYgICy8T1z2QEairmN2BFEBYFBFNMEB1iI1YlXwBkBtuihRvgZrTh1ORusKa4jLYzLCZA==} engines: {node: '>=10'} cpu: [arm64] os: [win32] - '@swc/core-win32-ia32-msvc@1.11.12': - resolution: {integrity: sha512-Dxm6W4p0YVNIPnYh/Kf/9zPeaD6sVAGDQN+2c52l4m/4gR5aDgE+xg6k5lAt4ok7LDXInL3n1nwYEG7Tc4JcSQ==} + '@swc/core-win32-ia32-msvc@1.13.1': + resolution: {integrity: sha512-zi7hO9D+2R2yQN9D7T10/CAI9KhuXkNkz8tcJOW6+dVPtAk/gsIC5NoGPELjgrAlLL9CS38ZQpLDslLfpP15ng==} 
engines: {node: '>=10'} cpu: [ia32] os: [win32] - '@swc/core-win32-x64-msvc@1.11.12': - resolution: {integrity: sha512-PP8RSJTcda5nUHJGkbKeQ20OC+L2LxcbjYpyha1OqIFyu/qWG9zMMYVaTLKJL7zsJ14pIM/mpS3u+CJARQ+Hzw==} + '@swc/core-win32-x64-msvc@1.13.1': + resolution: {integrity: sha512-KubYjzqs/nz3H69ncX/XHKsC8c1xqc7UvonQAj26BhbL22HBsqdAaVutZ+Obho6RMpd3F5qQ95ldavUTWskRrw==} engines: {node: '>=10'} cpu: [x64] os: [win32] - '@swc/core@1.11.12': - resolution: {integrity: sha512-Jwx9JH1O6Vm7BS9AEPLlquJNSy6Lbt/kiJIlxSslDuBLeDJD13lXQfitvazqgRwGEHx1QmwEq8mc0OSristtRw==} + '@swc/core@1.13.1': + resolution: {integrity: sha512-jEKKErLC6uwSqA+p6bmZR08usZM5Fpc+HdEu5CAzvye0q43yf1si1kjhHEa9XMkz0A2SAaal3eKCg/YYmtOsCA==} engines: {node: '>=10'} peerDependencies: - '@swc/helpers': '*' + '@swc/helpers': '>=0.5.17' peerDependenciesMeta: '@swc/helpers': optional: true @@ -729,17 +744,17 @@ packages: '@swc/counter@0.1.3': resolution: {integrity: sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==} - '@swc/helpers@0.5.15': - resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==} + '@swc/helpers@0.5.17': + resolution: {integrity: sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==} - '@swc/types@0.1.19': - resolution: {integrity: sha512-WkAZaAfj44kh/UFdAQcrMP1I0nwRqpt27u+08LMBYMqmQfwwMofYoMh/48NGkMMRfC4ynpfwRbJuu8ErfNloeA==} + '@swc/types@0.1.23': + resolution: {integrity: sha512-u1iIVZV9Q0jxY+yM2vw/hZGDNudsN85bBpTqzAQ9rzkxW9D+e3aEM4Han+ow518gSewkXgjmEK0BD79ZcNVgPw==} - '@tanstack/query-core@5.71.1': - resolution: {integrity: sha512-4+ZswCHOfJX+ikhXNoocamTUmJcHtB+Ljjz/oJkC7/eKB5IrzEwR4vEwZUENiPi+wISucJHR5TUbuuJ26w3kdQ==} + '@tanstack/query-core@5.83.1': + resolution: {integrity: sha512-OG69LQgT7jSp+5pPuCfzltq/+7l2xoweggjme9vlbCPa/d7D7zaqv5vN/S82SzSYZ4EDLTxNO1PWrv49RAS64Q==} - '@tanstack/react-query@5.71.1': - resolution: {integrity: sha512-6BTkaSIGT58MroI4kIGXNdx/NhirXPU+75AJObLq+WBa39WmoxhzSk0YX+hqWJ/bvqZJFxslbEU4qIHaRZq+8Q==} + '@tanstack/react-query@5.84.1': + resolution: {integrity: sha512-zo7EUygcWJMQfFNWDSG7CBhy8irje/XY0RDVKKV4IQJAysb+ZJkkJPcnQi+KboyGUgT+SQebRFoTqLuTtfoDLw==} peerDependencies: react: ^18 || ^19 @@ -747,12 +762,12 @@ packages: resolution: {integrity: sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==} engines: {node: '>=18'} - '@testing-library/jest-dom@6.6.3': - resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==} + '@testing-library/jest-dom@6.6.4': + resolution: {integrity: sha512-xDXgLjVunjHqczScfkCJ9iyjdNOVHvvCdqHSSxwM9L0l/wHkTRum67SDc020uAlCoqktJplgO2AAQeLP1wgqDQ==} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} - '@testing-library/react@16.2.0': - resolution: {integrity: sha512-2cSskAvA1QNtKc8Y9VJQRv0tm3hLVgxRGDB+KYhIaPQJ1I+RHbhIXcM+zClKXzMes/wshsMVzf4B9vS4IZpqDQ==} + '@testing-library/react@16.3.0': + resolution: {integrity: sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==} engines: {node: '>=18'} peerDependencies: '@testing-library/dom': ^10.0.0 @@ -766,14 +781,21 @@ packages: '@types/react-dom': optional: true - '@trivago/prettier-plugin-sort-imports@4.3.0': - resolution: {integrity: sha512-r3n0onD3BTOVUNPhR4lhVK4/pABGpbA7bW3eumZnYdKaHkf1qEC+Mag6DPbGNuuh0eG8AaYj+YqmVHSiGslaTQ==} + '@trivago/prettier-plugin-sort-imports@5.2.2': + resolution: {integrity: 
sha512-fYDQA9e6yTNmA13TLVSA+WMQRc5Bn/c0EUBditUHNfMMxN7M82c38b1kEggVE3pLpZ0FwkwJkUEKMiOi52JXFA==} + engines: {node: '>18.12'} peerDependencies: '@vue/compiler-sfc': 3.x prettier: 2.x - 3.x + prettier-plugin-svelte: 3.x + svelte: 4.x || 5.x peerDependenciesMeta: '@vue/compiler-sfc': optional: true + prettier-plugin-svelte: + optional: true + svelte: + optional: true '@ts-morph/common@0.23.0': resolution: {integrity: sha512-m7Lllj9n/S6sOkCkRftpM7L24uvmfXQFedlW/4hENcuJH1HHm9u5EgxZb9uVjQSCGrbBWBkOGgcTxNg36r6ywA==} @@ -781,14 +803,14 @@ packages: '@types/aria-query@5.0.4': resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} - '@types/cookie@0.6.0': - resolution: {integrity: sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==} + '@types/chai@5.2.2': + resolution: {integrity: sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==} - '@types/estree@1.0.6': - resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} + '@types/deep-eql@4.0.2': + resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==} - '@types/estree@1.0.7': - resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==} + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} '@types/hoist-non-react-statics@3.3.6': resolution: {integrity: sha512-lPByRJUer/iN/xa4qpyL0qmL11DqNW81iU/IG1S3uvRUq4oKagz8VCxZjiWkumgt66YT3vOdDgZ0o32sGKtCEw==} @@ -796,313 +818,373 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - '@types/normalize-package-data@2.4.4': - resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} + '@types/node@20.19.1': + resolution: {integrity: sha512-jJD50LtlD2dodAEO653i3YF04NWak6jN3ky+Ri3Em3mGR39/glWiboM/IePaRbgwSfqM1TpGXfAg8ohn/4dTgA==} '@types/parse-json@4.0.2': resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==} - '@types/prop-types@15.7.14': - resolution: {integrity: sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ==} + '@types/react-dom@19.1.7': + resolution: {integrity: sha512-i5ZzwYpqjmrKenzkoLM2Ibzt6mAsM7pxB6BCIouEVVmgiqaMj1TjaK7hnA36hbW5aZv20kx7Lw6hWzPWg0Rurw==} + peerDependencies: + '@types/react': ^19.0.0 - '@types/react-dom@19.0.0': - resolution: {integrity: sha512-1KfiQKsH1o00p9m5ag12axHQSb3FOU9H20UTrujVSkNhuCrRHiQWFqgEnTNK5ZNfnzZv8UWrnXVqCmCF9fgY3w==} + '@types/react@19.1.9': + resolution: {integrity: sha512-WmdoynAX8Stew/36uTSVMcLJJ1KRh6L3IZRx1PZ7qJtBqT3dYTgyDTx8H1qoRghErydW7xw9mSJ3wS//tCRpFA==} - '@types/react@18.3.20': - resolution: {integrity: sha512-IPaCZN7PShZK/3t6Q87pfTkRm6oLTd4vztyoj+cbHUF1g3FfVb2tFIL79uCRKEfv16AhqDMBywP2VW3KIZUvcg==} + '@types/whatwg-mimetype@3.0.2': + resolution: {integrity: sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==} - '@typescript-eslint/eslint-plugin@8.28.0': - resolution: {integrity: sha512-lvFK3TCGAHsItNdWZ/1FkvpzCxTHUVuFrdnOGLMa0GGCFIbCgQWVk3CzCGdA7kM3qGVc+dfW9tr0Z/sHnGDFyg==} + 
'@typescript-eslint/eslint-plugin@8.38.0': + resolution: {integrity: sha512-CPoznzpuAnIOl4nhj4tRr4gIPj5AfKgkiJmGQDaq+fQnRJTYlcBjbX3wbciGmpoPf8DREufuPRe1tNMZnGdanA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.0.0 || ^8.0.0-alpha.0 + '@typescript-eslint/parser': ^8.38.0 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/parser@8.28.0': - resolution: {integrity: sha512-LPcw1yHD3ToaDEoljFEfQ9j2xShY367h7FZ1sq5NJT9I3yj4LHer1Xd1yRSOdYy9BpsrxU7R+eoDokChYM53lQ==} + '@typescript-eslint/parser@8.38.0': + resolution: {integrity: sha512-Zhy8HCvBUEfBECzIl1PKqF4p11+d0aUJS1GeUiuqK9WmOug8YCmC4h4bjyBvMyAMI9sbRczmrYL5lKg/YMbrcQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/scope-manager@8.28.0': - resolution: {integrity: sha512-u2oITX3BJwzWCapoZ/pXw6BCOl8rJP4Ij/3wPoGvY8XwvXflOzd1kLrDUUUAIEdJSFh+ASwdTHqtan9xSg8buw==} + '@typescript-eslint/project-service@8.34.1': + resolution: {integrity: sha512-nuHlOmFZfuRwLJKDGQOVc0xnQrAmuq1Mj/ISou5044y1ajGNp2BNliIqp7F2LPQ5sForz8lempMFCovfeS1XoA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/type-utils@8.28.0': - resolution: {integrity: sha512-oRoXu2v0Rsy/VoOGhtWrOKDiIehvI+YNrDk5Oqj40Mwm0Yt01FC/Q7nFqg088d3yAsR1ZcZFVfPCTTFCe/KPwg==} + '@typescript-eslint/project-service@8.38.0': + resolution: {integrity: sha512-dbK7Jvqcb8c9QfH01YB6pORpqX1mn5gDZc9n63Ak/+jD67oWXn3Gs0M6vddAN+eDXBCS5EmNWzbSxsn9SzFWWg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/scope-manager@8.34.1': + resolution: {integrity: sha512-beu6o6QY4hJAgL1E8RaXNC071G4Kso2MGmJskCFQhRhg8VOH/FDbC8soP8NHN7e/Hdphwp8G8cE6OBzC8o41ZA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/scope-manager@8.38.0': + resolution: {integrity: sha512-WJw3AVlFFcdT9Ri1xs/lg8LwDqgekWXWhH3iAF+1ZM+QPd7oxQ6jvtW/JPwzAScxitILUIFs0/AnQ/UWHzbATQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/tsconfig-utils@8.34.1': + resolution: {integrity: sha512-K4Sjdo4/xF9NEeA2khOb7Y5nY6NSXBnod87uniVYW9kHP+hNlDV8trUSFeynA2uxWam4gIWgWoygPrv9VMWrYg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/tsconfig-utils@8.38.0': + resolution: {integrity: sha512-Lum9RtSE3EroKk/bYns+sPOodqb2Fv50XOl/gMviMKNvanETUuUcC9ObRbzrJ4VSd2JalPqgSAavwrPiPvnAiQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/type-utils@8.38.0': + resolution: {integrity: sha512-c7jAvGEZVf0ao2z+nnz8BUaHZD09Agbh+DY7qvBQqLiz8uJzRgVPj5YvOh8I8uEiH8oIUGIfHzMwUcGVco/SJg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/types@8.28.0': - resolution: {integrity: sha512-bn4WS1bkKEjx7HqiwG2JNB3YJdC1q6Ue7GyGlwPHyt0TnVq6TtD/hiOdTZt71sq0s7UzqBFXD8t8o2e63tXgwA==} + '@typescript-eslint/types@8.34.1': + resolution: {integrity: sha512-rjLVbmE7HR18kDsjNIZQHxmv9RZwlgzavryL5Lnj2ujIRTeXlKtILHgRNmQ3j4daw7zd+mQgy+uyt6Zo6I0IGA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/types@8.38.0': + resolution: {integrity: sha512-wzkUfX3plUqij4YwWaJyqhiPE5UCRVlFpKn1oCRn2O1bJ592XxWJj8ROQ3JD5MYXLORW84063z3tZTb/cs4Tyw==} engines: {node: ^18.18.0 || ^20.9.0 || 
>=21.1.0} - '@typescript-eslint/typescript-estree@8.28.0': - resolution: {integrity: sha512-H74nHEeBGeklctAVUvmDkxB1mk+PAZ9FiOMPFncdqeRBXxk1lWSYraHw8V12b7aa6Sg9HOBNbGdSHobBPuQSuA==} + '@typescript-eslint/typescript-estree@8.34.1': + resolution: {integrity: sha512-rjCNqqYPuMUF5ODD+hWBNmOitjBWghkGKJg6hiCHzUvXRy6rK22Jd3rwbP2Xi+R7oYVvIKhokHVhH41BxPV5mA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/typescript-estree@8.38.0': + resolution: {integrity: sha512-fooELKcAKzxux6fA6pxOflpNS0jc+nOQEEOipXFNjSlBS6fqrJOVY/whSn70SScHrcJ2LDsxWrneFoWYSVfqhQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/utils@8.34.1': + resolution: {integrity: sha512-mqOwUdZ3KjtGk7xJJnLbHxTuWVn3GO2WZZuM+Slhkun4+qthLdXx32C8xIXbO1kfCECb3jIs3eoxK3eryk7aoQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: + eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/utils@8.28.0': - resolution: {integrity: sha512-OELa9hbTYciYITqgurT1u/SzpQVtDLmQMFzy/N8pQE+tefOyCWT79jHsav294aTqV1q1u+VzqDGbuujvRYaeSQ==} + '@typescript-eslint/utils@8.38.0': + resolution: {integrity: sha512-hHcMA86Hgt+ijJlrD8fX0j1j8w4C92zue/8LOPAFioIno+W0+L7KqE8QZKCcPGc/92Vs9x36w/4MPTJhqXdyvg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/visitor-keys@8.28.0': - resolution: {integrity: sha512-hbn8SZ8w4u2pRwgQ1GlUrPKE+t2XvcCW5tTRF7j6SMYIuYG37XuzIW44JCZPa36evi0Oy2SnM664BlIaAuQcvg==} + '@typescript-eslint/visitor-keys@8.34.1': + resolution: {integrity: sha512-xoh5rJ+tgsRKoXnkBPFRLZ7rjKM0AfVbC68UZ/ECXoDbfggb9RbEySN359acY1vS3qZ0jVTVWzbtfapwm5ztxw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/visitor-keys@8.38.0': + resolution: {integrity: sha512-pWrTcoFNWuwHlA9CvlfSsGWs14JxfN1TH25zM5L7o0pRLhsoZkDnTsXfQRJBEWJoV5DL0jf+Z+sxiud+K0mq1g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@vitejs/plugin-react-swc@3.8.1': - resolution: {integrity: sha512-aEUPCckHDcFyxpwFm0AIkbtv6PpUp3xTb9wYGFjtABynXjCYKkWoxX0AOK9NT9XCrdk6mBBUOeHQS+RKdcNO1A==} + '@vitejs/plugin-react-swc@3.11.0': + resolution: {integrity: sha512-YTJCGFdNMHCMfjODYtxRNVAYmTWQ1Lb8PulP/2/f/oEEtglw8oKxKIZmmRkyXrVrHfsKOaVkAc3NT9/dMutO5w==} peerDependencies: - vite: ^4 || ^5 || ^6 + vite: ^4 || ^5 || ^6 || ^7 - '@vitest/expect@3.0.9': - resolution: {integrity: sha512-5eCqRItYgIML7NNVgJj6TVCmdzE7ZVgJhruW0ziSQV4V7PvLkDL1bBkBdcTs/VuIz0IxPb5da1IDSqc1TR9eig==} + '@vitest/expect@3.2.4': + resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==} - '@vitest/mocker@3.0.9': - resolution: {integrity: sha512-ryERPIBOnvevAkTq+L1lD+DTFBRcjueL9lOUfXsLfwP92h4e+Heb+PjiqS3/OURWPtywfafK0kj++yDFjWUmrA==} + '@vitest/mocker@3.2.4': + resolution: {integrity: sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==} peerDependencies: msw: ^2.4.9 - vite: ^5.0.0 || ^6.0.0 + vite: ^5.0.0 || ^6.0.0 || ^7.0.0-0 peerDependenciesMeta: msw: optional: true vite: optional: true - '@vitest/pretty-format@3.0.9': - resolution: {integrity: sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==} + '@vitest/pretty-format@3.2.4': + resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==} - '@vitest/runner@3.0.9': - resolution: 
{integrity: sha512-NX9oUXgF9HPfJSwl8tUZCMP1oGx2+Sf+ru6d05QjzQz4OwWg0psEzwY6VexP2tTHWdOkhKHUIZH+fS6nA7jfOw==} + '@vitest/runner@3.2.4': + resolution: {integrity: sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==} - '@vitest/snapshot@3.0.9': - resolution: {integrity: sha512-AiLUiuZ0FuA+/8i19mTYd+re5jqjEc2jZbgJ2up0VY0Ddyyxg/uUtBDpIFAy4uzKaQxOW8gMgBdAJJ2ydhu39A==} + '@vitest/snapshot@3.2.4': + resolution: {integrity: sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==} - '@vitest/spy@3.0.9': - resolution: {integrity: sha512-/CcK2UDl0aQ2wtkp3YVWldrpLRNCfVcIOFGlVGKO4R5eajsH393Z1yiXLVQ7vWsj26JOEjeZI0x5sm5P4OGUNQ==} + '@vitest/spy@3.2.4': + resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==} - '@vitest/utils@3.0.9': - resolution: {integrity: sha512-ilHM5fHhZ89MCp5aAaM9uhfl1c2JdxVxl3McqsdVyVNN6JffnEen8UMCdRTzOhGXNQGo5GNL9QugHrz727Wnng==} + '@vitest/utils@3.2.4': + resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} - '@zag-js/accordion@1.7.0': - resolution: {integrity: sha512-LNJOjLTW2KwrToXBrXIbNIAiISA94n0AdWp14H8RrskdokywmEGiC0GgWTGEJ7DNA6TGP6Ae5o9rJ4fHSmCsDQ==} + '@zag-js/accordion@1.21.0': + resolution: {integrity: sha512-YuuQs72AmA52Hn30l3Q8KyFDb75g9glFV7AZkUq8V52vtUsdz2PfJye1FPD06M2dnnhHjEbdTQch6Qwwe5ApBA==} - '@zag-js/anatomy@1.7.0': - resolution: {integrity: sha512-fkRgH6vPCwykmRdV38uAJeTtJc8tayAnURfoovHAtB9bK0goagPbpdcYTNyGn8msul0h+KBloOtnw4obvX0nPw==} + '@zag-js/anatomy@1.21.0': + resolution: {integrity: sha512-wL5mmewTR8FJd91ZbfwiXpoMJbaQr1F1fFDel5BJgQukScNzd53HS5zhYb15eqJIOR6tlk/itPiJkxPp/+HdcQ==} - '@zag-js/aria-hidden@1.7.0': - resolution: {integrity: sha512-YNbACFZoqw/1JymxCZXtuAFdeYZm7sK3E0jv3bPbqytPj7TziLa1dRDWDdx8cPcu0B4n4WrBMBSCGUjj/nWDCA==} + '@zag-js/angle-slider@1.21.0': + resolution: {integrity: sha512-1d4VgxYv4LQL8PtjkYqvPlx7DsZpG0CaB1woOhPZSva7jmo0WKvTAUZf2pbk9ajTm+iA4C3xHRbVRM6s2Vy/lg==} - '@zag-js/auto-resize@1.7.0': - resolution: {integrity: sha512-ifWflzZc1fNJ+XUZaYpB220AiAr4l3Eczq8ELwj/ugg7T/10Wo0FkxTCVmCZfIiCMoqHuh/2oTX3PCTIwg6uxg==} + '@zag-js/aria-hidden@1.21.0': + resolution: {integrity: sha512-x78v+v/rNYoCFHeHK343kapdevywctNUEmPGdiH2BT3BI7uXZtv270WkD9OgdEOuEKuu18vbZ9TGYO9FGG8Ijw==} - '@zag-js/avatar@1.7.0': - resolution: {integrity: sha512-vzMCMpYIM2BIvPvK34VaRMUsUSpg3jwoxCzA31k+QrCmjm3ti8pLoT4waE01XHiaQwNPcTFbMWUi/nIQQKG14A==} + '@zag-js/auto-resize@1.21.0': + resolution: {integrity: sha512-bQZUC5tP5SFdVcZ8vTA2tQy4B/YphwJaKCkG0Y6lHscpcPcZK7+kgBJaRj4XQuon7aKmgECLlD/da5PNNAdOJg==} - '@zag-js/carousel@1.7.0': - resolution: {integrity: sha512-bSbo00J7/4EhXKluQnCmH3dg+GjsI1dcogMNtY3Qe/hTUJI9F8ygXHWzkbEqe2iY8JkBucRm+IVdlAOGAjVARQ==} + '@zag-js/avatar@1.21.0': + resolution: {integrity: sha512-bRkEaoSbJ8Dae246cc0ShmXLBWDcJIcI1KoncST4ClYwCqyMIj4s/zgr1+XUlyz3imz6n1RhTeT2jKcBqFGC6Q==} - '@zag-js/checkbox@1.7.0': - resolution: {integrity: sha512-zhisqMrgKZNHyb5n4xN5JYdPU8P+duPJfy18SiHRMghi7rJrfnQZ/Ec+uEih1cGhu85juco5k9ud/AiT7bD6MA==} + '@zag-js/carousel@1.21.0': + resolution: {integrity: sha512-MpGLu6xVyPGDk5OupyTFywb85xrqCEs8qR0FpOH5eyNp3lvx/iLVNMcI+KTk5YTlZWQmDCyT86wBLMlf6SfTvw==} - '@zag-js/clipboard@1.7.0': - resolution: {integrity: sha512-rPLoIE7zKBRiHwAzSu/hT21ICMP7TmSWZGvCPV0hjtAE/sFAf/rsEwcx2DT3uBhUtoFQR7tqNRn4CnIGWkr2Fg==} + '@zag-js/checkbox@1.21.0': + resolution: {integrity: 
sha512-lY9DYOvz0Cbdi3jxudv/nj9cpaGk784RiookL7QHr1u/Z/sUSNj5gUNpsIkSzZmT054Tu0t0jhtTt8vScq8DmQ==} - '@zag-js/collapsible@1.7.0': - resolution: {integrity: sha512-W6+3tAC/ilU/ffCLhdJ2bMMTuZSgHnCaLMQemUUS4kMLKUyEdXTqxKzaTEqcBQfHotsYLQUfrK57hoiAKE/UgA==} + '@zag-js/clipboard@1.21.0': + resolution: {integrity: sha512-hJl4o8itwvVW3Wz5Zd/OQjR2OhXKdjHqIUuvPGbKcKEWxk6X9SDISslmCH9FbKVGVDgM6q5UypaYwwJZ1SsONQ==} - '@zag-js/collection@1.7.0': - resolution: {integrity: sha512-gH7I03ag2niEhCVgNpXBYybnIROGXmAkX+5e1rYQ60mOh2oQnK+5k9k3DRkca5rAKbu4uT6JjYFwnY9sA/NZfA==} + '@zag-js/collapsible@1.21.0': + resolution: {integrity: sha512-6vdZyZauYdiedlh6hcsYDF5Q5eC/vWstbP88PzeCFSxV5hKCJKxENOTd6d4OXJuYeWGkUABdgOl5MLIZVHrYCA==} - '@zag-js/color-picker@1.7.0': - resolution: {integrity: sha512-t439DB6EUrcj4f+MsLOIpttr3hsP4j3OgznJwSlwWt7Wsyhu9uX7cyevA56w4L4nt7lD1AP7305eN6AnILakjg==} + '@zag-js/collection@1.21.0': + resolution: {integrity: sha512-wJYmazXIFnr4/azWI9yeYrK3rB1d0KoaUMhOkrmGnwfp3c0U6rrUL54RuCMeyZ9WmzIUBhjZ5zc+385nsXwlPA==} - '@zag-js/color-utils@1.7.0': - resolution: {integrity: sha512-OvBr4v0x7/Hkts4NFychApkSoV0kDuLhRdcjm1DcHbX5DBGlptnDqGZaswbs5KMYXXH23HDgnBRWmnvmfmGDkg==} + '@zag-js/color-picker@1.21.0': + resolution: {integrity: sha512-vovzxNdINPloc5SCBBwZX1/qQnvpGAs++82GUDBGdrdai/ayBYUMkP6Hd0OiStkEDunECpfDv4Qff3kobUIgpg==} - '@zag-js/combobox@1.7.0': - resolution: {integrity: sha512-kaMvGoBZwiFC9KaUbHXNFkneg7grZmJlteVxk6kJXYd7JGDHhhYsFznPNIC0apvBCIEqwyBGVB/lCjK+BseZtw==} + '@zag-js/color-utils@1.21.0': + resolution: {integrity: sha512-phUCKXeDvgnSUdLtjF6oE7HRmFEqNPkKOH2Nkhlnt9Hi8uxW9xhG3Haix7DaBhCN2DLRZqpsULpCA5eYV+S8IA==} - '@zag-js/core@1.7.0': - resolution: {integrity: sha512-FyK1POPqgBp7DBpUIwvmBQH16+L52NaTaQJzg8iTI9mI/4m3AxZ5aN+8a8qzwGIkVI6rlDcrBkmuOcHDVIOEGA==} + '@zag-js/combobox@1.21.0': + resolution: {integrity: sha512-aVEbcRk2JilDhGJjAmmO1YI4B8lNOeqgDxsbdWDDcgivHOzo1b5Rt+5kfyodXVOlzQAPkdq04b5/xLR9eurnJw==} - '@zag-js/date-picker@1.7.0': - resolution: {integrity: sha512-64UEmdN74I4aOPS1+7zNSl0VHzUIVLDfgXw0QZ24miMM+SYVcZ1+KSVI4yeS4SETwGpdm9YkvN4z3guCtwcS+w==} + '@zag-js/core@1.21.0': + resolution: {integrity: sha512-ERQklS65W2wZD7Xvm/w/7u1nL5ZcTwK6Ppwat8EfAidBGGUB6YoZLW9Vu3I04g5SPhRmDmuIXhkTqKgIbXUUYg==} + + '@zag-js/date-picker@1.21.0': + resolution: {integrity: sha512-pfZXvjuF89NfV6CTc4BayPEAujysJ5vRSVFArsDbz5oKB8j5PCRtvHEHo0WWwgF7Jr40CTmiG68wzuDMCdXq3A==} peerDependencies: '@internationalized/date': '>=3.0.0' - '@zag-js/date-utils@1.7.0': - resolution: {integrity: sha512-zZHFx3ZuIHB38qTQzG9/frj9nFLE3JUwMkiueIVdPEgaRl7Tx5VZ3NcDKXQn9ebmXi/Zk9YOAUBr7aGXBBOAcA==} + '@zag-js/date-utils@1.21.0': + resolution: {integrity: sha512-4H0Z/zQFfpTL45rUZg3tH4lJQmsV6PDTml/ptj9I8/1Mxel5eOwBdmDfQ7owm47H7MjgUvm7CqvYT9987b0KXA==} peerDependencies: '@internationalized/date': '>=3.0.0' - '@zag-js/dialog@1.7.0': - resolution: {integrity: sha512-gx/CtKsPg/Y+2d+HtP3tjEdl7KM+x6lUDttjDDBn9rvXFs2REW69AlcJtRzs6B22CxDPmxssGPr1oi3zaU1AUA==} + '@zag-js/dialog@1.21.0': + resolution: {integrity: sha512-nAKoCnpd40UeprYl2JazDZVL3r5uHD1L4dUEeY9GlO4CINYBvt7jntVJn1xLGm1tyc4S+kFUSgI1y1DXlS+8KQ==} + + '@zag-js/dismissable@1.21.0': + resolution: {integrity: sha512-+BewcHUJvNCRWZ4lbUqABW6EwJRM2hxf65OPcN9XCMFCAoHbezdqHXYgtU7LRvYUJyxbvLPNeUrww3D6vcyhmA==} - '@zag-js/dismissable@1.7.0': - resolution: {integrity: sha512-o6S++e7iaBmizIgsvLt5RwY7gn2OQGeG2etet+oaUAMtNhi/1+uGG+rTZgOMj/MGg9BYpPld5tXfk/RrlShh9Q==} + '@zag-js/dom-query@1.21.0': + resolution: {integrity: 
sha512-P7Aeb1hfd5GtmTO1u0HkyVUrhFYgm94NxJhqufF2W+xByz/XspDcdy0l5pHFGsK9Urvh69S4tCx5YVh0MhZYgQ==} - '@zag-js/dom-query@1.7.0': - resolution: {integrity: sha512-cj+mKB7Sj7mqAepHMsbV4bGvDJfUYCt4d4ruYw0dVpDa1Z9N38TtztTznfrm9kuqOYcJkgE0q3Rn/kPLi8rK8g==} + '@zag-js/editable@1.21.0': + resolution: {integrity: sha512-28QivG0KU8OCgsldxi6rVLuqr36cNiuy1vTEzcoc61Ue6B1D4rCBAQaAJedl5r1ki+Vzrjl3uP1ApoUwV3S/JA==} - '@zag-js/editable@1.7.0': - resolution: {integrity: sha512-tNRDr95B+mFLk6Z8Fh0+BiDiCWsUt1iR0pIjFy88Y4YjGYd8Q71yNt1SLNKTD3DZnDGmlbRUB/4CaP+jso4aYQ==} + '@zag-js/file-upload@1.21.0': + resolution: {integrity: sha512-uH55bwFKcftpUYACyHT/8xB2bJdDqe3NM3JNCEYplxvn4scvDEzr2jpyVEmqUeOfrdNnyTuthNnL2hJjm4e+4A==} - '@zag-js/element-rect@1.7.0': - resolution: {integrity: sha512-j0h1+DASUI5urwBCELdjfk4oekLQ0D2v3a1wQJopGh+ITRVAC1gE1YFx3O+vnP2HwqANxG4+RQHwoQBM2bMBCQ==} + '@zag-js/file-utils@1.21.0': + resolution: {integrity: sha512-gEWmz2ryuJMyAq3kg13TTmh5wR4Ft7d4Lb81ZeHiPpI/IwW67QrpBN0AKw3FBTmAuYBpK/dEc5iyETNPPrPTvg==} - '@zag-js/element-size@1.7.0': - resolution: {integrity: sha512-Nq+HxG64Ts1QvaJPeDuy8zo/RqcbE95RPNVuHBwuxK3sbXOt7umgIrxQMp8uH+1xeJlp7F8/ydKOPyKOTtgiJg==} + '@zag-js/floating-panel@1.21.0': + resolution: {integrity: sha512-PVszFoJ53Iqmx+JD7WQFydRpp6spZFP1bCuBaHSoI044Z57UJ+rAkSlOGpoRHwpSROO9FPIpeqoTgy/kOCNmOA==} - '@zag-js/file-upload@1.7.0': - resolution: {integrity: sha512-6yJhUDLYsqbd0YBO70PzMDNVJJv8OdC0ZWrf51GMUSugGfSpvQZNDfpAW5Zkzqd4B5nkJDw5KiTSR5NYQlO7VA==} + '@zag-js/focus-trap@1.21.0': + resolution: {integrity: sha512-O00KOYOVPWWv/eATfeZxRTEvUTLv+eHJH6ynqOAvQ7RXmsECst4QlL9UJwStrTKn/r2gxhj+UZMwHMEwTGNeVg==} - '@zag-js/file-utils@1.7.0': - resolution: {integrity: sha512-Wb1VoI7UquG1ckJPMFPnmgLg351NI55SXjsEq+CrqgKQCo0httYFLPlkOpp4AbGsoUFZxXRxEXDEVzq5kpPFzQ==} + '@zag-js/focus-visible@1.21.0': + resolution: {integrity: sha512-FNA7H4hyoQRBKpDkJWlBrFeyJpVphATgjvjhNXatCrrfa4F7VZiGnu3RGhEcnaw4b3bNkFnYLdRd+9XX7JHuoA==} - '@zag-js/focus-trap@1.7.0': - resolution: {integrity: sha512-JHMZAfiL1aoxMAQGolx+iDMgqOMy067yffaLr1tMX55NGZPfEyXEjgxmPXRPf728/7IOShLkWLX17yacmW/w/Q==} + '@zag-js/highlight-word@1.21.0': + resolution: {integrity: sha512-bJIwPtcAMfEP6c5R/a3ZQG1V5FvYBP9onMVwKranAWPqOUj1/Y6lQ2gV/K4s7sw3VnpoXmy+5VxwfOPU/QWU5Q==} - '@zag-js/focus-visible@1.7.0': - resolution: {integrity: sha512-ycrO6VetctoA7aaw83rnp3erDmQe2Zsyobzp4fzpMbOBTNWzMklt4Kz54xa1ntkia8CpSWVfoauORLlaZoDiAw==} + '@zag-js/hover-card@1.21.0': + resolution: {integrity: sha512-G4+/lnc4ATU7BVHlnQ77fNC1b2k9dcbIeaBPMcdnc+g+CtqNhNTBM+rMb2OpSE9IOuFwqld5EK1v4tW8+6qOwQ==} - '@zag-js/highlight-word@1.7.0': - resolution: {integrity: sha512-dRw9GbMTh+CKKA4dH6n2TEmaayH2cB5Otnaawm+o+q3gkioVij8V/owWFbMZrszW6ajJX/TTdsVJ5IBdPvKhKg==} + '@zag-js/i18n-utils@1.21.0': + resolution: {integrity: sha512-5E+vVsL6zcfaLlSGSnB3olXIEzmZ4C5L53+jSnx8LqmIcuTEc8I8mvBhcpTiDVHKrH6jG3jHE+6BvdyJ9SWQiA==} - '@zag-js/hover-card@1.7.0': - resolution: {integrity: sha512-MqrLet1qaJfc2MEvHUWGLQ1OxgTz73gAD7oWXxnxks2Q/BXow2jU3+fVdseg3G63bmUbHXSdOkyGNo0mpHCV3Q==} + '@zag-js/interact-outside@1.21.0': + resolution: {integrity: sha512-Yo4lojJYJZ4fjavOz+VbdpZlcDFAOlrOX+rKss3BNKfaffmhCklx/8Zej7WFStPCAv8AOzZ+fE4EhH/w+uPXEw==} - '@zag-js/i18n-utils@1.7.0': - resolution: {integrity: sha512-CcDXxfobG2LlOU1m3xPzV5pXpCe0tSE9u+drtKMz7F/HOZkR3V0rpCCi/zKySPNa3uLC7G8efz1fGQXiOVKONw==} + '@zag-js/json-tree-utils@1.21.0': + resolution: {integrity: sha512-OSyIxdWUVWD44hCvSgR+hP0q9nJOejS1VI9P4dbphQfcLNVvntAfwrb1os0DUR++UKBHyhAYwKVuVdThYbkJYQ==} - '@zag-js/interact-outside@1.7.0': - resolution: 
{integrity: sha512-tmsVQmcH2N2X2mG2/8/+WRIo9WbRVvLe1OZa3lzFYV4Mu5i+tNK1CHMESpoAd/RdjJ6AyTR2zYiH05WZe76gMw==} + '@zag-js/listbox@1.21.0': + resolution: {integrity: sha512-XByByVOj4MA/ELcHgtkiS+jP5b2C2wXHmpCeCUp2jYKx3ZiL8al9y7yYLVBEDHRXsAR44UAQuJPIjDsCgtgkJg==} - '@zag-js/live-region@1.7.0': - resolution: {integrity: sha512-u2bYIAnBIY+GZqfPqxn2ZylOqE2blUVW7Yc2Z4Ey05K4JXSH2gKR3xPmJCS9/u8tcFKQz5L4KQ/98ntgBG2fGQ==} + '@zag-js/live-region@1.21.0': + resolution: {integrity: sha512-buHwgHkW95c8gYtk53AEmjS8r72AtDFRfD3l3OgMsBE/dnYYgM3bfpiZL3pP0IBK+WPKDJxS8TMj7Q7pBiQebQ==} - '@zag-js/menu@1.7.0': - resolution: {integrity: sha512-F2XbPC0cWrmj7nLrs1/is2osaPYX9blhEiZuEcGSrWG00w6xWyPb7bFpccW2nbq87JEc58xzW1pnTzPnaAnwSQ==} + '@zag-js/menu@1.21.0': + resolution: {integrity: sha512-usD3MQTobKlzplY3j9IZxiq6cGHUZ/N8qmmi+EKvo0xpsEimhyE+FHr9XHqmFfGsxcH/yvyuFkvEjaUrF3qsqQ==} - '@zag-js/number-input@1.7.0': - resolution: {integrity: sha512-zmStn38lscmSsX/P6hZQzan35nEstVmEGC6M3m5G+bzDRC+IR3h19yr1Ma+xXDkT1Vi21GaV0+rytf9WsYJg6Q==} + '@zag-js/number-input@1.21.0': + resolution: {integrity: sha512-77Z2tTI+PcOCaoxNoteXfLaZA0zxObrOxqAjTgwapM88kn9oGNU4Ln6AYMJqdIDZJtQWdLBGjJwi3R8h8irpNQ==} - '@zag-js/pagination@1.7.0': - resolution: {integrity: sha512-gIbJe1fIYlQCpXqWssET9CCmMWLvcz8OCCw7W3ASeLYRvUW3IzhkMAht5pEsvJEZ9tIWaab5fZ7OLqcgCTgVQw==} + '@zag-js/pagination@1.21.0': + resolution: {integrity: sha512-d3zXD17CTSsA3o+5oJB1CujEoYNph58/DHFwVFDRgH5lB5K1vBxgas+JxJ2++uhouI8BH5fz7w7X3Wr6kXEHIw==} - '@zag-js/pin-input@1.7.0': - resolution: {integrity: sha512-iQfUNfbtq28zPzFjmzDs7otRbFr+kC6luQM33wALZpmmVBNXb7yi9W6R14V6NJI3to6cAaHzRzn3ixxfQJEB3w==} + '@zag-js/password-input@1.21.0': + resolution: {integrity: sha512-paiZbGEBlkoas08qwrpQVUuZXG8efgti/u464eZR6x7drv6PVc9igWxfqFJXL378I/cEUjj5MvYdk9yMbLJcHg==} - '@zag-js/popover@1.7.0': - resolution: {integrity: sha512-Nf9grOVBWlnwQL+AR6X2hAy5bTNQng9xG2Cfo4E8rD2G/CJLKtUGCHHkG8xeQ969HT4urbOrgrZ5UpAhkpNlmw==} + '@zag-js/pin-input@1.21.0': + resolution: {integrity: sha512-Ut3tZ4rDhjopTTdMcNm3BIpTlAu3NR1Uw1w+WM5NTh5C7Vn+GZAL5dP1dahB/t29yqhTZY4ssMxZfDofBpfMHw==} - '@zag-js/popper@1.7.0': - resolution: {integrity: sha512-1Tr9ZBS2VPeZ/zeAR5uEBYLkWn4VcycbaDDkvWxa44fi6LxknDf064cP+ql9AfUp/eUGD2hN9OSEhyxB/JXjKQ==} + '@zag-js/popover@1.21.0': + resolution: {integrity: sha512-crDELtzKZo0hSXA1N8LFrleq/9QlSGRlUNNb0DoUW0/gFFBG3wsrLayn2gWHweeM9HBG60ZnZnBW//pXaS32sg==} - '@zag-js/presence@1.7.0': - resolution: {integrity: sha512-00YcVn3J0zwQ/DSEnbtbCx6UMokHXTiMF+CjNryPaaAOlLk/5s4ogEdrdguFvWxQ6zszQ3UxBh3H9pim+k7jVQ==} + '@zag-js/popper@1.21.0': + resolution: {integrity: sha512-PWLF6kY4f88CBM+nGebPJMB3DsXcj8NDuiLdljrGL4j1x18t1dhNY1IIdNDBueJCF0VL0uJrGwcxMZg6FGReSA==} - '@zag-js/progress@1.7.0': - resolution: {integrity: sha512-dfjPtUGRZW0pURBalm55ACoN083EJ90cDT1RRRF72JhqlRJu/vSXngjSUFtYuG1WADGS3D7F5XIFMo+PAGynFg==} + '@zag-js/presence@1.21.0': + resolution: {integrity: sha512-Fz7nhaoYbfbV6c8ovCnv75HaCD5yvU7NUxtR20wUYBPPx5nvdOViUsU+4ih/HXUcBHsQUW6teIfkf9Gb7xbCgQ==} - '@zag-js/qr-code@1.7.0': - resolution: {integrity: sha512-fg/hI2Py6D4E2cvh2BJ4PunYyyivkkRga76K9VDvq+hq1OezB6SzchLjFkIXn6431VK+xrU1HqcSR67KAn8IWA==} + '@zag-js/progress@1.21.0': + resolution: {integrity: sha512-AMZsoURX2jotI2KrODE4jw7e9FPslKIZCO/guh11D6A9gvSM3ECRe2gKdAcLjP+UKxayS8MkNPhD51bAYCfkbQ==} - '@zag-js/radio-group@1.7.0': - resolution: {integrity: sha512-9NlI5fTh8ZVX5nXm7nU/ZheQLZpHwrHZeKRjomVQQALEWuMZ5YJtVXZaUT5xsCRTk+LEQVSaKp10+aD/5cIMlA==} + '@zag-js/qr-code@1.21.0': + resolution: {integrity: 
sha512-mCe8qp+F9ZKS9Py/CkXmfAGMc9h86UM9NkXOWwU880az885Y0Ld8UaHmyWO3AAJDWPYBkTJKq+tEqNTCKx1dyw==} - '@zag-js/rating-group@1.7.0': - resolution: {integrity: sha512-jDr8M+2fXTxB9l8qm8ktA362eM6Xt6FzIz0dKlV1JsYr5KamhsZ70Y8MPB6i3b45FGdDdj02a2aaWGLRUaRnrw==} + '@zag-js/radio-group@1.21.0': + resolution: {integrity: sha512-TCb3RjiNhgFWzwHUns9S+z6rNyXng2kexFPmD1ycyEO1efHAb83J5aZv5ShGX/05YCZpwVMf3WsyGEV8p8c/1g==} - '@zag-js/react@1.7.0': - resolution: {integrity: sha512-phr7WMVJcwfOkiLwtobGWkdzVGdZmVQYvF7w8awloW0j1+YF2OdMYDZK8RauHwmg+sEVmqtGeZPr40hZNnKhVQ==} + '@zag-js/rating-group@1.21.0': + resolution: {integrity: sha512-TBjSGfHT06Ehj3lBACVB3pOnxmb+jvJQgBQUZtFYFMae+gtuKItwx9qleH24vuyqKT/DI3amQhbvpi+bUK9CVA==} + + '@zag-js/react@1.21.0': + resolution: {integrity: sha512-yTqpMJ2c6Sf/KqXmyq3yJg1W/VZhYn1YNBRKWYJYT/kUDnoOpyqIBbmwka0dZi/hnWdhK1pzV0UUa7oV4IWa/A==} peerDependencies: react: '>=18.0.0' react-dom: '>=18.0.0' - '@zag-js/rect-utils@1.7.0': - resolution: {integrity: sha512-VvpqanvSrD/a5Gf5VHCM9yhkaBFWWsYTRNNQBtguNDrOh/tFvQBFAwes/BxvT+4fG4xbBL/fbSZIyhZ77Q7L2w==} + '@zag-js/rect-utils@1.21.0': + resolution: {integrity: sha512-ulzlyupj7QnM5NdAHSy2uKscVanjApxcC5/FRu+ooUZRaK1A8BMqep6r7lsVB8qTz0l1ssjLqCJPGNzP3PB3ug==} - '@zag-js/remove-scroll@1.7.0': - resolution: {integrity: sha512-sjuBT/iHUZKoDaIdEa5fn0Ii6qjPbp/xO5g/2n2gI3RhRPjcc9jmrTxuvjKftB+ZoBy4GO8MbeaPKdQLIreufg==} + '@zag-js/remove-scroll@1.21.0': + resolution: {integrity: sha512-wsXEM7rUJnJrTmcCHsahtLfxaas/enHOakAB98n5YZelcoFFbE+iR91brb1yUbccfryvepozOac+EIWuO8/2aw==} - '@zag-js/scroll-snap@1.7.0': - resolution: {integrity: sha512-dvRmgdnT0AR2g0RtjgVXGJG6Si4gd+os56u1x3VKzAzlMZWYiFd0iyoKFCr/SCBEEMN/Y3ppkQoZjWOlnpah2g==} + '@zag-js/scroll-snap@1.21.0': + resolution: {integrity: sha512-H/8bQql4DjYFVpBG6j/EyUsdboCxyGjRzOg9SN8bA2aXNDBPh+/oLwnCWCqagd4A1VO6JxmuFmbcM2wW9Khmhw==} - '@zag-js/select@1.7.0': - resolution: {integrity: sha512-DmKIfoJFO42NgZOspEww5i6j71OqHgUCCodxR0zCmMoISxi1VYYKdjjeeSqivUYoH2mk9+z+lAJF+qdCo45Mzg==} + '@zag-js/select@1.21.0': + resolution: {integrity: sha512-wVxPzw9lmtCDWTPP0h6P8r7QL93VsyajwV0EPFKoa8HH4XWzl5QBuShXIzmD8dxbHA5HIdAZNYAC5BQCSW37Xw==} - '@zag-js/signature-pad@1.7.0': - resolution: {integrity: sha512-m81iwLl0TKsFPRnPLadVIM53q6b7NJJ6fgRH8Z+TImarorV4QcA0IXr2wcj1MLlIa4CPNiXoQrmOnOdIOFHvWA==} + '@zag-js/signature-pad@1.21.0': + resolution: {integrity: sha512-LUXHsMPXLNSaWBJ4WWY+ZSFpAbbPHfUAGOVh22bOIJWMRchcs4Cch42tFgg/sB8cREfc3G/CS5e2gIBqMigcEQ==} - '@zag-js/slider@1.7.0': - resolution: {integrity: sha512-0h9ejtOWa4XjxApcCFyGt7By22kd6gG4PdUZgXiKlPCQFgYrxWXZqMlwH6ZtyD4VYUuRPJ05CezDU5KlmZD/3A==} + '@zag-js/slider@1.21.0': + resolution: {integrity: sha512-dmH2j8Iu079UZf36TzfPBOYb2jGbvXHcV8x3zYiRWs4ccJDaSNBZieCWCY0/Nm5wI8l+ue/Buc1kcbpIytuWHQ==} - '@zag-js/splitter@1.7.0': - resolution: {integrity: sha512-iJiKgRqIp/gbzjTajLIjpoc8dVBhjrTGauwVFj2yfKlkM30lgBRBHPtnrtsVox2A5ZyTikuj2ZtMCFXJAL8BDA==} + '@zag-js/splitter@1.21.0': + resolution: {integrity: sha512-blsSe3UrhEYieLF2fuO7UM0t2rQxFTeLYMSjuxFspdYZz47VnEKtVypgQUZnQX5dyttyV49vl1g7+AbBBlk6bA==} - '@zag-js/steps@1.7.0': - resolution: {integrity: sha512-niYlKAy4j7yariPVbPJwBgzWhEsE82d7JIxD4yQW1nyyM6+xAgZrJaTG6WY1ogiBLCDj5kZw1rJv1uBBF6I5EA==} + '@zag-js/steps@1.21.0': + resolution: {integrity: sha512-w0nzJBgYe/A04pNZN1mv1hRT44MVwwRf9VvlBFIS1CxVpUOGkDoVrzRb/CX1zpOhMdtF8w7+FfgT6Q3/oVJ4+A==} - '@zag-js/store@1.7.0': - resolution: {integrity: sha512-3n+AGo3Y3d1+SkEjY/6QPcDU5kfGu4DEA9qMxJgnnOlYT07SEWByMQD2uoEji9M9psHcVvxm86OnF3Y6UuTsuA==} + '@zag-js/store@1.21.0': + resolution: 
{integrity: sha512-UCAuYWui3+VYfp8KdECXuM+L8tKzQYyNz+7KrRPHyZ37wgHjz4M+QNj/QP5GgDStLJaF3UgbuLYwbXSQ/3WcWw==} - '@zag-js/switch@1.7.0': - resolution: {integrity: sha512-sz3whYMAD949fJ5v9DegU43SrpUNKhoPOum4LOpoSrh364ePfm7ShsTIgJnqPrdMknr+17ljLx54tXPS1SsMTw==} + '@zag-js/switch@1.21.0': + resolution: {integrity: sha512-erQ05qU9UUTOKkq77X+fTBOnng75ZFugcbcx4HWkACs9aUQmh9JoRF/1+HzFvRf8SyfuEdiSP25Q+ozmiOUmXQ==} - '@zag-js/tabs@1.7.0': - resolution: {integrity: sha512-bAMp7Vhyis5j3BSKs4m0OwsbchRLLzFf6Yaf54CNraAUdKRwLQckznrajQLPI5F+BrHkGzMXvj/lt9jlGiKDcw==} + '@zag-js/tabs@1.21.0': + resolution: {integrity: sha512-ecRS8F5M6QCAln4ob8waySRmSPozbOZ5dq1GGmaVExBwbrOA4C3ZbrHU3Dhmmx8vUji+rOSRifyhHwCTY0PTqQ==} - '@zag-js/tags-input@1.7.0': - resolution: {integrity: sha512-ME/KwP1yrPHX0bP0EqkHI30IQgrE2cAkREoRluM5ScpG3Uiug98x6+zts0YS9j1OB3pyTl0d4alECBruxN8cPA==} + '@zag-js/tags-input@1.21.0': + resolution: {integrity: sha512-i/3PvNMhUloVi2DO+CRAEHtosu/Xmjcuj7Q3wY1acTORkoyXJrynmKmUcjF2D5ySHuey+Q07ADztlpa9ZHjr8Q==} - '@zag-js/time-picker@1.7.0': - resolution: {integrity: sha512-oeJ/2cHUD/iNF9LVWeFZ0ZrUDpMcSjb1lScqmrDdSuBpt9Hv5NLwjKFVeCtcE7VP3ijgN1VHY5FJzqQyynK9tw==} + '@zag-js/time-picker@1.21.0': + resolution: {integrity: sha512-GIBgfHfo2pYnl9MD0fVNaJ6UE63dOs+T0DFPhBf3DazNR9r4qhK0QXQLRQyH57KD+kcjKiJNgMGRKsKbX88aEw==} peerDependencies: '@internationalized/date': '>=3.0.0' - '@zag-js/timer@1.7.0': - resolution: {integrity: sha512-IpFkbuyBPJl/1idCchljtpZ0PirJWHLpvoFrEnyXQ7clyIeeLuYjyMMfwG+BVWZ7BeYby9A+b0+UNksvoJLtvQ==} + '@zag-js/timer@1.21.0': + resolution: {integrity: sha512-vFohY91xnJVV6iSkT6tESLIrFssZsE02LbnXjHEnEVajC0jXLExvIu70t+5CWmP08e2yfp7E+G9WI1cDyzS/SQ==} - '@zag-js/toast@1.7.0': - resolution: {integrity: sha512-tvEO1vpC9QZ0oYJOKay2dvcq5lAPn4MT7ahnALs89iVjhWyguXAs5kzoq/Devlbuhi+bUY1YxvtrMDJjYVFhaA==} + '@zag-js/toast@1.21.0': + resolution: {integrity: sha512-DMvdLMQFGGwNxRjnzEsszocBWreQ+4spvQTrolra9pp7PuklodnIIuxRNNQ7bQVd1wH/pQPkEwXTbusb4NMBgw==} - '@zag-js/toggle-group@1.7.0': - resolution: {integrity: sha512-qf8S66MUSw95S65BFH+PUtPs6GCLd39MWHJqzvZSXS+UWCLNXQlK8ayrNYh6CQgtgNeyljMqc2pFGWmp+M987w==} + '@zag-js/toggle-group@1.21.0': + resolution: {integrity: sha512-zUxLj0sXCUixI3C7lMEekQc8jQlFd0Y70a3/MO5xC/sem3pucPS30rulcvp7b3d9TLJk8YVofpvAjdRPDyb9XA==} - '@zag-js/toggle@1.7.0': - resolution: {integrity: sha512-94TEthfGXjNmPcIiaOlwwEm73SSI2rRVn6FPviatzQU/OcDaaiAxuvGMIkW7Ov4+1sniAElGP24LTnyz0QuQpg==} + '@zag-js/toggle@1.21.0': + resolution: {integrity: sha512-+toPS8gviWYDAatyuFOWooHts5LP368UYsubedxZAgyz+qE6Mo8j282k2iGvmzrM22WcplRXVzgZ0JYUFVPtbQ==} - '@zag-js/tooltip@1.7.0': - resolution: {integrity: sha512-ehZOewcxYZL4+ND5QMeDlQQrckssMTzxcReRCOVFXrRZb5X1jX6+ale9MSG+cJYMpQUqT2J5VtzMJH+GNj/jfw==} + '@zag-js/tooltip@1.21.0': + resolution: {integrity: sha512-X7t93MPvB0T82HT9QRlfh+Ts8QwAeouSDmaCCrF5/tdIsMTuzEzGqWtaPbXTDfMGrsG2umlIiIVSraWDe6aAIQ==} - '@zag-js/tour@1.7.0': - resolution: {integrity: sha512-P8wYE0OpW1GtopvQ7ELdF2SuTMI64iBSr4UYGRCt2WkbrjP0vkFp35iUEbFmE44cRKIF8jGU6gznSPCGnGjz9A==} + '@zag-js/tour@1.21.0': + resolution: {integrity: sha512-441Az3byK0vP2zL67p4z5m7s/0B7uHicLdvS0rKjoI+2gZ9Qd8yGuzTSfMJY2lWn+407iswN/koY7Kz5K0srFg==} - '@zag-js/tree-view@1.7.0': - resolution: {integrity: sha512-ULjbcLG3PqYV5BKNW8Z9Ikh+67GblYhEscgfBN4X3BLv9KOG6J0Gp4JQkxkWBTeRpUCTnoBgZ1ZbeOFgNJbcfQ==} + '@zag-js/tree-view@1.21.0': + resolution: {integrity: sha512-gMjmy+sdZsLm75pwLH8M5qCOnsXA2KIGt0lKcfL/qAhYqDVaXm6xnx43JhJxSvVvqPqDuP1W8R5vUkBtEXV5Ig==} - '@zag-js/types@1.7.0': - resolution: {integrity: 
sha512-rmPonVc8EBOGIEJYjzWIBQ6LJwUMc3LnipRREECO+n7LNlUQUliCOFbHw1UOGP+4ZkCKmxjGFR3jLtjY8aN4gQ==} + '@zag-js/types@1.21.0': + resolution: {integrity: sha512-ozT8aTeqCKsPYQDqIgkjkJnXBEADvV8nj8ZuXUzm7RhIN9EqeqpQyOdA7GdYrrDY5bgmdzyzmJu+e/2PbWg/ng==} - '@zag-js/utils@1.7.0': - resolution: {integrity: sha512-yIxvH5V27a1WuLgCxHX7qpdtFo8vTJaZLafBpSNfVYG4B8FaxTE+P7JAcpmAzs3UyXura/WfAY2eVWWVBpk9ZA==} + '@zag-js/utils@1.21.0': + resolution: {integrity: sha512-yI/CZizbk387TdkDCy9Uc4l53uaeQuWAIJESrmAwwq6yMNbHZ2dm5+1NHdZr/guES5TgyJa/BYJsNJeCsCfesg==} acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - acorn@8.14.1: - resolution: {integrity: sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==} + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} engines: {node: '>=0.4.0'} hasBin: true @@ -1193,8 +1275,8 @@ packages: resolution: {integrity: sha512-Xm7bpRXnDSX2YE2YFfBk2FnF0ep6tmG7xPh8iHee8MIcrgq762Nkce856dYtJYLkuIoYZvGfTs/PbZhideTcEg==} engines: {node: '>=4'} - axios@1.8.4: - resolution: {integrity: sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==} + axios@1.11.0: + resolution: {integrity: sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==} axobject-query@4.1.0: resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==} @@ -1211,24 +1293,24 @@ packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - brace-expansion@1.1.11: - resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} - brace-expansion@2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - browserslist@4.24.4: - resolution: {integrity: sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==} + browserslist@4.25.1: + resolution: {integrity: sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true - builtin-modules@3.3.0: - resolution: {integrity: sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==} - engines: {node: '>=6'} + builtin-modules@5.0.0: + resolution: {integrity: sha512-bkXY9WsVpY7CvMhKSR6pZilZu9Ln5WDrKVBUXf2S443etkmEO4V58heTecXcUIsNsi4Rx8JUO4NfX1IcQl4deg==} + engines: {node: '>=18.20'} c12@1.11.1: resolution: {integrity: sha512-KDU0TvSvVdaYcQKQ6iPHATGz/7p/KiVjPg4vQrB6Jg/wX9R0yl5RZxWm9IoZqaIHD2+6PZd81+KMGwRr/lRIUg==} @@ -1262,21 +1344,20 @@ packages: resolution: {integrity: 
sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA==} engines: {node: '>=16'} - caniuse-lite@1.0.30001707: - resolution: {integrity: sha512-3qtRjw/HQSMlDWf+X79N206fepf4SOOU6SQLMaq/0KkZLmSjPxAkBOQQ+FxbHKfHmYLZFfdWsO3KA90ceHPSnw==} + caniuse-lite@1.0.30001727: + resolution: {integrity: sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==} chai@5.2.0: resolution: {integrity: sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==} engines: {node: '>=12'} - chalk@3.0.0: - resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==} - engines: {node: '>=8'} - chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} + change-case@5.4.4: + resolution: {integrity: sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==} + check-error@2.1.1: resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} engines: {node: '>= 16'} @@ -1289,8 +1370,8 @@ packages: resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} engines: {node: '>=10'} - ci-info@4.2.0: - resolution: {integrity: sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg==} + ci-info@4.3.0: + resolution: {integrity: sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==} engines: {node: '>=8'} citty@0.1.6: @@ -1335,8 +1416,8 @@ packages: resolution: {integrity: sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==} engines: {node: '>=18'} - core-js-compat@3.41.0: - resolution: {integrity: sha512-RFsU9LySVue9RTwdDVX/T0e2Y6jRYWXERKElIjpuEOEnxaXffI0X7RUwVzfYLfzuLXSNJDYoRYUAmRUcyln20A==} + core-js-compat@3.44.0: + resolution: {integrity: sha512-JepmAj2zfl6ogy34qfWtcE7nHKAJnKsQFRn++scjVS2bZFllwptzw61BZcZFYBPpUznLfAvh0LGhxKppk04ClA==} cosmiconfig@7.1.0: resolution: {integrity: sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==} @@ -1367,8 +1448,8 @@ packages: resolution: {integrity: sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==} engines: {node: '>= 0.4'} - debug@4.4.0: - resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} engines: {node: '>=6.0'} peerDependencies: supports-color: '*' @@ -1426,8 +1507,8 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - electron-to-chromium@1.5.128: - resolution: {integrity: sha512-bo1A4HH/NS522Ws0QNFIzyPcyUUNV/yyy70Ho1xqfGYzPUme2F/xr4tlEOuM6/A538U1vDA7a4XfCd1CKRegKQ==} + electron-to-chromium@1.5.189: + resolution: {integrity: sha512-y9D1ntS1ruO/pZ/V2FtLE+JXLQe28XoRpZ7QCCo0T8LdQladzdcOVQZH/IWLVJvCw12OGMb6hYOeOAjntCmJRQ==} emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ -1454,8 +1535,8 @@ packages: resolution: {integrity: 
sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w==} engines: {node: '>= 0.4'} - es-module-lexer@1.6.0: - resolution: {integrity: sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==} + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} es-object-atoms@1.1.1: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} @@ -1473,8 +1554,8 @@ packages: resolution: {integrity: sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==} engines: {node: '>= 0.4'} - esbuild@0.25.2: - resolution: {integrity: sha512-16854zccKPnC+toMywC+uKNeYSv+/eXkevRAfwRD/G9Cleq66m8XFIrigkbvauLLlCfDL45Q2cWegSg53gGBnQ==} + esbuild@0.25.8: + resolution: {integrity: sha512-vVC0USHGtMi8+R4Kz8rt6JhEWLxsv9Rnu/lGYbPR8u47B+DCBksq9JarW0zOO7bs37hyOK1l2/oqtbciutL5+Q==} engines: {node: '>=18'} hasBin: true @@ -1490,8 +1571,8 @@ packages: resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} engines: {node: '>=10'} - eslint-config-prettier@10.1.1: - resolution: {integrity: sha512-4EQQr6wXwS+ZJSzaR5ZCrYgLxqvUjdXctaEtBqHcbkW944B1NQyO4qpdHQbXBONfwxXdkAY81HH4+LUfrg+zPw==} + eslint-config-prettier@10.1.8: + resolution: {integrity: sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==} hasBin: true peerDependencies: eslint: '>=7.0.0' @@ -1502,14 +1583,14 @@ packages: peerDependencies: eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9 - eslint-plugin-perfectionist@4.10.1: - resolution: {integrity: sha512-GXwFfL47RfBLZRGQdrvGZw9Ali2T2GPW8p4Gyj2fyWQ9396R/HgJMf0m9kn7D6WXRwrINfTDGLS+QYIeok9qEg==} + eslint-plugin-perfectionist@4.15.0: + resolution: {integrity: sha512-pC7PgoXyDnEXe14xvRUhBII8A3zRgggKqJFx2a82fjrItDs1BSI7zdZnQtM2yQvcyod6/ujmzb7ejKPx8lZTnw==} engines: {node: ^18.0.0 || >=20.0.0} peerDependencies: eslint: '>=8.45.0' - eslint-plugin-prettier@5.2.5: - resolution: {integrity: sha512-IKKP8R87pJyMl7WWamLgPkloB16dagPIdd2FjBDbyRYPKo93wS/NbCOPh6gH+ieNLC+XZrhJt/kWj0PS/DFdmg==} + eslint-plugin-prettier@5.5.3: + resolution: {integrity: sha512-NAdMYww51ehKfDyDhv59/eIItUVzU0Io9H2E8nHNGKEeeqlnci+1gCvrHib6EmZdf6GxF+LCV5K7UC65Ezvw7w==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: '@types/eslint': '>=8.0.0' @@ -1522,43 +1603,43 @@ packages: eslint-config-prettier: optional: true - eslint-plugin-react-hooks@4.6.2: - resolution: {integrity: sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==} + eslint-plugin-react-hooks@5.2.0: + resolution: {integrity: sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg==} engines: {node: '>=10'} peerDependencies: - eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 - eslint-plugin-react-refresh@0.4.19: - resolution: {integrity: sha512-eyy8pcr/YxSYjBoqIFSrlbn9i/xvxUFa8CjzAYo9cFjgGXqq1hyjihcpZvxRLalpaWmueWR81xn7vuKmAFijDQ==} + eslint-plugin-react-refresh@0.4.20: + resolution: {integrity: sha512-XpbHQ2q5gUF8BGOX4dHe+71qoirYMhApEPZ7sfhF/dNnOF1UXnCMGZf79SFTBO7Bz5YEIT4TMieSlJBWhP9WBA==} peerDependencies: eslint: '>=8.40' - eslint-plugin-react@7.37.4: - resolution: {integrity: 
sha512-BGP0jRmfYyvOyvMoRX/uoUeW+GqNj9y16bPQzqAHf3AYII/tDs+jMN0dBVkl88/OZwNGwrVFxE7riHsXVfy/LQ==} + eslint-plugin-react@7.37.5: + resolution: {integrity: sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==} engines: {node: '>=4'} peerDependencies: eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7 - eslint-plugin-unicorn@55.0.0: - resolution: {integrity: sha512-n3AKiVpY2/uDcGrS3+QsYDkjPfaOrNrsfQxU9nt5nitd9KuvVXrfAvgCO9DYPSfap+Gqjw9EOrXIsBp5tlHZjA==} - engines: {node: '>=18.18'} + eslint-plugin-unicorn@60.0.0: + resolution: {integrity: sha512-QUzTefvP8stfSXsqKQ+vBQSEsXIlAiCduS/V1Em+FKgL9c21U/IIm20/e3MFy1jyCf14tHAhqC1sX8OTy6VUCg==} + engines: {node: ^20.10.0 || >=21.0.0} peerDependencies: - eslint: '>=8.56.0' + eslint: '>=9.29.0' - eslint-scope@8.3.0: - resolution: {integrity: sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==} + eslint-scope@8.4.0: + resolution: {integrity: sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} eslint-visitor-keys@3.4.3: resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - eslint-visitor-keys@4.2.0: - resolution: {integrity: sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==} + eslint-visitor-keys@4.2.1: + resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - eslint@9.23.0: - resolution: {integrity: sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw==} + eslint@9.32.0: + resolution: {integrity: sha512-LSehfdpgMeWcTZkWZVIJl+tkZ2nuSkyyB9C27MZqFWXuph7DvaowgcTvKqxvpLW1JZIk8PN7hFY3Rj9LQ7m7lg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} hasBin: true peerDependencies: @@ -1567,8 +1648,8 @@ packages: jiti: optional: true - espree@10.3.0: - resolution: {integrity: sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==} + espree@10.4.0: + resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} esquery@1.6.0: @@ -1590,8 +1671,8 @@ packages: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} - expect-type@1.2.0: - resolution: {integrity: sha512-80F22aiJ3GLyVnS/B3HzgR6RelZVumzj9jkL0Rhz4h0xYbNW9PjlQz5h3J/SShErbXBc295vseR4/MIbVmUbeA==} + expect-type@1.2.1: + resolution: {integrity: sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==} engines: {node: '>=12.0.0'} fast-deep-equal@3.1.3: @@ -1616,6 +1697,14 @@ packages: fastq@1.19.1: resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} + fdir@6.4.6: + resolution: {integrity: sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + file-entry-cache@8.0.0: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} @@ -1627,9 +1716,9 @@ packages: 
find-root@1.1.0: resolution: {integrity: sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==} - find-up@4.1.0: - resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} - engines: {node: '>=8'} + find-up-simple@1.0.1: + resolution: {integrity: sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ==} + engines: {node: '>=18'} find-up@5.0.0: resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} @@ -1659,8 +1748,8 @@ packages: resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} engines: {node: '>=14'} - form-data@4.0.2: - resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==} + form-data@4.0.4: + resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==} engines: {node: '>= 6'} fs-minipass@2.1.0: @@ -1718,8 +1807,8 @@ packages: resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} engines: {node: '>=18'} - globals@15.15.0: - resolution: {integrity: sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==} + globals@16.3.0: + resolution: {integrity: sha512-bqWEnJ1Nt3neqx2q5SFfGS8r/ahumIakg3HcwtNlrVlwXIeNumWn/c7Pn/wKzGhf6SaW6H6uWXLqC30STCMchQ==} engines: {node: '>=18'} globalthis@1.0.4: @@ -1738,9 +1827,9 @@ packages: engines: {node: '>=0.4.7'} hasBin: true - happy-dom@17.4.4: - resolution: {integrity: sha512-/Pb0ctk3HTZ5xEL3BZ0hK1AqDSAUuRQitOmROPHhfUYEWpmTImwfD8vFDGADmMAX0JYgbcgxWoLFKtsWhcpuVA==} - engines: {node: '>=18.0.0'} + happy-dom@18.0.1: + resolution: {integrity: sha512-qn+rKOW7KWpVTtgIUi6RVmTBZJSe2k0Db0vh1f7CWrWclkkc7/Q+FrOfkZIb2eiErLyqu5AXEzE7XthO9JVxRA==} + engines: {node: '>=20.0.0'} has-bigints@1.1.0: resolution: {integrity: sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==} @@ -1772,13 +1861,14 @@ packages: hoist-non-react-statics@3.3.2: resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} - hosted-git-info@2.8.9: - resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} - ignore@5.3.2: resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} engines: {node: '>= 4'} + ignore@7.0.5: + resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} + engines: {node: '>= 4'} + import-fresh@3.3.1: resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} engines: {node: '>=6'} @@ -1791,6 +1881,10 @@ packages: resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} engines: {node: '>=8'} + indent-string@5.0.0: + resolution: {integrity: sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==} + engines: {node: '>=12'} + internal-slot@1.1.0: resolution: {integrity: sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==} engines: {node: '>= 0.4'} @@ -1818,9 +1912,9 @@ packages: 
resolution: {integrity: sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==} engines: {node: '>= 0.4'} - is-builtin-module@3.2.1: - resolution: {integrity: sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==} - engines: {node: '>=6'} + is-builtin-module@5.0.0: + resolution: {integrity: sha512-f4RqJKBUe5rQkJ2eJEJBXSticB3hGbN9j0yxxMQFqIW89Jp9WYFtzfTcRlstDKVUTRzSOTLKRfO9vIztenwtxA==} + engines: {node: '>=18.20'} is-callable@1.2.7: resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} @@ -1929,17 +2023,16 @@ packages: js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + js-tokens@9.0.1: + resolution: {integrity: sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==} + js-yaml@4.1.0: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true - jsesc@0.5.0: - resolution: {integrity: sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==} - hasBin: true - - jsesc@2.5.2: - resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} - engines: {node: '>=4'} + jsesc@3.0.2: + resolution: {integrity: sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==} + engines: {node: '>=6'} hasBin: true jsesc@3.1.0: @@ -1980,10 +2073,6 @@ packages: lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - locate-path@5.0.0: - resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} - engines: {node: '>=8'} - locate-path@6.0.0: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} @@ -1998,8 +2087,8 @@ packages: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true - loupe@3.1.3: - resolution: {integrity: sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==} + loupe@3.1.4: + resolution: {integrity: sha512-wJzkKwJrheKtknCOKNEtDK4iqg/MxmZheEMtSTYvnzRdEYaZzmgH976nenp8WdJRdx5Vc1X/9MO0Oszl6ezeXg==} lru-cache@10.4.3: resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} @@ -2107,9 +2196,6 @@ packages: node-releases@2.0.19: resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} - normalize-package-data@2.5.0: - resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} - normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} @@ -2158,26 +2244,14 @@ packages: resolution: {integrity: sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==} engines: {node: '>= 0.4'} - p-limit@2.3.0: - resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} - engines: {node: 
'>=6'} - p-limit@3.1.0: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} engines: {node: '>=10'} - p-locate@4.1.0: - resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} - engines: {node: '>=8'} - p-locate@5.0.0: resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} engines: {node: '>=10'} - p-try@2.2.0: - resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} - engines: {node: '>=6'} - package-json-from-dist@1.0.1: resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} @@ -2238,6 +2312,10 @@ packages: resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} engines: {node: '>=12'} + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} @@ -2249,8 +2327,8 @@ packages: resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} engines: {node: '>= 0.4'} - postcss@8.5.3: - resolution: {integrity: sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==} + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} prelude-ls@1.2.1: @@ -2261,8 +2339,8 @@ packages: resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} engines: {node: '>=6.0.0'} - prettier@3.5.3: - resolution: {integrity: sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==} + prettier@3.6.2: + resolution: {integrity: sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==} engines: {node: '>=14'} hasBin: true @@ -2297,13 +2375,13 @@ packages: peerDependencies: react: '>= 16.3.0' - react-dom@19.0.0: - resolution: {integrity: sha512-4GV5sHFG0e/0AD4X+ySy6UJd3jVl1iNsNHdpad0qhABJ11twS3TTBnseqsKurKcsNqCEFeGL3uLpVChpIO3QfQ==} + react-dom@19.1.1: + resolution: {integrity: sha512-Dlq/5LAZgF0Gaz6yiqZCf6VCcZs1ghAJyrsu84Q/GT0gV+mCxbfmKNoGRKBYMJ8IEdGPqu49YWXD02GCknEDkw==} peerDependencies: - react: ^19.0.0 + react: ^19.1.1 - react-hook-form@7.54.2: - resolution: {integrity: sha512-eHpAUgUjWbZocoQYUHposymRb4ZP6d0uwUnooL2uOybA9/3tPUvoAKqEWK1WaSiTxxOfTpffNZP7QwlnM3/gEg==} + react-hook-form@7.61.1: + resolution: {integrity: sha512-2vbXUFDYgqEgM2RcXcAT2PwDW/80QARi+PKmHy5q2KhuKvOlG8iIYgf7eIlIANR5trW9fJbP4r5aub3a4egsew==} engines: {node: '>=18.0.0'} peerDependencies: react: ^16.8.0 || ^17 || ^18 || ^19 @@ -2314,15 +2392,15 @@ packages: react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} - react-router-dom@7.4.0: - resolution: {integrity: sha512-VlksBPf3n2bijPvnA7nkTsXxMAKOj+bWp4R9c3i+bnwlSOFAGOkJkKhzy/OsRkWaBMICqcAl1JDzh9ZSOze9CA==} + react-router-dom@7.7.1: + resolution: {integrity: 
sha512-bavdk2BA5r3MYalGKZ01u8PGuDBloQmzpBZVhDLrOOv1N943Wq6dcM9GhB3x8b7AbqPMEezauv4PeGkAJfy7FQ==} engines: {node: '>=20.0.0'} peerDependencies: react: '>=18' react-dom: '>=18' - react-router@7.4.0: - resolution: {integrity: sha512-Y2g5ObjkvX3VFeVt+0CIPuYd9PpgqCslG7ASSIdN73LwA1nNWzcMLaoMRJfP3prZFI92svxFwbn7XkLJ+UPQ6A==} + react-router@7.7.1: + resolution: {integrity: sha512-jVKHXoWRIsD/qS6lvGveckwb862EekvapdHJN/cGmzw40KnJH5gg53ujOJ4qX6EKIK9LSBfFed/xiQ5yeXNrUA==} engines: {node: '>=20.0.0'} peerDependencies: react: '>=18' @@ -2331,18 +2409,10 @@ packages: react-dom: optional: true - react@19.0.0: - resolution: {integrity: sha512-V8AVnmPIICiWpGfm6GLzCR/W5FXLchHop40W4nXBmdlEceh16rCN8O8LNWm5bh5XUX91fh7KpA+W0TgMKmgTpQ==} + react@19.1.1: + resolution: {integrity: sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==} engines: {node: '>=0.10.0'} - read-pkg-up@7.0.1: - resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==} - engines: {node: '>=8'} - - read-pkg@5.2.0: - resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==} - engines: {node: '>=8'} - readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -2355,9 +2425,6 @@ packages: resolution: {integrity: sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==} engines: {node: '>= 0.4'} - regenerator-runtime@0.14.1: - resolution: {integrity: sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==} - regexp-tree@0.1.27: resolution: {integrity: sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==} hasBin: true @@ -2366,8 +2433,8 @@ packages: resolution: {integrity: sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==} engines: {node: '>= 0.4'} - regjsparser@0.10.0: - resolution: {integrity: sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA==} + regjsparser@0.12.0: + resolution: {integrity: sha512-cnE+y8bz4NhMjISKbgeVJtqNbtf5QpjZP+Bslo+UqkIt9QPnX9q095eiRRASJG1/tz6dlNr6Z5NsBiWYokp6EQ==} hasBin: true resolve-from@4.0.0: @@ -2387,8 +2454,8 @@ packages: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - rollup@4.39.0: - resolution: {integrity: sha512-thI8kNc02yNvnmJp8dr3fNWJ9tCONDhp6TV35X6HkKGGs9E6q7YWCHbe5vKiTa7TAiNcFEmXKj3X/pG2b3ci0g==} + rollup@4.46.2: + resolution: {integrity: sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -2407,19 +2474,15 @@ packages: resolution: {integrity: sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==} engines: {node: '>= 0.4'} - scheduler@0.25.0: - resolution: {integrity: sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA==} - - semver@5.7.2: - resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} - hasBin: true + scheduler@0.26.0: + resolution: {integrity: sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==} 
semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true - semver@7.7.1: - resolution: {integrity: sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==} + semver@7.7.2: + resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} engines: {node: '>=10'} hasBin: true @@ -2481,23 +2544,11 @@ packages: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} - spdx-correct@3.2.0: - resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==} - - spdx-exceptions@2.5.0: - resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==} - - spdx-expression-parse@3.0.1: - resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} - - spdx-license-ids@3.0.21: - resolution: {integrity: sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==} - stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} - std-env@3.8.1: - resolution: {integrity: sha512-vj5lIj3Mwf9D79hBkltk5qmkFI+biIKWS2IBxEyEU3AX1tUf7AoL8nSazCOiiqQsGKIq01SClsKEzweu34uwvA==} + std-env@3.9.0: + resolution: {integrity: sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==} string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} @@ -2542,10 +2593,17 @@ packages: resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} engines: {node: '>=8'} + strip-indent@4.0.0: + resolution: {integrity: sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==} + engines: {node: '>=12'} + strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} + strip-literal@3.0.0: + resolution: {integrity: sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==} + stylis@4.2.0: resolution: {integrity: sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw==} @@ -2557,8 +2615,8 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} - synckit@0.10.3: - resolution: {integrity: sha512-R1urvuyiTaWfeCggqEvpDJwAlDVdsT9NM+IP//Tk2x7qHCkSvBk/fwFgw/TLAHzZlrAnnazMcRw0ZD8HlYFTEQ==} + synckit@0.11.11: + resolution: {integrity: sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==} engines: {node: ^14.18.0 || >=16.0.0} tar@6.2.1: @@ -2571,22 +2629,22 @@ packages: tinyexec@0.3.2: resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} - tinypool@1.0.2: - resolution: {integrity: sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==} + tinyglobby@0.2.14: + resolution: {integrity: 
sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} + engines: {node: '>=12.0.0'} + + tinypool@1.1.1: + resolution: {integrity: sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==} engines: {node: ^18.0.0 || >=20.0.0} tinyrainbow@2.0.0: resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==} engines: {node: '>=14.0.0'} - tinyspy@3.0.2: - resolution: {integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==} + tinyspy@4.0.3: + resolution: {integrity: sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==} engines: {node: '>=14.0.0'} - to-fast-properties@2.0.0: - resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} - engines: {node: '>=4'} - to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} @@ -2603,21 +2661,10 @@ packages: tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} - turbo-stream@2.4.0: - resolution: {integrity: sha512-FHncC10WpBd2eOmGwpmQsWLDoK4cqsA/UT/GqNoaKOQnT8uzhtCbg3EoUDMvqpOSAI0S26mr0rkjzbOO6S3v1g==} - type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} - type-fest@0.6.0: - resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==} - engines: {node: '>=8'} - - type-fest@0.8.1: - resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} - engines: {node: '>=8'} - typed-array-buffer@1.0.3: resolution: {integrity: sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==} engines: {node: '>= 0.4'} @@ -2634,15 +2681,15 @@ packages: resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} engines: {node: '>= 0.4'} - typescript-eslint@8.28.0: - resolution: {integrity: sha512-jfZtxJoHm59bvoCMYCe2BM0/baMswRhMmYhy+w6VfcyHrjxZ0OJe0tGasydCpIpA+A/WIJhTyZfb3EtwNC/kHQ==} + typescript-eslint@8.38.0: + resolution: {integrity: sha512-FsZlrYK6bPDGoLeZRuvx2v6qrM03I0U0SnfCLPs/XCCPCFD80xU9Pg09H/K+XFa68uJuZo7l/Xhs+eDRg2l3hg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - typescript@5.5.4: - resolution: {integrity: sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==} + typescript@5.8.3: + resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} engines: {node: '>=14.17'} hasBin: true @@ -2658,6 +2705,9 @@ packages: resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} engines: {node: '>= 0.4'} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + universal-cookie@8.0.1: resolution: {integrity: sha512-B6ks9FLLnP1UbPPcveOidfvB9pHjP+wekP2uRYB9YDfKVpvcjKgy1W5Zj+cEXJ9KTPnqOKGfVDQBmn8/YCQfRg==} @@ -2673,11 +2723,8 @@ 
packages: uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - validate-npm-package-license@3.0.4: - resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} - - vite-node@3.0.9: - resolution: {integrity: sha512-w3Gdx7jDcuT9cNn9jExXgOyKmf5UOTb6WMHz8LGAm54eS1Elf5OuBhCxl6zJxGhEeIkgsE1WbHuoL0mj/UXqXg==} + vite-node@3.2.4: + resolution: {integrity: sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} hasBin: true @@ -2686,19 +2733,19 @@ packages: peerDependencies: vite: '>2.0.0-0' - vite@6.2.6: - resolution: {integrity: sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==} - engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + vite@7.0.6: + resolution: {integrity: sha512-MHFiOENNBd+Bd9uvc8GEsIzdkn1JxMmEeYX35tI3fv0sJBUTfW5tQsoaOwuY4KhBI09A3dUJ/DXf2yxPVPUceg==} + engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: - '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + '@types/node': ^20.19.0 || >=22.12.0 jiti: '>=1.21.0' - less: '*' + less: ^4.0.0 lightningcss: ^1.21.0 - sass: '*' - sass-embedded: '*' - stylus: '*' - sugarss: '*' + sass: ^1.70.0 + sass-embedded: ^1.70.0 + stylus: '>=0.54.8' + sugarss: ^5.0.0 terser: ^5.16.0 tsx: ^4.8.1 yaml: ^2.4.2 @@ -2726,16 +2773,16 @@ packages: yaml: optional: true - vitest@3.0.9: - resolution: {integrity: sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==} + vitest@3.2.4: + resolution: {integrity: sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@types/debug': ^4.1.12 '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 - '@vitest/browser': 3.0.9 - '@vitest/ui': 3.0.9 + '@vitest/browser': 3.2.4 + '@vitest/ui': 3.2.4 happy-dom: '*' jsdom: '*' peerDependenciesMeta: @@ -2754,10 +2801,6 @@ packages: jsdom: optional: true - webidl-conversions@7.0.0: - resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} - engines: {node: '>=12'} - whatwg-mimetype@3.0.0: resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==} engines: {node: '>=12'} @@ -2816,17 +2859,17 @@ packages: snapshots: - '@7nohe/openapi-react-query-codegen@1.6.2(commander@12.1.0)(glob@10.4.5)(magicast@0.3.5)(ts-morph@22.0.0)(typescript@5.5.4)': + '@7nohe/openapi-react-query-codegen@1.6.2(commander@12.1.0)(glob@10.4.5)(magicast@0.3.5)(ts-morph@22.0.0)(typescript@5.8.3)': dependencies: - '@hey-api/openapi-ts': 0.52.0(magicast@0.3.5)(typescript@5.5.4) + '@hey-api/openapi-ts': 0.52.0(magicast@0.3.5)(typescript@5.8.3) commander: 12.1.0 glob: 10.4.5 ts-morph: 22.0.0 - typescript: 5.5.4 + typescript: 5.8.3 transitivePeerDependencies: - magicast - '@adobe/css-tools@4.4.2': {} + '@adobe/css-tools@4.4.3': {} '@apidevtools/json-schema-ref-parser@11.6.4': dependencies: @@ -2834,186 +2877,181 @@ snapshots: '@types/json-schema': 7.0.15 js-yaml: 4.1.0 - '@ark-ui/react@5.4.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0)': - dependencies: - '@internationalized/date': 3.7.0 - '@zag-js/accordion': 1.7.0 - '@zag-js/anatomy': 1.7.0 - '@zag-js/auto-resize': 1.7.0 - '@zag-js/avatar': 1.7.0 - '@zag-js/carousel': 1.7.0 - 
'@zag-js/checkbox': 1.7.0 - '@zag-js/clipboard': 1.7.0 - '@zag-js/collapsible': 1.7.0 - '@zag-js/collection': 1.7.0 - '@zag-js/color-picker': 1.7.0 - '@zag-js/color-utils': 1.7.0 - '@zag-js/combobox': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/date-picker': 1.7.0(@internationalized/date@3.7.0) - '@zag-js/date-utils': 1.7.0(@internationalized/date@3.7.0) - '@zag-js/dialog': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/editable': 1.7.0 - '@zag-js/file-upload': 1.7.0 - '@zag-js/file-utils': 1.7.0 - '@zag-js/focus-trap': 1.7.0 - '@zag-js/highlight-word': 1.7.0 - '@zag-js/hover-card': 1.7.0 - '@zag-js/i18n-utils': 1.7.0 - '@zag-js/menu': 1.7.0 - '@zag-js/number-input': 1.7.0 - '@zag-js/pagination': 1.7.0 - '@zag-js/pin-input': 1.7.0 - '@zag-js/popover': 1.7.0 - '@zag-js/presence': 1.7.0 - '@zag-js/progress': 1.7.0 - '@zag-js/qr-code': 1.7.0 - '@zag-js/radio-group': 1.7.0 - '@zag-js/rating-group': 1.7.0 - '@zag-js/react': 1.7.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0) - '@zag-js/select': 1.7.0 - '@zag-js/signature-pad': 1.7.0 - '@zag-js/slider': 1.7.0 - '@zag-js/splitter': 1.7.0 - '@zag-js/steps': 1.7.0 - '@zag-js/switch': 1.7.0 - '@zag-js/tabs': 1.7.0 - '@zag-js/tags-input': 1.7.0 - '@zag-js/time-picker': 1.7.0(@internationalized/date@3.7.0) - '@zag-js/timer': 1.7.0 - '@zag-js/toast': 1.7.0 - '@zag-js/toggle': 1.7.0 - '@zag-js/toggle-group': 1.7.0 - '@zag-js/tooltip': 1.7.0 - '@zag-js/tour': 1.7.0 - '@zag-js/tree-view': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - react: 19.0.0 - react-dom: 19.0.0(react@19.0.0) - - '@babel/code-frame@7.26.2': - dependencies: - '@babel/helper-validator-identifier': 7.25.9 + '@ark-ui/react@5.18.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + dependencies: + '@internationalized/date': 3.8.2 + '@zag-js/accordion': 1.21.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/angle-slider': 1.21.0 + '@zag-js/auto-resize': 1.21.0 + '@zag-js/avatar': 1.21.0 + '@zag-js/carousel': 1.21.0 + '@zag-js/checkbox': 1.21.0 + '@zag-js/clipboard': 1.21.0 + '@zag-js/collapsible': 1.21.0 + '@zag-js/collection': 1.21.0 + '@zag-js/color-picker': 1.21.0 + '@zag-js/color-utils': 1.21.0 + '@zag-js/combobox': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/date-picker': 1.21.0(@internationalized/date@3.8.2) + '@zag-js/date-utils': 1.21.0(@internationalized/date@3.8.2) + '@zag-js/dialog': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/editable': 1.21.0 + '@zag-js/file-upload': 1.21.0 + '@zag-js/file-utils': 1.21.0 + '@zag-js/floating-panel': 1.21.0 + '@zag-js/focus-trap': 1.21.0 + '@zag-js/highlight-word': 1.21.0 + '@zag-js/hover-card': 1.21.0 + '@zag-js/i18n-utils': 1.21.0 + '@zag-js/json-tree-utils': 1.21.0 + '@zag-js/listbox': 1.21.0 + '@zag-js/menu': 1.21.0 + '@zag-js/number-input': 1.21.0 + '@zag-js/pagination': 1.21.0 + '@zag-js/password-input': 1.21.0 + '@zag-js/pin-input': 1.21.0 + '@zag-js/popover': 1.21.0 + '@zag-js/presence': 1.21.0 + '@zag-js/progress': 1.21.0 + '@zag-js/qr-code': 1.21.0 + '@zag-js/radio-group': 1.21.0 + '@zag-js/rating-group': 1.21.0 + '@zag-js/react': 1.21.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@zag-js/select': 1.21.0 + '@zag-js/signature-pad': 1.21.0 + '@zag-js/slider': 1.21.0 + '@zag-js/splitter': 1.21.0 + '@zag-js/steps': 1.21.0 + '@zag-js/switch': 1.21.0 + '@zag-js/tabs': 1.21.0 + '@zag-js/tags-input': 1.21.0 + '@zag-js/time-picker': 1.21.0(@internationalized/date@3.8.2) + '@zag-js/timer': 1.21.0 + '@zag-js/toast': 1.21.0 + '@zag-js/toggle': 1.21.0 + '@zag-js/toggle-group': 1.21.0 + '@zag-js/tooltip': 1.21.0 + '@zag-js/tour': 1.21.0 + 
'@zag-js/tree-view': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) + + '@babel/code-frame@7.27.1': + dependencies: + '@babel/helper-validator-identifier': 7.27.1 js-tokens: 4.0.0 picocolors: 1.1.1 - '@babel/generator@7.17.7': + '@babel/generator@7.27.1': dependencies: - '@babel/types': 7.27.0 - jsesc: 2.5.2 - source-map: 0.5.7 - - '@babel/generator@7.27.0': - dependencies: - '@babel/parser': 7.27.0 - '@babel/types': 7.27.0 + '@babel/parser': 7.27.1 + '@babel/types': 7.27.1 '@jridgewell/gen-mapping': 0.3.8 '@jridgewell/trace-mapping': 0.3.25 jsesc: 3.1.0 - '@babel/helper-environment-visitor@7.24.7': - dependencies: - '@babel/types': 7.27.0 - - '@babel/helper-function-name@7.24.7': + '@babel/generator@7.28.0': dependencies: - '@babel/template': 7.27.0 - '@babel/types': 7.27.0 + '@babel/parser': 7.28.0 + '@babel/types': 7.28.2 + '@jridgewell/gen-mapping': 0.3.12 + '@jridgewell/trace-mapping': 0.3.29 + jsesc: 3.1.0 - '@babel/helper-hoist-variables@7.24.7': - dependencies: - '@babel/types': 7.27.0 + '@babel/helper-globals@7.28.0': {} - '@babel/helper-module-imports@7.25.9': + '@babel/helper-module-imports@7.27.1': dependencies: - '@babel/traverse': 7.27.0 - '@babel/types': 7.27.0 + '@babel/traverse': 7.28.0 + '@babel/types': 7.28.2 transitivePeerDependencies: - supports-color - '@babel/helper-split-export-declaration@7.24.7': - dependencies: - '@babel/types': 7.27.0 - - '@babel/helper-string-parser@7.25.9': {} + '@babel/helper-string-parser@7.27.1': {} - '@babel/helper-validator-identifier@7.25.9': {} + '@babel/helper-validator-identifier@7.27.1': {} - '@babel/parser@7.27.0': + '@babel/parser@7.27.1': dependencies: - '@babel/types': 7.27.0 + '@babel/types': 7.27.1 - '@babel/runtime@7.26.10': + '@babel/parser@7.28.0': dependencies: - regenerator-runtime: 0.14.1 + '@babel/types': 7.28.2 - '@babel/runtime@7.27.0': + '@babel/runtime@7.27.1': {} + + '@babel/runtime@7.28.2': {} + + '@babel/template@7.27.1': dependencies: - regenerator-runtime: 0.14.1 + '@babel/code-frame': 7.27.1 + '@babel/parser': 7.27.1 + '@babel/types': 7.27.1 - '@babel/template@7.27.0': + '@babel/template@7.27.2': dependencies: - '@babel/code-frame': 7.26.2 - '@babel/parser': 7.27.0 - '@babel/types': 7.27.0 + '@babel/code-frame': 7.27.1 + '@babel/parser': 7.28.0 + '@babel/types': 7.28.2 - '@babel/traverse@7.23.2': + '@babel/traverse@7.27.1': dependencies: - '@babel/code-frame': 7.26.2 - '@babel/generator': 7.27.0 - '@babel/helper-environment-visitor': 7.24.7 - '@babel/helper-function-name': 7.24.7 - '@babel/helper-hoist-variables': 7.24.7 - '@babel/helper-split-export-declaration': 7.24.7 - '@babel/parser': 7.27.0 - '@babel/types': 7.27.0 - debug: 4.4.0 + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.27.1 + '@babel/parser': 7.27.1 + '@babel/template': 7.27.1 + '@babel/types': 7.27.1 + debug: 4.4.1 globals: 11.12.0 transitivePeerDependencies: - supports-color - '@babel/traverse@7.27.0': + '@babel/traverse@7.28.0': dependencies: - '@babel/code-frame': 7.26.2 - '@babel/generator': 7.27.0 - '@babel/parser': 7.27.0 - '@babel/template': 7.27.0 - '@babel/types': 7.27.0 - debug: 4.4.0 - globals: 11.12.0 + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.28.0 + '@babel/helper-globals': 7.28.0 + '@babel/parser': 7.28.0 + '@babel/template': 7.27.2 + '@babel/types': 7.28.2 + debug: 4.4.1 transitivePeerDependencies: - supports-color - '@babel/types@7.17.0': + '@babel/types@7.27.1': dependencies: - '@babel/helper-validator-identifier': 7.25.9 - to-fast-properties: 2.0.0 + 
'@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 - '@babel/types@7.27.0': + '@babel/types@7.28.2': dependencies: - '@babel/helper-string-parser': 7.25.9 - '@babel/helper-validator-identifier': 7.25.9 + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 - '@chakra-ui/react@3.15.0(@emotion/react@11.14.0(@types/react@18.3.20)(react@19.0.0))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)': + '@chakra-ui/react@3.24.0(@emotion/react@11.14.0(@types/react@19.1.9)(react@19.1.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@ark-ui/react': 5.4.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + '@ark-ui/react': 5.18.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1) '@emotion/is-prop-valid': 1.3.1 - '@emotion/react': 11.14.0(@types/react@18.3.20)(react@19.0.0) + '@emotion/react': 11.14.0(@types/react@19.1.9)(react@19.1.1) '@emotion/serialize': 1.3.3 - '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.0.0) + '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.1.1) '@emotion/utils': 1.4.2 - '@pandacss/is-valid-prop': 0.41.0 + '@pandacss/is-valid-prop': 0.54.0 csstype: 3.1.3 fast-safe-stringify: 2.1.1 - react: 19.0.0 - react-dom: 19.0.0(react@19.0.0) + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) '@emotion/babel-plugin@11.13.5': dependencies: - '@babel/helper-module-imports': 7.25.9 - '@babel/runtime': 7.27.0 + '@babel/helper-module-imports': 7.27.1 + '@babel/runtime': 7.28.2 '@emotion/hash': 0.9.2 '@emotion/memoize': 0.9.0 '@emotion/serialize': 1.3.3 @@ -3042,19 +3080,19 @@ snapshots: '@emotion/memoize@0.9.0': {} - '@emotion/react@11.14.0(@types/react@18.3.20)(react@19.0.0)': + '@emotion/react@11.14.0(@types/react@19.1.9)(react@19.1.1)': dependencies: - '@babel/runtime': 7.27.0 + '@babel/runtime': 7.28.2 '@emotion/babel-plugin': 11.13.5 '@emotion/cache': 11.14.0 '@emotion/serialize': 1.3.3 - '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.0.0) + '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.1.1) '@emotion/utils': 1.4.2 '@emotion/weak-memoize': 0.4.0 hoist-non-react-statics: 3.3.2 - react: 19.0.0 + react: 19.1.1 optionalDependencies: - '@types/react': 18.3.20 + '@types/react': 19.1.9 transitivePeerDependencies: - supports-color @@ -3070,119 +3108,122 @@ snapshots: '@emotion/unitless@0.10.0': {} - '@emotion/use-insertion-effect-with-fallbacks@1.2.0(react@19.0.0)': + '@emotion/use-insertion-effect-with-fallbacks@1.2.0(react@19.1.1)': dependencies: - react: 19.0.0 + react: 19.1.1 '@emotion/utils@1.4.2': {} '@emotion/weak-memoize@0.4.0': {} - '@esbuild/aix-ppc64@0.25.2': + '@esbuild/aix-ppc64@0.25.8': + optional: true + + '@esbuild/android-arm64@0.25.8': optional: true - '@esbuild/android-arm64@0.25.2': + '@esbuild/android-arm@0.25.8': optional: true - '@esbuild/android-arm@0.25.2': + '@esbuild/android-x64@0.25.8': optional: true - '@esbuild/android-x64@0.25.2': + '@esbuild/darwin-arm64@0.25.8': optional: true - '@esbuild/darwin-arm64@0.25.2': + '@esbuild/darwin-x64@0.25.8': optional: true - '@esbuild/darwin-x64@0.25.2': + '@esbuild/freebsd-arm64@0.25.8': optional: true - '@esbuild/freebsd-arm64@0.25.2': + '@esbuild/freebsd-x64@0.25.8': optional: true - '@esbuild/freebsd-x64@0.25.2': + '@esbuild/linux-arm64@0.25.8': optional: true - '@esbuild/linux-arm64@0.25.2': + '@esbuild/linux-arm@0.25.8': optional: true - '@esbuild/linux-arm@0.25.2': + '@esbuild/linux-ia32@0.25.8': optional: true - '@esbuild/linux-ia32@0.25.2': + '@esbuild/linux-loong64@0.25.8': optional: true 
- '@esbuild/linux-loong64@0.25.2': + '@esbuild/linux-mips64el@0.25.8': optional: true - '@esbuild/linux-mips64el@0.25.2': + '@esbuild/linux-ppc64@0.25.8': optional: true - '@esbuild/linux-ppc64@0.25.2': + '@esbuild/linux-riscv64@0.25.8': optional: true - '@esbuild/linux-riscv64@0.25.2': + '@esbuild/linux-s390x@0.25.8': optional: true - '@esbuild/linux-s390x@0.25.2': + '@esbuild/linux-x64@0.25.8': optional: true - '@esbuild/linux-x64@0.25.2': + '@esbuild/netbsd-arm64@0.25.8': optional: true - '@esbuild/netbsd-arm64@0.25.2': + '@esbuild/netbsd-x64@0.25.8': optional: true - '@esbuild/netbsd-x64@0.25.2': + '@esbuild/openbsd-arm64@0.25.8': optional: true - '@esbuild/openbsd-arm64@0.25.2': + '@esbuild/openbsd-x64@0.25.8': optional: true - '@esbuild/openbsd-x64@0.25.2': + '@esbuild/openharmony-arm64@0.25.8': optional: true - '@esbuild/sunos-x64@0.25.2': + '@esbuild/sunos-x64@0.25.8': optional: true - '@esbuild/win32-arm64@0.25.2': + '@esbuild/win32-arm64@0.25.8': optional: true - '@esbuild/win32-ia32@0.25.2': + '@esbuild/win32-ia32@0.25.8': optional: true - '@esbuild/win32-x64@0.25.2': + '@esbuild/win32-x64@0.25.8': optional: true - '@eslint-community/eslint-utils@4.5.1(eslint@9.23.0(jiti@1.21.7))': + '@eslint-community/eslint-utils@4.7.0(eslint@9.32.0(jiti@1.21.7))': dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.32.0(jiti@1.21.7) eslint-visitor-keys: 3.4.3 '@eslint-community/regexpp@4.12.1': {} - '@eslint/compat@1.2.7(eslint@9.23.0(jiti@1.21.7))': + '@eslint/compat@1.3.1(eslint@9.32.0(jiti@1.21.7))': optionalDependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.32.0(jiti@1.21.7) - '@eslint/config-array@0.19.2': + '@eslint/config-array@0.21.0': dependencies: '@eslint/object-schema': 2.1.6 - debug: 4.4.0 + debug: 4.4.1 minimatch: 3.1.2 transitivePeerDependencies: - supports-color - '@eslint/config-helpers@0.2.0': {} + '@eslint/config-helpers@0.3.0': {} - '@eslint/core@0.12.0': + '@eslint/core@0.15.1': dependencies: '@types/json-schema': 7.0.15 '@eslint/eslintrc@3.3.1': dependencies: ajv: 6.12.6 - debug: 4.4.0 - espree: 10.3.0 + debug: 4.4.1 + espree: 10.4.0 globals: 14.0.0 ignore: 5.3.2 import-fresh: 3.3.1 @@ -3192,34 +3233,34 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/js@9.23.0': {} + '@eslint/js@9.32.0': {} '@eslint/object-schema@2.1.6': {} - '@eslint/plugin-kit@0.2.7': + '@eslint/plugin-kit@0.3.4': dependencies: - '@eslint/core': 0.12.0 + '@eslint/core': 0.15.1 levn: 0.4.1 - '@floating-ui/core@1.6.9': + '@floating-ui/core@1.7.3': dependencies: - '@floating-ui/utils': 0.2.9 + '@floating-ui/utils': 0.2.10 - '@floating-ui/dom@1.6.13': + '@floating-ui/dom@1.7.2': dependencies: - '@floating-ui/core': 1.6.9 - '@floating-ui/utils': 0.2.9 + '@floating-ui/core': 1.7.3 + '@floating-ui/utils': 0.2.10 - '@floating-ui/utils@0.2.9': {} + '@floating-ui/utils@0.2.10': {} - '@hey-api/openapi-ts@0.52.0(magicast@0.3.5)(typescript@5.5.4)': + '@hey-api/openapi-ts@0.52.0(magicast@0.3.5)(typescript@5.8.3)': dependencies: '@apidevtools/json-schema-ref-parser': 11.6.4 c12: 1.11.1(magicast@0.3.5) camelcase: 8.0.0 commander: 12.1.0 handlebars: 4.7.8 - typescript: 5.5.4 + typescript: 5.8.3 transitivePeerDependencies: - magicast @@ -3234,15 +3275,15 @@ snapshots: '@humanwhocodes/retry@0.3.1': {} - '@humanwhocodes/retry@0.4.2': {} + '@humanwhocodes/retry@0.4.3': {} - '@internationalized/date@3.7.0': + '@internationalized/date@3.8.2': dependencies: - '@swc/helpers': 0.5.15 + '@swc/helpers': 0.5.17 - '@internationalized/number@3.6.0': + '@internationalized/number@3.6.3': dependencies: - 
'@swc/helpers': 0.5.15 + '@swc/helpers': 0.5.17 '@isaacs/cliui@8.0.2': dependencies: @@ -3253,6 +3294,11 @@ snapshots: wrap-ansi: 8.1.0 wrap-ansi-cjs: wrap-ansi@7.0.0 + '@jridgewell/gen-mapping@0.3.12': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.4 + '@jridgewell/trace-mapping': 0.3.29 + '@jridgewell/gen-mapping@0.3.8': dependencies: '@jridgewell/set-array': 1.2.1 @@ -3265,11 +3311,18 @@ snapshots: '@jridgewell/sourcemap-codec@1.5.0': {} + '@jridgewell/sourcemap-codec@1.5.4': {} + '@jridgewell/trace-mapping@0.3.25': dependencies: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/trace-mapping@0.3.29': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.4 + '@jsdevtools/ono@7.1.3': {} '@nodelib/fs.scandir@2.1.5': @@ -3284,153 +3337,153 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.19.1 - '@pandacss/is-valid-prop@0.41.0': {} + '@pandacss/is-valid-prop@0.54.0': {} '@pkgjs/parseargs@0.11.0': optional: true - '@pkgr/core@0.2.0': {} + '@pkgr/core@0.2.9': {} + + '@rolldown/pluginutils@1.0.0-beta.27': {} - '@rollup/rollup-android-arm-eabi@4.39.0': + '@rollup/rollup-android-arm-eabi@4.46.2': optional: true - '@rollup/rollup-android-arm64@4.39.0': + '@rollup/rollup-android-arm64@4.46.2': optional: true - '@rollup/rollup-darwin-arm64@4.39.0': + '@rollup/rollup-darwin-arm64@4.46.2': optional: true - '@rollup/rollup-darwin-x64@4.39.0': + '@rollup/rollup-darwin-x64@4.46.2': optional: true - '@rollup/rollup-freebsd-arm64@4.39.0': + '@rollup/rollup-freebsd-arm64@4.46.2': optional: true - '@rollup/rollup-freebsd-x64@4.39.0': + '@rollup/rollup-freebsd-x64@4.46.2': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.39.0': + '@rollup/rollup-linux-arm-gnueabihf@4.46.2': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.39.0': + '@rollup/rollup-linux-arm-musleabihf@4.46.2': optional: true - '@rollup/rollup-linux-arm64-gnu@4.39.0': + '@rollup/rollup-linux-arm64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-arm64-musl@4.39.0': + '@rollup/rollup-linux-arm64-musl@4.46.2': optional: true - '@rollup/rollup-linux-loongarch64-gnu@4.39.0': + '@rollup/rollup-linux-loongarch64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-powerpc64le-gnu@4.39.0': + '@rollup/rollup-linux-ppc64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.39.0': + '@rollup/rollup-linux-riscv64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-riscv64-musl@4.39.0': + '@rollup/rollup-linux-riscv64-musl@4.46.2': optional: true - '@rollup/rollup-linux-s390x-gnu@4.39.0': + '@rollup/rollup-linux-s390x-gnu@4.46.2': optional: true - '@rollup/rollup-linux-x64-gnu@4.39.0': + '@rollup/rollup-linux-x64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-x64-musl@4.39.0': + '@rollup/rollup-linux-x64-musl@4.46.2': optional: true - '@rollup/rollup-win32-arm64-msvc@4.39.0': + '@rollup/rollup-win32-arm64-msvc@4.46.2': optional: true - '@rollup/rollup-win32-ia32-msvc@4.39.0': + '@rollup/rollup-win32-ia32-msvc@4.46.2': optional: true - '@rollup/rollup-win32-x64-msvc@4.39.0': + '@rollup/rollup-win32-x64-msvc@4.46.2': optional: true - '@stylistic/eslint-plugin@2.13.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@stylistic/eslint-plugin@5.2.2(eslint@9.32.0(jiti@1.21.7))': dependencies: - '@typescript-eslint/utils': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) - eslint-visitor-keys: 4.2.0 - espree: 10.3.0 + '@eslint-community/eslint-utils': 4.7.0(eslint@9.32.0(jiti@1.21.7)) + '@typescript-eslint/types': 
8.38.0 + eslint: 9.32.0(jiti@1.21.7) + eslint-visitor-keys: 4.2.1 + espree: 10.4.0 estraverse: 5.3.0 - picomatch: 4.0.2 - transitivePeerDependencies: - - supports-color - - typescript + picomatch: 4.0.3 - '@swc/core-darwin-arm64@1.11.12': + '@swc/core-darwin-arm64@1.13.1': optional: true - '@swc/core-darwin-x64@1.11.12': + '@swc/core-darwin-x64@1.13.1': optional: true - '@swc/core-linux-arm-gnueabihf@1.11.12': + '@swc/core-linux-arm-gnueabihf@1.13.1': optional: true - '@swc/core-linux-arm64-gnu@1.11.12': + '@swc/core-linux-arm64-gnu@1.13.1': optional: true - '@swc/core-linux-arm64-musl@1.11.12': + '@swc/core-linux-arm64-musl@1.13.1': optional: true - '@swc/core-linux-x64-gnu@1.11.12': + '@swc/core-linux-x64-gnu@1.13.1': optional: true - '@swc/core-linux-x64-musl@1.11.12': + '@swc/core-linux-x64-musl@1.13.1': optional: true - '@swc/core-win32-arm64-msvc@1.11.12': + '@swc/core-win32-arm64-msvc@1.13.1': optional: true - '@swc/core-win32-ia32-msvc@1.11.12': + '@swc/core-win32-ia32-msvc@1.13.1': optional: true - '@swc/core-win32-x64-msvc@1.11.12': + '@swc/core-win32-x64-msvc@1.13.1': optional: true - '@swc/core@1.11.12(@swc/helpers@0.5.15)': + '@swc/core@1.13.1(@swc/helpers@0.5.17)': dependencies: '@swc/counter': 0.1.3 - '@swc/types': 0.1.19 + '@swc/types': 0.1.23 optionalDependencies: - '@swc/core-darwin-arm64': 1.11.12 - '@swc/core-darwin-x64': 1.11.12 - '@swc/core-linux-arm-gnueabihf': 1.11.12 - '@swc/core-linux-arm64-gnu': 1.11.12 - '@swc/core-linux-arm64-musl': 1.11.12 - '@swc/core-linux-x64-gnu': 1.11.12 - '@swc/core-linux-x64-musl': 1.11.12 - '@swc/core-win32-arm64-msvc': 1.11.12 - '@swc/core-win32-ia32-msvc': 1.11.12 - '@swc/core-win32-x64-msvc': 1.11.12 - '@swc/helpers': 0.5.15 + '@swc/core-darwin-arm64': 1.13.1 + '@swc/core-darwin-x64': 1.13.1 + '@swc/core-linux-arm-gnueabihf': 1.13.1 + '@swc/core-linux-arm64-gnu': 1.13.1 + '@swc/core-linux-arm64-musl': 1.13.1 + '@swc/core-linux-x64-gnu': 1.13.1 + '@swc/core-linux-x64-musl': 1.13.1 + '@swc/core-win32-arm64-msvc': 1.13.1 + '@swc/core-win32-ia32-msvc': 1.13.1 + '@swc/core-win32-x64-msvc': 1.13.1 + '@swc/helpers': 0.5.17 '@swc/counter@0.1.3': {} - '@swc/helpers@0.5.15': + '@swc/helpers@0.5.17': dependencies: tslib: 2.8.1 - '@swc/types@0.1.19': + '@swc/types@0.1.23': dependencies: '@swc/counter': 0.1.3 - '@tanstack/query-core@5.71.1': {} + '@tanstack/query-core@5.83.1': {} - '@tanstack/react-query@5.71.1(react@19.0.0)': + '@tanstack/react-query@5.84.1(react@19.1.1)': dependencies: - '@tanstack/query-core': 5.71.1 - react: 19.0.0 + '@tanstack/query-core': 5.83.1 + react: 19.1.1 '@testing-library/dom@10.4.0': dependencies: - '@babel/code-frame': 7.26.2 - '@babel/runtime': 7.27.0 + '@babel/code-frame': 7.27.1 + '@babel/runtime': 7.28.2 '@types/aria-query': 5.0.4 aria-query: 5.3.0 chalk: 4.1.2 @@ -3438,35 +3491,35 @@ snapshots: lz-string: 1.5.0 pretty-format: 27.5.1 - '@testing-library/jest-dom@6.6.3': + '@testing-library/jest-dom@6.6.4': dependencies: - '@adobe/css-tools': 4.4.2 + '@adobe/css-tools': 4.4.3 aria-query: 5.3.2 - chalk: 3.0.0 css.escape: 1.5.1 dom-accessibility-api: 0.6.3 lodash: 4.17.21 + picocolors: 1.1.1 redent: 3.0.0 - '@testing-library/react@16.2.0(@testing-library/dom@10.4.0)(@types/react-dom@19.0.0)(@types/react@18.3.20)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)': + '@testing-library/react@16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@19.1.7(@types/react@19.1.9))(@types/react@19.1.9)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@babel/runtime': 7.26.10 + '@babel/runtime': 7.27.1 
'@testing-library/dom': 10.4.0 - react: 19.0.0 - react-dom: 19.0.0(react@19.0.0) + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) optionalDependencies: - '@types/react': 18.3.20 - '@types/react-dom': 19.0.0 + '@types/react': 19.1.9 + '@types/react-dom': 19.1.7(@types/react@19.1.9) - '@trivago/prettier-plugin-sort-imports@4.3.0(prettier@3.5.3)': + '@trivago/prettier-plugin-sort-imports@5.2.2(prettier@3.6.2)': dependencies: - '@babel/generator': 7.17.7 - '@babel/parser': 7.27.0 - '@babel/traverse': 7.23.2 - '@babel/types': 7.17.0 + '@babel/generator': 7.27.1 + '@babel/parser': 7.27.1 + '@babel/traverse': 7.27.1 + '@babel/types': 7.27.1 javascript-natural-sort: 0.7.1 lodash: 4.17.21 - prettier: 3.5.3 + prettier: 3.6.2 transitivePeerDependencies: - supports-color @@ -3479,630 +3532,738 @@ snapshots: '@types/aria-query@5.0.4': {} - '@types/cookie@0.6.0': {} + '@types/chai@5.2.2': + dependencies: + '@types/deep-eql': 4.0.2 - '@types/estree@1.0.6': {} + '@types/deep-eql@4.0.2': {} - '@types/estree@1.0.7': {} + '@types/estree@1.0.8': {} '@types/hoist-non-react-statics@3.3.6': dependencies: - '@types/react': 18.3.20 + '@types/react': 19.1.9 hoist-non-react-statics: 3.3.2 '@types/json-schema@7.0.15': {} - '@types/normalize-package-data@2.4.4': {} + '@types/node@20.19.1': + dependencies: + undici-types: 6.21.0 '@types/parse-json@4.0.2': {} - '@types/prop-types@15.7.14': {} - - '@types/react-dom@19.0.0': + '@types/react-dom@19.1.7(@types/react@19.1.9)': dependencies: - '@types/react': 18.3.20 + '@types/react': 19.1.9 - '@types/react@18.3.20': + '@types/react@19.1.9': dependencies: - '@types/prop-types': 15.7.14 csstype: 3.1.3 - '@typescript-eslint/eslint-plugin@8.28.0(@typescript-eslint/parser@8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4))(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@types/whatwg-mimetype@3.0.2': {} + + '@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/scope-manager': 8.28.0 - '@typescript-eslint/type-utils': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/utils': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/visitor-keys': 8.28.0 - eslint: 9.23.0(jiti@1.21.7) + '@typescript-eslint/parser': 8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/scope-manager': 8.38.0 + '@typescript-eslint/type-utils': 8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.38.0 + eslint: 9.32.0(jiti@1.21.7) graphemer: 1.4.0 - ignore: 5.3.2 + ignore: 7.0.5 natural-compare: 1.4.0 - ts-api-utils: 2.1.0(typescript@5.5.4) - typescript: 5.5.4 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@typescript-eslint/parser@8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@typescript-eslint/scope-manager': 8.28.0 - '@typescript-eslint/types': 8.28.0 - '@typescript-eslint/typescript-estree': 8.28.0(typescript@5.5.4) - '@typescript-eslint/visitor-keys': 8.28.0 - debug: 4.4.0 - eslint: 9.23.0(jiti@1.21.7) - typescript: 5.5.4 + '@typescript-eslint/scope-manager': 8.38.0 + 
'@typescript-eslint/types': 8.38.0 + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.38.0 + debug: 4.4.1 + eslint: 9.32.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.28.0': + '@typescript-eslint/project-service@8.34.1(typescript@5.8.3)': dependencies: - '@typescript-eslint/types': 8.28.0 - '@typescript-eslint/visitor-keys': 8.28.0 + '@typescript-eslint/tsconfig-utils': 8.34.1(typescript@5.8.3) + '@typescript-eslint/types': 8.34.1 + debug: 4.4.1 + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color - '@typescript-eslint/type-utils@8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@typescript-eslint/project-service@8.38.0(typescript@5.8.3)': dependencies: - '@typescript-eslint/typescript-estree': 8.28.0(typescript@5.5.4) - '@typescript-eslint/utils': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - debug: 4.4.0 - eslint: 9.23.0(jiti@1.21.7) - ts-api-utils: 2.1.0(typescript@5.5.4) - typescript: 5.5.4 + '@typescript-eslint/tsconfig-utils': 8.38.0(typescript@5.8.3) + '@typescript-eslint/types': 8.38.0 + debug: 4.4.1 + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/types@8.28.0': {} + '@typescript-eslint/scope-manager@8.34.1': + dependencies: + '@typescript-eslint/types': 8.34.1 + '@typescript-eslint/visitor-keys': 8.34.1 + + '@typescript-eslint/scope-manager@8.38.0': + dependencies: + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/visitor-keys': 8.38.0 - '@typescript-eslint/typescript-estree@8.28.0(typescript@5.5.4)': + '@typescript-eslint/tsconfig-utils@8.34.1(typescript@5.8.3)': dependencies: - '@typescript-eslint/types': 8.28.0 - '@typescript-eslint/visitor-keys': 8.28.0 - debug: 4.4.0 + typescript: 5.8.3 + + '@typescript-eslint/tsconfig-utils@8.38.0(typescript@5.8.3)': + dependencies: + typescript: 5.8.3 + + '@typescript-eslint/type-utils@8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3)': + dependencies: + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + debug: 4.4.1 + eslint: 9.32.0(jiti@1.21.7) + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@8.34.1': {} + + '@typescript-eslint/types@8.38.0': {} + + '@typescript-eslint/typescript-estree@8.34.1(typescript@5.8.3)': + dependencies: + '@typescript-eslint/project-service': 8.34.1(typescript@5.8.3) + '@typescript-eslint/tsconfig-utils': 8.34.1(typescript@5.8.3) + '@typescript-eslint/types': 8.34.1 + '@typescript-eslint/visitor-keys': 8.34.1 + debug: 4.4.1 + fast-glob: 3.3.3 + is-glob: 4.0.3 + minimatch: 9.0.5 + semver: 7.7.2 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/typescript-estree@8.38.0(typescript@5.8.3)': + dependencies: + '@typescript-eslint/project-service': 8.38.0(typescript@5.8.3) + '@typescript-eslint/tsconfig-utils': 8.38.0(typescript@5.8.3) + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/visitor-keys': 8.38.0 + debug: 4.4.1 fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.1 - ts-api-utils: 2.1.0(typescript@5.5.4) - typescript: 5.5.4 + semver: 7.7.2 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + 
'@typescript-eslint/utils@8.34.1(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3)': + dependencies: + '@eslint-community/eslint-utils': 4.7.0(eslint@9.32.0(jiti@1.21.7)) + '@typescript-eslint/scope-manager': 8.34.1 + '@typescript-eslint/types': 8.34.1 + '@typescript-eslint/typescript-estree': 8.34.1(typescript@5.8.3) + eslint: 9.32.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@typescript-eslint/utils@8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@eslint-community/eslint-utils': 4.5.1(eslint@9.23.0(jiti@1.21.7)) - '@typescript-eslint/scope-manager': 8.28.0 - '@typescript-eslint/types': 8.28.0 - '@typescript-eslint/typescript-estree': 8.28.0(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) - typescript: 5.5.4 + '@eslint-community/eslint-utils': 4.7.0(eslint@9.32.0(jiti@1.21.7)) + '@typescript-eslint/scope-manager': 8.38.0 + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + eslint: 9.32.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.28.0': + '@typescript-eslint/visitor-keys@8.34.1': + dependencies: + '@typescript-eslint/types': 8.34.1 + eslint-visitor-keys: 4.2.1 + + '@typescript-eslint/visitor-keys@8.38.0': dependencies: - '@typescript-eslint/types': 8.28.0 - eslint-visitor-keys: 4.2.0 + '@typescript-eslint/types': 8.38.0 + eslint-visitor-keys: 4.2.1 - '@vitejs/plugin-react-swc@3.8.1(@swc/helpers@0.5.15)(vite@6.2.6(jiti@1.21.7))': + '@vitejs/plugin-react-swc@3.11.0(@swc/helpers@0.5.17)(vite@7.0.6(@types/node@20.19.1)(jiti@1.21.7))': dependencies: - '@swc/core': 1.11.12(@swc/helpers@0.5.15) - vite: 6.2.6(jiti@1.21.7) + '@rolldown/pluginutils': 1.0.0-beta.27 + '@swc/core': 1.13.1(@swc/helpers@0.5.17) + vite: 7.0.6(@types/node@20.19.1)(jiti@1.21.7) transitivePeerDependencies: - '@swc/helpers' - '@vitest/expect@3.0.9': + '@vitest/expect@3.2.4': dependencies: - '@vitest/spy': 3.0.9 - '@vitest/utils': 3.0.9 + '@types/chai': 5.2.2 + '@vitest/spy': 3.2.4 + '@vitest/utils': 3.2.4 chai: 5.2.0 tinyrainbow: 2.0.0 - '@vitest/mocker@3.0.9(vite@6.2.6(jiti@1.21.7))': + '@vitest/mocker@3.2.4(vite@7.0.6(@types/node@20.19.1)(jiti@1.21.7))': dependencies: - '@vitest/spy': 3.0.9 + '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.17 optionalDependencies: - vite: 6.2.6(jiti@1.21.7) + vite: 7.0.6(@types/node@20.19.1)(jiti@1.21.7) - '@vitest/pretty-format@3.0.9': + '@vitest/pretty-format@3.2.4': dependencies: tinyrainbow: 2.0.0 - '@vitest/runner@3.0.9': + '@vitest/runner@3.2.4': dependencies: - '@vitest/utils': 3.0.9 + '@vitest/utils': 3.2.4 pathe: 2.0.3 + strip-literal: 3.0.0 - '@vitest/snapshot@3.0.9': + '@vitest/snapshot@3.2.4': dependencies: - '@vitest/pretty-format': 3.0.9 + '@vitest/pretty-format': 3.2.4 magic-string: 0.30.17 pathe: 2.0.3 - '@vitest/spy@3.0.9': + '@vitest/spy@3.2.4': dependencies: - tinyspy: 3.0.2 + tinyspy: 4.0.3 - '@vitest/utils@3.0.9': + '@vitest/utils@3.2.4': dependencies: - '@vitest/pretty-format': 3.0.9 - loupe: 3.1.3 + '@vitest/pretty-format': 3.2.4 + loupe: 3.1.4 tinyrainbow: 2.0.0 - '@zag-js/accordion@1.7.0': + '@zag-js/accordion@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 
1.21.0 - '@zag-js/anatomy@1.7.0': {} + '@zag-js/anatomy@1.21.0': {} - '@zag-js/aria-hidden@1.7.0': {} - - '@zag-js/auto-resize@1.7.0': + '@zag-js/angle-slider@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/rect-utils': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/aria-hidden@1.21.0': {} - '@zag-js/avatar@1.7.0': + '@zag-js/auto-resize@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/dom-query': 1.21.0 - '@zag-js/carousel@1.7.0': + '@zag-js/avatar@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/scroll-snap': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/checkbox@1.7.0': + '@zag-js/carousel@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/focus-visible': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/scroll-snap': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/clipboard@1.7.0': + '@zag-js/checkbox@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-visible': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/collapsible@1.7.0': + '@zag-js/clipboard@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/collection@1.7.0': + '@zag-js/collapsible@1.21.0': dependencies: - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/color-picker@1.7.0': + '@zag-js/collection@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/color-utils': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/utils': 1.21.0 - '@zag-js/color-utils@1.7.0': + '@zag-js/color-picker@1.21.0': dependencies: - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/color-utils': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/combobox@1.7.0': + '@zag-js/color-utils@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/aria-hidden': 1.7.0 - '@zag-js/collection': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/utils': 1.21.0 - '@zag-js/core@1.7.0': + '@zag-js/combobox@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/aria-hidden': 1.21.0 + 
'@zag-js/collection': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/date-picker@1.7.0(@internationalized/date@3.7.0)': + '@zag-js/core@1.21.0': dependencies: - '@internationalized/date': 3.7.0 - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/date-utils': 1.7.0(@internationalized/date@3.7.0) - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/live-region': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/date-utils@1.7.0(@internationalized/date@3.7.0)': + '@zag-js/date-picker@1.21.0(@internationalized/date@3.8.2)': dependencies: - '@internationalized/date': 3.7.0 + '@internationalized/date': 3.8.2 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/date-utils': 1.21.0(@internationalized/date@3.8.2) + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/live-region': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/dialog@1.7.0': + '@zag-js/date-utils@1.21.0(@internationalized/date@3.8.2)': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/aria-hidden': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/focus-trap': 1.7.0 - '@zag-js/remove-scroll': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@internationalized/date': 3.8.2 - '@zag-js/dismissable@1.7.0': + '@zag-js/dialog@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 - '@zag-js/interact-outside': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/aria-hidden': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-trap': 1.21.0 + '@zag-js/remove-scroll': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/dom-query@1.7.0': + '@zag-js/dismissable@1.21.0': dependencies: - '@zag-js/types': 1.7.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/interact-outside': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/editable@1.7.0': + '@zag-js/dom-query@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/interact-outside': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/types': 1.21.0 - '@zag-js/element-rect@1.7.0': {} + '@zag-js/editable@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/interact-outside': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/element-size@1.7.0': {} + '@zag-js/file-upload@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/file-utils': 1.21.0 + '@zag-js/i18n-utils': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/file-upload@1.7.0': + '@zag-js/file-utils@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/file-utils': 1.7.0 - '@zag-js/i18n-utils': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/i18n-utils': 1.21.0 - '@zag-js/file-utils@1.7.0': + '@zag-js/floating-panel@1.21.0': dependencies: - '@zag-js/i18n-utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/rect-utils': 1.21.0 + 
'@zag-js/store': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/focus-trap@1.7.0': + '@zag-js/focus-trap@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 + '@zag-js/dom-query': 1.21.0 - '@zag-js/focus-visible@1.7.0': + '@zag-js/focus-visible@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 + '@zag-js/dom-query': 1.21.0 - '@zag-js/highlight-word@1.7.0': {} + '@zag-js/highlight-word@1.21.0': {} - '@zag-js/hover-card@1.7.0': + '@zag-js/hover-card@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/i18n-utils@1.7.0': + '@zag-js/i18n-utils@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 + '@zag-js/dom-query': 1.21.0 + + '@zag-js/interact-outside@1.21.0': + dependencies: + '@zag-js/dom-query': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/json-tree-utils@1.21.0': {} - '@zag-js/interact-outside@1.7.0': + '@zag-js/listbox@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/collection': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-visible': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/live-region@1.7.0': {} + '@zag-js/live-region@1.21.0': {} - '@zag-js/menu@1.7.0': + '@zag-js/menu@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/rect-utils': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/rect-utils': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/number-input@1.7.0': + '@zag-js/number-input@1.21.0': dependencies: - '@internationalized/number': 3.6.0 - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@internationalized/number': 3.6.3 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/pagination@1.7.0': + '@zag-js/pagination@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/pin-input@1.7.0': + '@zag-js/password-input@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/popover@1.7.0': + '@zag-js/pin-input@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/aria-hidden': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/focus-trap': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/remove-scroll': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 
+ '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/popper@1.7.0': + '@zag-js/popover@1.21.0': dependencies: - '@floating-ui/dom': 1.6.13 - '@zag-js/dom-query': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/aria-hidden': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-trap': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/remove-scroll': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/presence@1.7.0': + '@zag-js/popper@1.21.0': dependencies: - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 + '@floating-ui/dom': 1.7.2 + '@zag-js/dom-query': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/progress@1.7.0': + '@zag-js/presence@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 - '@zag-js/qr-code@1.7.0': + '@zag-js/progress@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/qr-code@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 proxy-memoize: 3.0.1 uqr: 0.1.2 - '@zag-js/radio-group@1.7.0': + '@zag-js/radio-group@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/element-rect': 1.7.0 - '@zag-js/focus-visible': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-visible': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/rating-group@1.7.0': + '@zag-js/rating-group@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/react@1.7.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0)': + '@zag-js/react@1.21.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@zag-js/core': 1.7.0 - '@zag-js/store': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - react: 19.0.0 - react-dom: 19.0.0(react@19.0.0) + '@zag-js/core': 1.21.0 + '@zag-js/store': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) - '@zag-js/rect-utils@1.7.0': {} + '@zag-js/rect-utils@1.21.0': {} - '@zag-js/remove-scroll@1.7.0': + '@zag-js/remove-scroll@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 + '@zag-js/dom-query': 1.21.0 - '@zag-js/scroll-snap@1.7.0': + '@zag-js/scroll-snap@1.21.0': dependencies: - '@zag-js/dom-query': 1.7.0 + '@zag-js/dom-query': 1.21.0 - '@zag-js/select@1.7.0': + '@zag-js/select@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/collection': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/collection': 1.21.0 + 
'@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/signature-pad@1.7.0': + '@zag-js/signature-pad@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 perfect-freehand: 1.2.2 - '@zag-js/slider@1.7.0': + '@zag-js/slider@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/element-size': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/splitter@1.7.0': + '@zag-js/splitter@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/steps@1.7.0': + '@zag-js/steps@1.21.0': dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 - '@zag-js/store@1.7.0': + '@zag-js/store@1.21.0': dependencies: proxy-compare: 3.0.1 - '@zag-js/switch@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/focus-visible': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/tabs@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/element-rect': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/tags-input@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/auto-resize': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/interact-outside': 1.7.0 - '@zag-js/live-region': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/time-picker@1.7.0(@internationalized/date@3.7.0)': - dependencies: - '@internationalized/date': 3.7.0 - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/timer@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/toast@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/toggle-group@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/toggle@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/tooltip@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/focus-visible': 1.7.0 - '@zag-js/popper': 1.7.0 - 
'@zag-js/store': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/tour@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dismissable': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/focus-trap': 1.7.0 - '@zag-js/interact-outside': 1.7.0 - '@zag-js/popper': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/tree-view@1.7.0': - dependencies: - '@zag-js/anatomy': 1.7.0 - '@zag-js/collection': 1.7.0 - '@zag-js/core': 1.7.0 - '@zag-js/dom-query': 1.7.0 - '@zag-js/types': 1.7.0 - '@zag-js/utils': 1.7.0 - - '@zag-js/types@1.7.0': + '@zag-js/switch@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-visible': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/tabs@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/tags-input@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/auto-resize': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/interact-outside': 1.21.0 + '@zag-js/live-region': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/time-picker@1.21.0(@internationalized/date@3.8.2)': + dependencies: + '@internationalized/date': 3.8.2 + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/timer@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/toast@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/toggle-group@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/toggle@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/tooltip@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-visible': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/store': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/tour@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dismissable': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/focus-trap': 1.21.0 + '@zag-js/interact-outside': 1.21.0 + '@zag-js/popper': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/tree-view@1.21.0': + dependencies: + '@zag-js/anatomy': 1.21.0 + '@zag-js/collection': 1.21.0 + '@zag-js/core': 1.21.0 + '@zag-js/dom-query': 1.21.0 + '@zag-js/types': 1.21.0 + '@zag-js/utils': 1.21.0 + + '@zag-js/types@1.21.0': dependencies: csstype: 3.1.3 - '@zag-js/utils@1.7.0': {} + '@zag-js/utils@1.21.0': {} - acorn-jsx@5.3.2(acorn@8.14.1): + acorn-jsx@5.3.2(acorn@8.15.0): dependencies: - acorn: 8.14.1 + acorn: 8.15.0 - acorn@8.14.1: {} + acorn@8.15.0: {} ajv@6.12.6: dependencies: @@ -4205,10 +4366,10 @@ snapshots: axe-core@4.10.3: {} - axios@1.8.4: + axios@1.11.0: dependencies: follow-redirects: 1.15.9 - 
form-data: 4.0.2 + form-data: 4.0.4 proxy-from-env: 1.1.0 transitivePeerDependencies: - debug @@ -4217,7 +4378,7 @@ snapshots: babel-plugin-macros@3.1.0: dependencies: - '@babel/runtime': 7.27.0 + '@babel/runtime': 7.28.2 cosmiconfig: 7.1.0 resolve: 1.22.10 @@ -4225,12 +4386,12 @@ snapshots: binary-extensions@2.3.0: {} - brace-expansion@1.1.11: + brace-expansion@1.1.12: dependencies: balanced-match: 1.0.2 concat-map: 0.0.1 - brace-expansion@2.0.1: + brace-expansion@2.0.2: dependencies: balanced-match: 1.0.2 @@ -4238,14 +4399,14 @@ snapshots: dependencies: fill-range: 7.1.1 - browserslist@4.24.4: + browserslist@4.25.1: dependencies: - caniuse-lite: 1.0.30001707 - electron-to-chromium: 1.5.128 + caniuse-lite: 1.0.30001727 + electron-to-chromium: 1.5.189 node-releases: 2.0.19 - update-browserslist-db: 1.1.3(browserslist@4.24.4) + update-browserslist-db: 1.1.3(browserslist@4.25.1) - builtin-modules@3.3.0: {} + builtin-modules@5.0.0: {} c12@1.11.1(magicast@0.3.5): dependencies: @@ -4287,26 +4448,23 @@ snapshots: camelcase@8.0.0: {} - caniuse-lite@1.0.30001707: {} + caniuse-lite@1.0.30001727: {} chai@5.2.0: dependencies: assertion-error: 2.0.1 check-error: 2.1.1 deep-eql: 5.0.2 - loupe: 3.1.3 + loupe: 3.1.4 pathval: 2.0.0 - chalk@3.0.0: - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - chalk@4.1.2: dependencies: ansi-styles: 4.3.0 supports-color: 7.2.0 + change-case@5.4.4: {} + check-error@2.1.1: {} chokidar@3.6.0: @@ -4323,7 +4481,7 @@ snapshots: chownr@2.0.0: {} - ci-info@4.2.0: {} + ci-info@4.3.0: {} citty@0.1.6: dependencies: @@ -4357,9 +4515,9 @@ snapshots: cookie@1.0.2: {} - core-js-compat@3.41.0: + core-js-compat@3.44.0: dependencies: - browserslist: 4.24.4 + browserslist: 4.25.1 cosmiconfig@7.1.0: dependencies: @@ -4399,7 +4557,7 @@ snapshots: es-errors: 1.3.0 is-data-view: 1.0.2 - debug@4.4.0: + debug@4.4.1: dependencies: ms: 2.1.3 @@ -4445,7 +4603,7 @@ snapshots: eastasianwidth@0.2.0: {} - electron-to-chromium@1.5.128: {} + electron-to-chromium@1.5.189: {} emoji-regex@8.0.0: {} @@ -4532,7 +4690,7 @@ snapshots: iterator.prototype: 1.1.5 safe-array-concat: 1.1.3 - es-module-lexer@1.6.0: {} + es-module-lexer@1.7.0: {} es-object-atoms@1.1.1: dependencies: @@ -4555,33 +4713,34 @@ snapshots: is-date-object: 1.1.0 is-symbol: 1.1.1 - esbuild@0.25.2: + esbuild@0.25.8: optionalDependencies: - '@esbuild/aix-ppc64': 0.25.2 - '@esbuild/android-arm': 0.25.2 - '@esbuild/android-arm64': 0.25.2 - '@esbuild/android-x64': 0.25.2 - '@esbuild/darwin-arm64': 0.25.2 - '@esbuild/darwin-x64': 0.25.2 - '@esbuild/freebsd-arm64': 0.25.2 - '@esbuild/freebsd-x64': 0.25.2 - '@esbuild/linux-arm': 0.25.2 - '@esbuild/linux-arm64': 0.25.2 - '@esbuild/linux-ia32': 0.25.2 - '@esbuild/linux-loong64': 0.25.2 - '@esbuild/linux-mips64el': 0.25.2 - '@esbuild/linux-ppc64': 0.25.2 - '@esbuild/linux-riscv64': 0.25.2 - '@esbuild/linux-s390x': 0.25.2 - '@esbuild/linux-x64': 0.25.2 - '@esbuild/netbsd-arm64': 0.25.2 - '@esbuild/netbsd-x64': 0.25.2 - '@esbuild/openbsd-arm64': 0.25.2 - '@esbuild/openbsd-x64': 0.25.2 - '@esbuild/sunos-x64': 0.25.2 - '@esbuild/win32-arm64': 0.25.2 - '@esbuild/win32-ia32': 0.25.2 - '@esbuild/win32-x64': 0.25.2 + '@esbuild/aix-ppc64': 0.25.8 + '@esbuild/android-arm': 0.25.8 + '@esbuild/android-arm64': 0.25.8 + '@esbuild/android-x64': 0.25.8 + '@esbuild/darwin-arm64': 0.25.8 + '@esbuild/darwin-x64': 0.25.8 + '@esbuild/freebsd-arm64': 0.25.8 + '@esbuild/freebsd-x64': 0.25.8 + '@esbuild/linux-arm': 0.25.8 + '@esbuild/linux-arm64': 0.25.8 + '@esbuild/linux-ia32': 0.25.8 + '@esbuild/linux-loong64': 
0.25.8 + '@esbuild/linux-mips64el': 0.25.8 + '@esbuild/linux-ppc64': 0.25.8 + '@esbuild/linux-riscv64': 0.25.8 + '@esbuild/linux-s390x': 0.25.8 + '@esbuild/linux-x64': 0.25.8 + '@esbuild/netbsd-arm64': 0.25.8 + '@esbuild/netbsd-x64': 0.25.8 + '@esbuild/openbsd-arm64': 0.25.8 + '@esbuild/openbsd-x64': 0.25.8 + '@esbuild/openharmony-arm64': 0.25.8 + '@esbuild/sunos-x64': 0.25.8 + '@esbuild/win32-arm64': 0.25.8 + '@esbuild/win32-ia32': 0.25.8 + '@esbuild/win32-x64': 0.25.8 escalade@3.2.0: {} @@ -4589,11 +4748,11 @@ snapshots: escape-string-regexp@4.0.0: {} - eslint-config-prettier@10.1.1(eslint@9.23.0(jiti@1.21.7)): + eslint-config-prettier@10.1.8(eslint@9.32.0(jiti@1.21.7)): dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.32.0(jiti@1.21.7) - eslint-plugin-jsx-a11y@6.10.2(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-jsx-a11y@6.10.2(eslint@9.32.0(jiti@1.21.7)): dependencies: aria-query: 5.3.2 array-includes: 3.1.8 @@ -4603,7 +4762,7 @@ snapshots: axobject-query: 4.1.0 damerau-levenshtein: 1.0.8 emoji-regex: 9.2.2 - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.32.0(jiti@1.21.7) hasown: 2.0.2 jsx-ast-utils: 3.3.5 language-tags: 1.0.9 @@ -4612,34 +4771,34 @@ snapshots: safe-regex-test: 1.1.0 string.prototype.includes: 2.0.1 - eslint-plugin-perfectionist@4.10.1(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4): + eslint-plugin-perfectionist@4.15.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3): dependencies: - '@typescript-eslint/types': 8.28.0 - '@typescript-eslint/utils': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) + '@typescript-eslint/types': 8.34.1 + '@typescript-eslint/utils': 8.34.1(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + eslint: 9.32.0(jiti@1.21.7) natural-orderby: 5.0.0 transitivePeerDependencies: - supports-color - typescript - eslint-plugin-prettier@5.2.5(eslint-config-prettier@10.1.1(eslint@9.23.0(jiti@1.21.7)))(eslint@9.23.0(jiti@1.21.7))(prettier@3.5.3): + eslint-plugin-prettier@5.5.3(eslint-config-prettier@10.1.8(eslint@9.32.0(jiti@1.21.7)))(eslint@9.32.0(jiti@1.21.7))(prettier@3.6.2): dependencies: - eslint: 9.23.0(jiti@1.21.7) - prettier: 3.5.3 + eslint: 9.32.0(jiti@1.21.7) + prettier: 3.6.2 prettier-linter-helpers: 1.0.0 - synckit: 0.10.3 + synckit: 0.11.11 optionalDependencies: - eslint-config-prettier: 10.1.1(eslint@9.23.0(jiti@1.21.7)) + eslint-config-prettier: 10.1.8(eslint@9.32.0(jiti@1.21.7)) - eslint-plugin-react-hooks@4.6.2(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-react-hooks@5.2.0(eslint@9.32.0(jiti@1.21.7)): dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.32.0(jiti@1.21.7) - eslint-plugin-react-refresh@0.4.19(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-react-refresh@0.4.20(eslint@9.32.0(jiti@1.21.7)): dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.32.0(jiti@1.21.7) - eslint-plugin-react@7.37.4(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-react@7.37.5(eslint@9.32.0(jiti@1.21.7)): dependencies: array-includes: 3.1.8 array.prototype.findlast: 1.2.5 @@ -4647,7 +4806,7 @@ snapshots: array.prototype.tosorted: 1.1.4 doctrine: 2.1.0 es-iterator-helpers: 1.2.1 - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.32.0(jiti@1.21.7) estraverse: 5.3.0 hasown: 2.0.2 jsx-ast-utils: 3.3.5 @@ -4661,58 +4820,60 @@ snapshots: string.prototype.matchall: 4.0.12 string.prototype.repeat: 1.0.0 - eslint-plugin-unicorn@55.0.0(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-unicorn@60.0.0(eslint@9.32.0(jiti@1.21.7)): dependencies: - '@babel/helper-validator-identifier': 7.25.9 - '@eslint-community/eslint-utils': 
4.5.1(eslint@9.23.0(jiti@1.21.7)) - ci-info: 4.2.0 + '@babel/helper-validator-identifier': 7.27.1 + '@eslint-community/eslint-utils': 4.7.0(eslint@9.32.0(jiti@1.21.7)) + '@eslint/plugin-kit': 0.3.4 + change-case: 5.4.4 + ci-info: 4.3.0 clean-regexp: 1.0.0 - core-js-compat: 3.41.0 - eslint: 9.23.0(jiti@1.21.7) + core-js-compat: 3.44.0 + eslint: 9.32.0(jiti@1.21.7) esquery: 1.6.0 - globals: 15.15.0 - indent-string: 4.0.0 - is-builtin-module: 3.2.1 + find-up-simple: 1.0.1 + globals: 16.3.0 + indent-string: 5.0.0 + is-builtin-module: 5.0.0 jsesc: 3.1.0 pluralize: 8.0.0 - read-pkg-up: 7.0.1 regexp-tree: 0.1.27 - regjsparser: 0.10.0 - semver: 7.7.1 - strip-indent: 3.0.0 + regjsparser: 0.12.0 + semver: 7.7.2 + strip-indent: 4.0.0 - eslint-scope@8.3.0: + eslint-scope@8.4.0: dependencies: esrecurse: 4.3.0 estraverse: 5.3.0 eslint-visitor-keys@3.4.3: {} - eslint-visitor-keys@4.2.0: {} + eslint-visitor-keys@4.2.1: {} - eslint@9.23.0(jiti@1.21.7): + eslint@9.32.0(jiti@1.21.7): dependencies: - '@eslint-community/eslint-utils': 4.5.1(eslint@9.23.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.32.0(jiti@1.21.7)) '@eslint-community/regexpp': 4.12.1 - '@eslint/config-array': 0.19.2 - '@eslint/config-helpers': 0.2.0 - '@eslint/core': 0.12.0 + '@eslint/config-array': 0.21.0 + '@eslint/config-helpers': 0.3.0 + '@eslint/core': 0.15.1 '@eslint/eslintrc': 3.3.1 - '@eslint/js': 9.23.0 - '@eslint/plugin-kit': 0.2.7 + '@eslint/js': 9.32.0 + '@eslint/plugin-kit': 0.3.4 '@humanfs/node': 0.16.6 '@humanwhocodes/module-importer': 1.0.1 - '@humanwhocodes/retry': 0.4.2 - '@types/estree': 1.0.6 + '@humanwhocodes/retry': 0.4.3 + '@types/estree': 1.0.8 '@types/json-schema': 7.0.15 ajv: 6.12.6 chalk: 4.1.2 cross-spawn: 7.0.6 - debug: 4.4.0 + debug: 4.4.1 escape-string-regexp: 4.0.0 - eslint-scope: 8.3.0 - eslint-visitor-keys: 4.2.0 - espree: 10.3.0 + eslint-scope: 8.4.0 + eslint-visitor-keys: 4.2.1 + espree: 10.4.0 esquery: 1.6.0 esutils: 2.0.3 fast-deep-equal: 3.1.3 @@ -4732,11 +4893,11 @@ snapshots: transitivePeerDependencies: - supports-color - espree@10.3.0: + espree@10.4.0: dependencies: - acorn: 8.14.1 - acorn-jsx: 5.3.2(acorn@8.14.1) - eslint-visitor-keys: 4.2.0 + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + eslint-visitor-keys: 4.2.1 esquery@1.6.0: dependencies: @@ -4750,11 +4911,11 @@ snapshots: estree-walker@3.0.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 esutils@2.0.3: {} - expect-type@1.2.0: {} + expect-type@1.2.1: {} fast-deep-equal@3.1.3: {} @@ -4778,6 +4939,10 @@ snapshots: dependencies: reusify: 1.1.0 + fdir@6.4.6(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + file-entry-cache@8.0.0: dependencies: flat-cache: 4.0.1 @@ -4788,10 +4953,7 @@ snapshots: find-root@1.1.0: {} - find-up@4.1.0: - dependencies: - locate-path: 5.0.0 - path-exists: 4.0.0 + find-up-simple@1.0.1: {} find-up@5.0.0: dependencies: @@ -4816,11 +4978,12 @@ snapshots: cross-spawn: 7.0.6 signal-exit: 4.1.0 - form-data@4.0.2: + form-data@4.0.4: dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 es-set-tostringtag: 2.1.0 + hasown: 2.0.2 mime-types: 2.1.35 fs-minipass@2.1.0: @@ -4898,7 +5061,7 @@ snapshots: globals@14.0.0: {} - globals@15.15.0: {} + globals@16.3.0: {} globalthis@1.0.4: dependencies: @@ -4918,9 +5081,10 @@ snapshots: optionalDependencies: uglify-js: 3.19.3 - happy-dom@17.4.4: + happy-dom@18.0.1: dependencies: - webidl-conversions: 7.0.0 + '@types/node': 20.19.1 + '@types/whatwg-mimetype': 3.0.2 whatwg-mimetype: 3.0.0 has-bigints@1.1.0: {} @@ -4949,10 +5113,10 @@ snapshots: 
dependencies: react-is: 16.13.1 - hosted-git-info@2.8.9: {} - ignore@5.3.2: {} + ignore@7.0.5: {} + import-fresh@3.3.1: dependencies: parent-module: 1.0.1 @@ -4962,6 +5126,8 @@ snapshots: indent-string@4.0.0: {} + indent-string@5.0.0: {} + internal-slot@1.1.0: dependencies: es-errors: 1.3.0 @@ -4997,9 +5163,9 @@ snapshots: call-bound: 1.0.4 has-tostringtag: 1.0.2 - is-builtin-module@3.2.1: + is-builtin-module@5.0.0: dependencies: - builtin-modules: 3.3.0 + builtin-modules: 5.0.0 is-callable@1.2.7: {} @@ -5110,13 +5276,13 @@ snapshots: js-tokens@4.0.0: {} + js-tokens@9.0.1: {} + js-yaml@4.1.0: dependencies: argparse: 2.0.1 - jsesc@0.5.0: {} - - jsesc@2.5.2: {} + jsesc@3.0.2: {} jsesc@3.1.0: {} @@ -5152,10 +5318,6 @@ snapshots: lines-and-columns@1.2.4: {} - locate-path@5.0.0: - dependencies: - p-locate: 4.1.0 - locate-path@6.0.0: dependencies: p-locate: 5.0.0 @@ -5168,7 +5330,7 @@ snapshots: dependencies: js-tokens: 4.0.0 - loupe@3.1.3: {} + loupe@3.1.4: {} lru-cache@10.4.3: {} @@ -5180,8 +5342,8 @@ snapshots: magicast@0.3.5: dependencies: - '@babel/parser': 7.27.0 - '@babel/types': 7.27.0 + '@babel/parser': 7.28.0 + '@babel/types': 7.28.2 source-map-js: 1.2.1 optional: true @@ -5204,11 +5366,11 @@ snapshots: minimatch@3.1.2: dependencies: - brace-expansion: 1.1.11 + brace-expansion: 1.1.12 minimatch@9.0.5: dependencies: - brace-expansion: 2.0.1 + brace-expansion: 2.0.2 minimist@1.2.8: {} @@ -5231,7 +5393,7 @@ snapshots: mlly@1.7.4: dependencies: - acorn: 8.14.1 + acorn: 8.15.0 pathe: 2.0.3 pkg-types: 1.3.1 ufo: 1.5.4 @@ -5246,22 +5408,15 @@ snapshots: neo-async@2.6.2: {} - next-themes@0.4.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0): + next-themes@0.4.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: - react: 19.0.0 - react-dom: 19.0.0(react@19.0.0) + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) node-fetch-native@1.6.6: {} node-releases@2.0.19: {} - normalize-package-data@2.5.0: - dependencies: - hosted-git-info: 2.8.9 - resolve: 1.22.10 - semver: 5.7.2 - validate-npm-package-license: 3.0.4 - normalize-path@3.0.0: {} nypm@0.5.4: @@ -5326,24 +5481,14 @@ snapshots: object-keys: 1.1.1 safe-push-apply: 1.0.0 - p-limit@2.3.0: - dependencies: - p-try: 2.2.0 - p-limit@3.1.0: dependencies: yocto-queue: 0.1.0 - p-locate@4.1.0: - dependencies: - p-limit: 2.3.0 - p-locate@5.0.0: dependencies: p-limit: 3.1.0 - p-try@2.2.0: {} - package-json-from-dist@1.0.1: {} parent-module@1.0.1: @@ -5352,7 +5497,7 @@ snapshots: parse-json@5.2.0: dependencies: - '@babel/code-frame': 7.26.2 + '@babel/code-frame': 7.27.1 error-ex: 1.3.2 json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 @@ -5388,6 +5533,8 @@ snapshots: picomatch@4.0.2: {} + picomatch@4.0.3: {} + pkg-types@1.3.1: dependencies: confbox: 0.1.8 @@ -5398,7 +5545,7 @@ snapshots: possible-typed-array-names@1.1.0: {} - postcss@8.5.3: + postcss@8.5.6: dependencies: nanoid: 3.3.11 picocolors: 1.1.1 @@ -5410,7 +5557,7 @@ snapshots: dependencies: fast-diff: 1.3.0 - prettier@3.5.3: {} + prettier@3.6.2: {} pretty-format@27.5.1: dependencies: @@ -5441,56 +5588,41 @@ snapshots: defu: 6.1.4 destr: 2.0.3 - react-cookie@8.0.1(react@19.0.0): + react-cookie@8.0.1(react@19.1.1): dependencies: '@types/hoist-non-react-statics': 3.3.6 hoist-non-react-statics: 3.3.2 - react: 19.0.0 + react: 19.1.1 universal-cookie: 8.0.1 - react-dom@19.0.0(react@19.0.0): + react-dom@19.1.1(react@19.1.1): dependencies: - react: 19.0.0 - scheduler: 0.25.0 + react: 19.1.1 + scheduler: 0.26.0 - react-hook-form@7.54.2(react@19.0.0): + 
react-hook-form@7.61.1(react@19.1.1): dependencies: - react: 19.0.0 + react: 19.1.1 react-is@16.13.1: {} react-is@17.0.2: {} - react-router-dom@7.4.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0): + react-router-dom@7.7.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: - react: 19.0.0 - react-dom: 19.0.0(react@19.0.0) - react-router: 7.4.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) + react-router: 7.7.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react-router@7.4.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0): + react-router@7.7.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: - '@types/cookie': 0.6.0 cookie: 1.0.2 - react: 19.0.0 + react: 19.1.1 set-cookie-parser: 2.7.1 - turbo-stream: 2.4.0 optionalDependencies: - react-dom: 19.0.0(react@19.0.0) + react-dom: 19.1.1(react@19.1.1) - react@19.0.0: {} - - read-pkg-up@7.0.1: - dependencies: - find-up: 4.1.0 - read-pkg: 5.2.0 - type-fest: 0.8.1 - - read-pkg@5.2.0: - dependencies: - '@types/normalize-package-data': 2.4.4 - normalize-package-data: 2.5.0 - parse-json: 5.2.0 - type-fest: 0.6.0 + react@19.1.1: {} readdirp@3.6.0: dependencies: @@ -5512,8 +5644,6 @@ snapshots: get-proto: 1.0.1 which-builtin-type: 1.2.1 - regenerator-runtime@0.14.1: {} - regexp-tree@0.1.27: {} regexp.prototype.flags@1.5.4: @@ -5525,9 +5655,9 @@ snapshots: gopd: 1.2.0 set-function-name: 2.0.2 - regjsparser@0.10.0: + regjsparser@0.12.0: dependencies: - jsesc: 0.5.0 + jsesc: 3.0.2 resolve-from@4.0.0: {} @@ -5545,30 +5675,30 @@ snapshots: reusify@1.1.0: {} - rollup@4.39.0: + rollup@4.46.2: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.39.0 - '@rollup/rollup-android-arm64': 4.39.0 - '@rollup/rollup-darwin-arm64': 4.39.0 - '@rollup/rollup-darwin-x64': 4.39.0 - '@rollup/rollup-freebsd-arm64': 4.39.0 - '@rollup/rollup-freebsd-x64': 4.39.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.39.0 - '@rollup/rollup-linux-arm-musleabihf': 4.39.0 - '@rollup/rollup-linux-arm64-gnu': 4.39.0 - '@rollup/rollup-linux-arm64-musl': 4.39.0 - '@rollup/rollup-linux-loongarch64-gnu': 4.39.0 - '@rollup/rollup-linux-powerpc64le-gnu': 4.39.0 - '@rollup/rollup-linux-riscv64-gnu': 4.39.0 - '@rollup/rollup-linux-riscv64-musl': 4.39.0 - '@rollup/rollup-linux-s390x-gnu': 4.39.0 - '@rollup/rollup-linux-x64-gnu': 4.39.0 - '@rollup/rollup-linux-x64-musl': 4.39.0 - '@rollup/rollup-win32-arm64-msvc': 4.39.0 - '@rollup/rollup-win32-ia32-msvc': 4.39.0 - '@rollup/rollup-win32-x64-msvc': 4.39.0 + '@rollup/rollup-android-arm-eabi': 4.46.2 + '@rollup/rollup-android-arm64': 4.46.2 + '@rollup/rollup-darwin-arm64': 4.46.2 + '@rollup/rollup-darwin-x64': 4.46.2 + '@rollup/rollup-freebsd-arm64': 4.46.2 + '@rollup/rollup-freebsd-x64': 4.46.2 + '@rollup/rollup-linux-arm-gnueabihf': 4.46.2 + '@rollup/rollup-linux-arm-musleabihf': 4.46.2 + '@rollup/rollup-linux-arm64-gnu': 4.46.2 + '@rollup/rollup-linux-arm64-musl': 4.46.2 + '@rollup/rollup-linux-loongarch64-gnu': 4.46.2 + '@rollup/rollup-linux-ppc64-gnu': 4.46.2 + '@rollup/rollup-linux-riscv64-gnu': 4.46.2 + '@rollup/rollup-linux-riscv64-musl': 4.46.2 + '@rollup/rollup-linux-s390x-gnu': 4.46.2 + '@rollup/rollup-linux-x64-gnu': 4.46.2 + '@rollup/rollup-linux-x64-musl': 4.46.2 + '@rollup/rollup-win32-arm64-msvc': 4.46.2 + '@rollup/rollup-win32-ia32-msvc': 4.46.2 + '@rollup/rollup-win32-x64-msvc': 4.46.2 fsevents: 2.3.3 run-parallel@1.2.0: @@ -5594,13 +5724,11 @@ snapshots: es-errors: 1.3.0 is-regex: 1.2.1 - 
scheduler@0.25.0: {} - - semver@5.7.2: {} + scheduler@0.26.0: {} semver@6.3.1: {} - semver@7.7.1: {} + semver@7.7.2: {} set-cookie-parser@2.7.1: {} @@ -5670,23 +5798,9 @@ snapshots: source-map@0.6.1: {} - spdx-correct@3.2.0: - dependencies: - spdx-expression-parse: 3.0.1 - spdx-license-ids: 3.0.21 - - spdx-exceptions@2.5.0: {} - - spdx-expression-parse@3.0.1: - dependencies: - spdx-exceptions: 2.5.0 - spdx-license-ids: 3.0.21 - - spdx-license-ids@3.0.21: {} - stackback@0.0.2: {} - std-env@3.8.1: {} + std-env@3.9.0: {} string-width@4.2.3: dependencies: @@ -5762,8 +5876,16 @@ snapshots: dependencies: min-indent: 1.0.1 + strip-indent@4.0.0: + dependencies: + min-indent: 1.0.1 + strip-json-comments@3.1.1: {} + strip-literal@3.0.0: + dependencies: + js-tokens: 9.0.1 + stylis@4.2.0: {} supports-color@7.2.0: @@ -5772,10 +5894,9 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} - synckit@0.10.3: + synckit@0.11.11: dependencies: - '@pkgr/core': 0.2.0 - tslib: 2.8.1 + '@pkgr/core': 0.2.9 tar@6.2.1: dependencies: @@ -5790,21 +5911,24 @@ snapshots: tinyexec@0.3.2: {} - tinypool@1.0.2: {} + tinyglobby@0.2.14: + dependencies: + fdir: 6.4.6(picomatch@4.0.3) + picomatch: 4.0.3 - tinyrainbow@2.0.0: {} + tinypool@1.1.1: {} - tinyspy@3.0.2: {} + tinyrainbow@2.0.0: {} - to-fast-properties@2.0.0: {} + tinyspy@4.0.3: {} to-regex-range@5.0.1: dependencies: is-number: 7.0.0 - ts-api-utils@2.1.0(typescript@5.5.4): + ts-api-utils@2.1.0(typescript@5.8.3): dependencies: - typescript: 5.5.4 + typescript: 5.8.3 ts-morph@22.0.0: dependencies: @@ -5813,16 +5937,10 @@ snapshots: tslib@2.8.1: {} - turbo-stream@2.4.0: {} - type-check@0.4.0: dependencies: prelude-ls: 1.2.1 - type-fest@0.6.0: {} - - type-fest@0.8.1: {} - typed-array-buffer@1.0.3: dependencies: call-bound: 1.0.4 @@ -5856,17 +5974,18 @@ snapshots: possible-typed-array-names: 1.1.0 reflect.getprototypeof: 1.0.10 - typescript-eslint@8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4): + typescript-eslint@8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.28.0(@typescript-eslint/parser@8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4))(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/parser': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/utils': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) - typescript: 5.5.4 + '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.32.0(jiti@1.21.7))(typescript@5.8.3) + eslint: 9.32.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - typescript@5.5.4: {} + typescript@5.8.3: {} ufo@1.5.4: {} @@ -5880,13 +5999,15 @@ snapshots: has-symbols: 1.1.0 which-boxed-primitive: 1.1.1 + undici-types@6.21.0: {} + universal-cookie@8.0.1: dependencies: cookie: 1.0.2 - update-browserslist-db@1.1.3(browserslist@4.24.4): + update-browserslist-db@1.1.3(browserslist@4.25.1): dependencies: - browserslist: 4.24.4 + browserslist: 4.25.1 escalade: 3.2.0 picocolors: 1.1.1 @@ -5896,18 +6017,13 @@ snapshots: dependencies: punycode: 2.3.1 - validate-npm-package-license@3.0.4: - dependencies: - spdx-correct: 3.2.0 - spdx-expression-parse: 3.0.1 - - 
vite-node@3.0.9(jiti@1.21.7): + vite-node@3.2.4(@types/node@20.19.1)(jiti@1.21.7): dependencies: cac: 6.7.14 - debug: 4.4.0 - es-module-lexer: 1.6.0 + debug: 4.4.1 + es-module-lexer: 1.7.0 pathe: 2.0.3 - vite: 6.2.6(jiti@1.21.7) + vite: 7.0.6(@types/node@20.19.1)(jiti@1.21.7) transitivePeerDependencies: - '@types/node' - jiti @@ -5922,43 +6038,51 @@ snapshots: - tsx - yaml - vite-plugin-css-injected-by-js@3.5.2(vite@6.2.6(jiti@1.21.7)): + vite-plugin-css-injected-by-js@3.5.2(vite@7.0.6(@types/node@20.19.1)(jiti@1.21.7)): dependencies: - vite: 6.2.6(jiti@1.21.7) + vite: 7.0.6(@types/node@20.19.1)(jiti@1.21.7) - vite@6.2.6(jiti@1.21.7): + vite@7.0.6(@types/node@20.19.1)(jiti@1.21.7): dependencies: - esbuild: 0.25.2 - postcss: 8.5.3 - rollup: 4.39.0 + esbuild: 0.25.8 + fdir: 6.4.6(picomatch@4.0.3) + picomatch: 4.0.3 + postcss: 8.5.6 + rollup: 4.46.2 + tinyglobby: 0.2.14 optionalDependencies: + '@types/node': 20.19.1 fsevents: 2.3.3 jiti: 1.21.7 - vitest@3.0.9(happy-dom@17.4.4)(jiti@1.21.7): + vitest@3.2.4(@types/node@20.19.1)(happy-dom@18.0.1)(jiti@1.21.7): dependencies: - '@vitest/expect': 3.0.9 - '@vitest/mocker': 3.0.9(vite@6.2.6(jiti@1.21.7)) - '@vitest/pretty-format': 3.0.9 - '@vitest/runner': 3.0.9 - '@vitest/snapshot': 3.0.9 - '@vitest/spy': 3.0.9 - '@vitest/utils': 3.0.9 + '@types/chai': 5.2.2 + '@vitest/expect': 3.2.4 + '@vitest/mocker': 3.2.4(vite@7.0.6(@types/node@20.19.1)(jiti@1.21.7)) + '@vitest/pretty-format': 3.2.4 + '@vitest/runner': 3.2.4 + '@vitest/snapshot': 3.2.4 + '@vitest/spy': 3.2.4 + '@vitest/utils': 3.2.4 chai: 5.2.0 - debug: 4.4.0 - expect-type: 1.2.0 + debug: 4.4.1 + expect-type: 1.2.1 magic-string: 0.30.17 pathe: 2.0.3 - std-env: 3.8.1 + picomatch: 4.0.2 + std-env: 3.9.0 tinybench: 2.9.0 tinyexec: 0.3.2 - tinypool: 1.0.2 + tinyglobby: 0.2.14 + tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 6.2.6(jiti@1.21.7) - vite-node: 3.0.9(jiti@1.21.7) + vite: 7.0.6(@types/node@20.19.1)(jiti@1.21.7) + vite-node: 3.2.4(@types/node@20.19.1)(jiti@1.21.7) why-is-node-running: 2.3.0 optionalDependencies: - happy-dom: 17.4.4 + '@types/node': 20.19.1 + happy-dom: 18.0.1 transitivePeerDependencies: - jiti - less @@ -5973,8 +6097,6 @@ snapshots: - tsx - yaml - webidl-conversions@7.0.0: {} - whatwg-mimetype@3.0.0: {} which-boxed-primitive@1.1.1: diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/core.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/core.js index 195e5c5e3aa3c..342b4d7d610b6 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/core.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/core.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/perfectionist.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/perfectionist.js index de0aaf1ca3b56..6054079ef998f 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/perfectionist.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/perfectionist.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/prettier.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/prettier.js index 970c5c6ffb9ef..9fa5cdae0b8a0 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/prettier.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/prettier.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/react.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/react.js index 6483a2e5579b1..14b7c08b665b6 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/react.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/react.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/stylistic.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/stylistic.js index 3307be64c3401..416b9a6196f95 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/stylistic.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/stylistic.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/typescript.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/typescript.js index 2583b602e7264..16109237d37ac 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/typescript.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/typescript.js @@ -1,5 +1,4 @@ /* eslint-disable max-lines */ - /*! * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -18,7 +17,6 @@ * specific language governing permissions and limitations * under the License. */ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/unicorn.js b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/unicorn.js index 4d8c28e84e7a5..46327bf9693dc 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/unicorn.js +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/rules/unicorn.js @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ - /** * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; */ diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/Login.tsx b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/Login.tsx index b1963a02484c5..6aa65345361fa 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/Login.tsx +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/Login.tsx @@ -32,6 +32,17 @@ export type LoginBody = { username: string; }; +const isSafeUrl = (targetUrl: string): boolean => { + try { + const base = new URL(globalThis.location.origin); + const target = new URL(targetUrl, base); + + return (target.protocol === "http:" || target.protocol === "https:") && target.origin === base.origin; + } catch { + return false; + } +}; + const LOCAL_STORAGE_DISABLE_BANNER_KEY = "disable-sam-banner"; export const Login = () => { @@ -42,15 +53,20 @@ export const Login = () => { ); const onSuccess = (data: LoginResponse) => { + // Fallback similar to FabAuthManager, strip off the next + const fallback = "/"; + // Redirect to appropriate page with the token - const next = searchParams.get("next"); + const next = searchParams.get("next") ?? fallback; setCookie("_token", data.access_token, { path: "/", secure: globalThis.location.protocol !== "http:", }); - globalThis.location.replace(next ?? ""); + const redirectTarget = isSafeUrl(next) ? next : fallback; + + globalThis.location.replace(redirectTarget); }; const { createToken, error, isPending, setError } = useCreateToken({ onSuccess, @@ -79,7 +95,7 @@ export const Login = () => { - {error === null && } + {Boolean(error) && } Enter your username and password below: diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/LoginForm.tsx b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/LoginForm.tsx index cd79cdd817329..69a6b4f14fc0a 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/LoginForm.tsx +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/src/login/LoginForm.tsx @@ -53,7 +53,8 @@ export const LoginForm = ({ isPending, onLogin }: LoginFormProps) => { render={({ field, fieldState }) => ( Username - + {/* eslint-disable-next-line jsx-a11y/no-autofocus */} + )} rules={{ required: true }} diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/utils.py b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/utils.py new file mode 100644 index 0000000000000..bd2767dc46ffe --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/utils.py @@ -0,0 +1,48 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
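The parse_login_body helper that follows accepts the login payload either as JSON or as a classic HTML form post, dispatching on the Content-Type header. A minimal client-side sketch of both encodings against the simple auth manager's token endpoint — the base URL, port, and /auth/token path are illustrative assumptions, not part of this patch:

import httpx

BASE = "http://localhost:8080"  # hypothetical api-server address

# JSON body -> handled via request.json() (Content-Type: application/json)
r_json = httpx.post(f"{BASE}/auth/token", json={"username": "admin", "password": "admin"})

# Form body -> handled via request.form() (Content-Type: application/x-www-form-urlencoded)
r_form = httpx.post(f"{BASE}/auth/token", data={"username": "admin", "password": "admin"})

# Any other Content-Type is rejected with 415 Unsupported Media Type, and a body
# that fails LoginBody validation is re-raised as a RequestValidationError (422).
for r in (r_json, r_form):
    print(r.status_code)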
+from __future__ import annotations + +from fastapi import HTTPException, Request, status +from fastapi.exceptions import RequestValidationError +from pydantic import ValidationError + +from airflow.api_fastapi.auth.managers.simple.datamodels.login import LoginBody +from airflow.api_fastapi.common.headers import HeaderContentTypeJsonOrForm +from airflow.api_fastapi.common.types import Mimetype + + +async def parse_login_body( + request: Request, + content_type: HeaderContentTypeJsonOrForm, +) -> LoginBody: + try: + if content_type == Mimetype.JSON: + body = await request.json() + elif content_type == Mimetype.FORM: + form = await request.form() + body = { + "username": form.get("username"), + "password": form.get("password"), + } + else: + raise HTTPException( + status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, + detail="Unsupported Media Type", + ) + return LoginBody(**body) + except ValidationError as e: + raise RequestValidationError(repr(e)) diff --git a/airflow-core/src/airflow/api_fastapi/auth/tokens.py b/airflow-core/src/airflow/api_fastapi/auth/tokens.py index a1e6729e5ea44..276ae17153da0 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/tokens.py +++ b/airflow-core/src/airflow/api_fastapi/auth/tokens.py @@ -19,10 +19,11 @@ import json import os import time +import uuid from base64 import urlsafe_b64encode -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime -from typing import TYPE_CHECKING, Any, Callable, Literal, overload +from typing import TYPE_CHECKING, Any, Literal, overload import attrs import httpx @@ -33,7 +34,7 @@ from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.serialization import load_pem_private_key -from airflow.utils import timezone +from airflow._shared.timezones import timezone if TYPE_CHECKING: from jwt.algorithms import AllowedKeys, AllowedPrivateKeys @@ -225,7 +226,7 @@ def _conf_factory(section, key, **kwargs): def factory() -> str: from airflow.configuration import conf - return conf.get(section, key, **kwargs, suppress_warnings=True) # type: ignore[return-value] + return conf.get(section, key, **kwargs, suppress_warnings=True) return factory @@ -370,6 +371,18 @@ def _load_key_from_configured_file() -> AllowedPrivateKeys | None: return _pem_to_key(fh.read()) +def _generate_kid(gen) -> str: + if not gen._private_key: + return "not-used" + + if kid := _conf_factory("api_auth", "jwt_kid", fallback=None)(): + return kid + + # Generate it from the thumbprint of the private key + info = key_to_jwk_dict(gen._private_key) + return info["kid"] + + @attrs.define(repr=False, kw_only=True) class JWTGenerator: """Generate JWT tokens.""" @@ -390,7 +403,7 @@ class JWTGenerator: ) """A pre-shared secret key to sign tokens with symmetric encryption""" - kid: str = attrs.field() + kid: str = attrs.field(default=attrs.Factory(_generate_kid, takes_self=True)) valid_for: float audience: str issuer: str | list[str] | None = attrs.field( @@ -400,18 +413,6 @@ class JWTGenerator: factory=_conf_list_factory("api_auth", "jwt_algorithm", first_only=True, fallback="GUESS") ) - @kid.default - def _generate_kid(self): - if not self._private_key: - return "not-used" - - if kid := _conf_factory("api_auth", "jwt_kid", fallback=None)(): - return kid - - # Generate it from the thumbprint of the private key - info = key_to_jwk_dict(self._private_key) - return info["kid"] - def __attrs_post_init__(self): if not (self._private_key is None) ^ (self._secret_key is None): raise 
ValueError("Exactly one of private_key and secret_key must be specified") @@ -437,12 +438,14 @@ def generate(self, extras: dict[str, Any] | None = None, headers: dict[str, Any] """Generate a signed JWT for the subject.""" now = int(datetime.now(tz=timezone.utc).timestamp()) claims = { + "jti": uuid.uuid4().hex, "iss": self.issuer, "aud": self.audience, "nbf": now, "exp": int(now + self.valid_for), "iat": now, } + if claims["iss"] is None: del claims["iss"] if claims["aud"] is None: @@ -535,7 +538,7 @@ def get_signing_key(section: str, key: str, make_secret_key_if_needed: bool = Tr raise ValueError(f"The value {section}/{key} must be set!") # Mypy can't grock the `if not secret_key` - return secret_key # type: ignore[return-value] + return secret_key def get_signing_args(make_secret_key_if_needed: bool = True) -> dict[str, Any]: diff --git a/airflow-core/src/airflow/api_fastapi/common/dagbag.py b/airflow-core/src/airflow/api_fastapi/common/dagbag.py new file mode 100644 index 0000000000000..f1c7271b020f6 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/common/dagbag.py @@ -0,0 +1,83 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from fastapi import Depends, HTTPException, Request, status +from sqlalchemy.orm import Session + +from airflow.models.dagbag import DBDagBag + +if TYPE_CHECKING: + from airflow.models.dag import DAG + from airflow.models.dagrun import DagRun + + +def create_dag_bag() -> DBDagBag: + """Create DagBag to retrieve DAGs from the database.""" + return DBDagBag() + + +def dag_bag_from_app(request: Request) -> DBDagBag: + """ + FastAPI dependency resolver that returns the shared DagBag instance from app.state. + + This ensures that all API routes using DagBag via dependency injection receive the same + singleton instance that was initialized at app startup. 
+ """ + return request.app.state.dag_bag + + +def get_latest_version_of_dag( + dag_bag: DBDagBag, dag_id: str, session: Session, include_reason: bool = False +) -> DAG: + dag = dag_bag.get_latest_version_of_dag(dag_id, session=session) + if not dag: + if include_reason: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + detail={ + "reason": "not_found", + "message": f"The Dag with ID: `{dag_id}` was not found", + }, + ) + raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Dag with ID: `{dag_id}` was not found") + return dag + + +def get_dag_for_run(dag_bag: DBDagBag, dag_run: DagRun, session: Session) -> DAG: + dag = dag_bag.get_dag_for_run(dag_run, session=session) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Dag with ID: `{dag_run.dag_id}` was not found") + return dag + + +def get_dag_for_run_or_latest_version( + dag_bag: DBDagBag, dag_run: DagRun | None, dag_id: str | None, session: Session +) -> DAG: + dag: DAG | None = None + if dag_run: + dag = dag_bag.get_dag_for_run(dag_run, session=session) + elif dag_id: + dag = dag_bag.get_latest_version_of_dag(dag_id, session=session) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Dag with ID: `{dag_id}` was not found") + return dag + + +DagBagDep = Annotated[DBDagBag, Depends(dag_bag_from_app)] diff --git a/airflow-core/src/airflow/api_fastapi/common/db/dag_runs.py b/airflow-core/src/airflow/api_fastapi/common/db/dag_runs.py index 8f4b02a067fb3..1ca5f1d261b54 100644 --- a/airflow-core/src/airflow/api_fastapi/common/db/dag_runs.py +++ b/airflow-core/src/airflow/api_fastapi/common/db/dag_runs.py @@ -19,14 +19,17 @@ from sqlalchemy import func, select +from airflow.models.dag import DagModel from airflow.models.dagrun import DagRun dagruns_select_with_state_count = ( select( DagRun.dag_id, DagRun.state, + DagModel.dag_display_name, func.count(DagRun.state), ) - .group_by(DagRun.dag_id, DagRun.state) + .join(DagModel, DagRun.dag_id == DagModel.dag_id) + .group_by(DagRun.dag_id, DagRun.state, DagModel.dag_display_name) .order_by(DagRun.dag_id) ) diff --git a/airflow-core/src/airflow/api_fastapi/common/db/dags.py b/airflow-core/src/airflow/api_fastapi/common/db/dags.py index d9d2b66e6f7a5..cefce66279883 100644 --- a/airflow-core/src/airflow/api_fastapi/common/db/dags.py +++ b/airflow-core/src/airflow/api_fastapi/common/db/dags.py @@ -21,72 +21,53 @@ from sqlalchemy import func, select +from airflow.api_fastapi.common.db.common import ( + apply_filters_to_select, +) +from airflow.api_fastapi.common.parameters import BaseParam, RangeFilter, SortParam +from airflow.models import DagModel +from airflow.models.dagrun import DagRun + if TYPE_CHECKING: from sqlalchemy.sql import Select -from airflow.models.dag import DagModel -from airflow.models.dagrun import DagRun +def generate_dag_with_latest_run_query(max_run_filters: list[BaseParam], order_by: SortParam) -> Select: + query = select(DagModel) -def generate_dag_with_latest_run_query(dag_runs_cte: Select | None = None) -> Select: - latest_dag_run_per_dag_id_cte = ( - select(DagRun.dag_id, func.max(DagRun.start_date).label("start_date")) - .where() + max_run_id_query = ( # ordering by id will not always be "latest run", but it's a simplifying assumption + select(DagRun.dag_id, func.max(DagRun.id).label("max_dag_run_id")) .group_by(DagRun.dag_id) - .cte() + .subquery(name="mrq") ) - dags_select_with_latest_dag_run = ( - select(DagModel) - .join( - latest_dag_run_per_dag_id_cte, - DagModel.dag_id == latest_dag_run_per_dag_id_cte.c.dag_id, - isouter=True, - 
) - .join( - DagRun, - DagRun.start_date == latest_dag_run_per_dag_id_cte.c.start_date - and DagRun.dag_id == latest_dag_run_per_dag_id_cte.c.dag_id, - isouter=True, - ) - .order_by(DagModel.dag_id) - ) + has_max_run_filter = False - if dag_runs_cte is None: - return dags_select_with_latest_dag_run + for max_run_filter in max_run_filters: + if isinstance(max_run_filter, RangeFilter): + if max_run_filter.is_active(): + has_max_run_filter = True + break + if max_run_filter.value: + has_max_run_filter = True + break - dag_run_filters_cte = ( - select(DagModel.dag_id) - .join( - dag_runs_cte, - DagModel.dag_id == dag_runs_cte.c.dag_id, - ) - .join( - DagRun, - DagRun.dag_id == dag_runs_cte.c.dag_id, - ) - .group_by(DagModel.dag_id) - .cte() + requested_order_by_set = set(order_by.value) if order_by.value is not None else set() + dag_run_order_by_set = set( + ["last_run_state", "last_run_start_date", "-last_run_state", "-last_run_start_date"], ) - dags_with_latest_and_filtered_runs = ( - select(DagModel) - .join( - dag_run_filters_cte, - dag_run_filters_cte.c.dag_id == DagModel.dag_id, - ) - .join( - latest_dag_run_per_dag_id_cte, - DagModel.dag_id == latest_dag_run_per_dag_id_cte.c.dag_id, - isouter=True, - ) - .join( - DagRun, - DagRun.start_date == latest_dag_run_per_dag_id_cte.c.start_date - and DagRun.dag_id == latest_dag_run_per_dag_id_cte.c.dag_id, + if has_max_run_filter or (requested_order_by_set & dag_run_order_by_set): + query = query.join( + max_run_id_query, + DagModel.dag_id == max_run_id_query.c.dag_id, isouter=True, + ).join(DagRun, DagRun.id == max_run_id_query.c.max_dag_run_id, isouter=True) + + if has_max_run_filter: + query = apply_filters_to_select( + statement=query, + filters=max_run_filters, ) - .order_by(DagModel.dag_id) - ) - return dags_with_latest_and_filtered_runs + return query diff --git a/airflow-core/src/airflow/api_fastapi/common/exceptions.py b/airflow-core/src/airflow/api_fastapi/common/exceptions.py index 061eec55d3d84..39909b7a46395 100644 --- a/airflow-core/src/airflow/api_fastapi/common/exceptions.py +++ b/airflow-core/src/airflow/api_fastapi/common/exceptions.py @@ -17,6 +17,8 @@ from __future__ import annotations +import logging +import traceback from abc import ABC, abstractmethod from enum import Enum from typing import Generic, TypeVar @@ -24,8 +26,13 @@ from fastapi import HTTPException, Request, status from sqlalchemy.exc import IntegrityError +from airflow.configuration import conf +from airflow.utils.strings import get_random_string + T = TypeVar("T", bound=Exception) +log = logging.getLogger(__name__) + class BaseErrorHandler(Generic[T], ABC): """Base class for error handlers.""" @@ -61,12 +68,28 @@ def __init__(self): def exception_handler(self, request: Request, exc: IntegrityError): """Handle IntegrityError exception.""" if self._is_dialect_matched(exc): + exception_id = get_random_string() + stacktrace = "" + for tb in traceback.format_tb(exc.__traceback__): + stacktrace += tb + + log_message = f"Error with id {exception_id}\n{stacktrace}" + log.error(log_message) + if conf.get("api", "expose_stacktrace") == "True": + message = log_message + else: + message = ( + "Serious error when handling your request. 
Check logs for more details - " + f"you will find it in api server when you look for ID {exception_id}" + ) + raise HTTPException( status_code=status.HTTP_409_CONFLICT, detail={ "reason": "Unique constraint violation", "statement": str(exc.statement), "orig_error": str(exc.orig), + "message": message, }, ) diff --git a/airflow-core/src/airflow/api_fastapi/common/headers.py b/airflow-core/src/airflow/api_fastapi/common/headers.py index 7d1a0fa69613b..6bcdfbf67c7dd 100644 --- a/airflow-core/src/airflow/api_fastapi/common/headers.py +++ b/airflow-core/src/airflow/api_fastapi/common/headers.py @@ -47,3 +47,53 @@ def header_accept_json_or_text_depends( HeaderAcceptJsonOrText = Annotated[Mimetype, Depends(header_accept_json_or_text_depends)] + + +def header_accept_json_or_ndjson_depends( + accept: Annotated[ + str, + Header( + json_schema_extra={ + "type": "string", + "enum": [Mimetype.JSON, Mimetype.NDJSON, Mimetype.ANY], + } + ), + ] = Mimetype.ANY, +) -> Mimetype: + if accept.startswith(Mimetype.ANY): + return Mimetype.ANY + if accept.startswith(Mimetype.JSON): + return Mimetype.JSON + if accept.startswith(Mimetype.NDJSON) or accept.startswith(Mimetype.ANY): + return Mimetype.NDJSON + + raise HTTPException( + status_code=status.HTTP_406_NOT_ACCEPTABLE, + detail="Only application/json or application/x-ndjson is supported", + ) + + +HeaderAcceptJsonOrNdjson = Annotated[Mimetype, Depends(header_accept_json_or_ndjson_depends)] + + +def header_content_type_json_or_form_depends( + content_type: Annotated[ + str, + Header( + alias="Content-Type", + description="Content-Type of the request body", + json_schema_extra={"enum": [Mimetype.JSON, Mimetype.FORM]}, + ), + ] = Mimetype.JSON, +) -> Mimetype: + if content_type.startswith(Mimetype.JSON): + return Mimetype.JSON + if content_type.startswith(Mimetype.FORM): + return Mimetype.FORM + raise HTTPException( + status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, + detail="Only application/json or application/x-www-form-urlencoded is supported", + ) + + +HeaderContentTypeJsonOrForm = Annotated[Mimetype, Depends(header_content_type_json_or_form_depends)] diff --git a/airflow-core/src/airflow/api_fastapi/common/parameters.py b/airflow-core/src/airflow/api_fastapi/common/parameters.py index 141447488c26d..46f8eef89a029 100644 --- a/airflow-core/src/airflow/api_fastapi/common/parameters.py +++ b/airflow-core/src/airflow/api_fastapi/common/parameters.py @@ -18,45 +18,46 @@ from __future__ import annotations from abc import ABC, abstractmethod -from collections.abc import Iterable +from collections.abc import Callable, Iterable from datetime import datetime from enum import Enum from typing import ( TYPE_CHECKING, Annotated, Any, - Callable, Generic, Literal, - Optional, TypeVar, - Union, overload, ) from fastapi import Depends, HTTPException, Query, status from pendulum.parsing.exceptions import ParserError from pydantic import AfterValidator, BaseModel, NonNegativeInt -from sqlalchemy import Column, and_, case, or_ +from sqlalchemy import Column, and_, case, func, not_, or_, select from sqlalchemy.inspection import inspect +from airflow._shared.timezones import timezone from airflow.api_fastapi.core_api.base import OrmClause +from airflow.api_fastapi.core_api.security import GetUserDep from airflow.models import Base from airflow.models.asset import ( AssetAliasModel, AssetModel, DagScheduleAssetReference, + TaskInletAssetReference, TaskOutletAssetReference, ) from airflow.models.connection import Connection from airflow.models.dag import DagModel, DagTag 
+from airflow.models.dag_favorite import DagFavorite from airflow.models.dag_version import DagVersion from airflow.models.dagrun import DagRun +from airflow.models.hitl import HITLDetail from airflow.models.pool import Pool from airflow.models.taskinstance import TaskInstance from airflow.models.variable import Variable from airflow.typing_compat import Self -from airflow.utils import timezone from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType @@ -111,6 +112,38 @@ def depends(cls, offset: NonNegativeInt = 0) -> OffsetFilter: return cls().set_value(offset) +class _FavoriteFilter(BaseParam[bool]): + """Filter DAGs by favorite status.""" + + def __init__(self, user_id: str, value: T | None = None, skip_none: bool = True) -> None: + super().__init__(skip_none=skip_none) + self.user_id = user_id + + def to_orm(self, select_stmt: Select) -> Select: + if self.value is None and self.skip_none: + return select_stmt + + if self.value: + select_stmt = select_stmt.join(DagFavorite, DagFavorite.dag_id == DagModel.dag_id).where( + DagFavorite.user_id == self.user_id + ) + else: + select_stmt = select_stmt.where( + not_( + select(DagFavorite) + .where(and_(DagFavorite.dag_id == DagModel.dag_id, DagFavorite.user_id == self.user_id)) + .exists() + ) + ) + + return select_stmt + + @classmethod + def depends(cls, user: GetUserDep, is_favorite: bool | None = Query(None)) -> _FavoriteFilter: + instance = cls(user_id=str(user.get_id())).set_value(is_favorite) + return instance + + class _ExcludeStaleFilter(BaseParam[bool]): """Filter on is_stale.""" @@ -151,7 +184,14 @@ def search_param_factory( pattern_name: str, skip_none: bool = True, ) -> Callable[[str | None], _SearchParam]: - def depends_search(value: str | None = Query(alias=pattern_name, default=None)) -> _SearchParam: + DESCRIPTION = ( + "SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). " + "Regular expressions are **not** supported." + ) + + def depends_search( + value: str | None = Query(alias=pattern_name, default=None, description=DESCRIPTION), + ) -> _SearchParam: search_parm = _SearchParam(attribute, skip_none) value = search_parm.transform_aliases(value) return search_parm.set_value(value) @@ -159,9 +199,11 @@ def depends_search(value: str | None = Query(alias=pattern_name, default=None)) return depends_search -class SortParam(BaseParam[str]): +class SortParam(BaseParam[list[str]]): """Order result by the attribute.""" + MAX_SORT_PARAMS = 10 + def __init__( self, allowed_attrs: list[str], model: Base, to_replace: dict[str, str | Column] | None = None ) -> None: @@ -175,38 +217,56 @@ def to_orm(self, select: Select) -> Select: raise ValueError(f"Cannot set 'skip_none' to False on a {type(self)}") if self.value is None: - self.value = self.get_primary_key_string() - - lstriped_orderby = self.value.lstrip("-") - column: Column | None = None - if self.to_replace: - replacement = self.to_replace.get(lstriped_orderby, lstriped_orderby) - if isinstance(replacement, str): - lstriped_orderby = replacement - else: - column = replacement + self.value = [self.get_primary_key_string()] - if (self.allowed_attrs and lstriped_orderby not in self.allowed_attrs) and column is None: + order_by_values = self.value + if len(order_by_values) > self.MAX_SORT_PARAMS: raise HTTPException( 400, - f"Ordering with '{lstriped_orderby}' is disallowed or " - f"the attribute does not exist on the model", + f"Ordering with more than {self.MAX_SORT_PARAMS} parameters is not allowed. 
Provided: {order_by_values}", ) - if column is None: - column = getattr(self.model, lstriped_orderby) - # MySQL does not support `nullslast`, and True/False ordering depends on the - # database implementation. - nullscheck = case((column.isnot(None), 0), else_=1) + columns: list[Column] = [] + for order_by_value in order_by_values: + lstriped_orderby = order_by_value.lstrip("-") + column: Column | None = None + if self.to_replace: + replacement = self.to_replace.get(lstriped_orderby, lstriped_orderby) + if isinstance(replacement, str): + lstriped_orderby = replacement + else: + column = replacement + + if (self.allowed_attrs and lstriped_orderby not in self.allowed_attrs) and column is None: + raise HTTPException( + 400, + f"Ordering with '{lstriped_orderby}' is disallowed or " + f"the attribute does not exist on the model", + ) + if column is None: + column = getattr(self.model, lstriped_orderby) + + # MySQL does not support `nullslast`, and True/False ordering depends on the + # database implementation. + nullscheck = case((column.isnot(None), 0), else_=1) + + columns.append(nullscheck) + if order_by_value.startswith("-"): + columns.append(column.desc()) + else: + columns.append(column.asc()) # Reset default sorting select = select.order_by(None) primary_key_column = self.get_primary_key_column() + # Always add a final discriminator to enforce deterministic ordering. + if order_by_values and order_by_values[0].startswith("-"): + columns.append(primary_key_column.desc()) + else: + columns.append(primary_key_column.asc()) - if self.value[0] == "-": - return select.order_by(nullscheck, column.desc(), primary_key_column.desc()) - return select.order_by(nullscheck, column.asc(), primary_key_column.asc()) + return select.order_by(*columns) def get_primary_key_column(self) -> Column: """Get the primary key column of the model of SortParam object.""" @@ -221,8 +281,12 @@ def depends(cls, *args: Any, **kwargs: Any) -> Self: raise NotImplementedError("Use dynamic_depends, depends not implemented.") def dynamic_depends(self, default: str | None = None) -> Callable: - def inner(order_by: str = default or self.get_primary_key_string()) -> SortParam: - return self.set_value(self.get_primary_key_string() if order_by == "" else order_by) + def inner( + order_by: list[str] = Query( + default=[default] if default is not None else [self.get_primary_key_string()] + ), + ) -> SortParam: + return self.set_value(order_by) return inner @@ -424,7 +488,7 @@ class _DagIdAssetReferenceFilter(BaseParam[list[str]]): """Search on dag_id.""" def __init__(self, skip_none: bool = True) -> None: - super().__init__(AssetModel.consuming_dags, skip_none) + super().__init__(AssetModel.scheduled_dags, skip_none) @classmethod def depends(cls, dag_ids: list[str] = Query(None)) -> _DagIdAssetReferenceFilter: @@ -437,8 +501,9 @@ def to_orm(self, select: Select) -> Select: if self.value is None and self.skip_none: return select return select.where( - (AssetModel.consuming_dags.any(DagScheduleAssetReference.dag_id.in_(self.value))) + (AssetModel.scheduled_dags.any(DagScheduleAssetReference.dag_id.in_(self.value))) | (AssetModel.producing_tasks.any(TaskOutletAssetReference.dag_id.in_(self.value))) + | (AssetModel.consuming_tasks.any(TaskInletAssetReference.dag_id.in_(self.value))) ) @@ -484,9 +549,12 @@ def depends_datetime( lower_bound: datetime | None = Query(alias=f"{filter_name}_gte", default=None), upper_bound: datetime | None = Query(alias=f"{filter_name}_lte", default=None), ) -> RangeFilter: + attr = getattr(model, 
attribute_name or filter_name) + if filter_name in ("start_date", "end_date"): + attr = func.coalesce(attr, func.now()) return RangeFilter( Range(lower_bound=lower_bound, upper_bound=upper_bound), - getattr(model, attribute_name or filter_name), + attr, ) return depends_datetime @@ -508,15 +576,16 @@ def depends_float( # Common Safe DateTime DateTimeQuery = Annotated[str, AfterValidator(_safe_parse_datetime)] -OptionalDateTimeQuery = Annotated[Union[str, None], AfterValidator(_safe_parse_datetime_optional)] +OptionalDateTimeQuery = Annotated[str | None, AfterValidator(_safe_parse_datetime_optional)] # DAG QueryLimit = Annotated[LimitFilter, Depends(LimitFilter.depends)] QueryOffset = Annotated[OffsetFilter, Depends(OffsetFilter.depends)] QueryPausedFilter = Annotated[ - FilterParam[Optional[bool]], - Depends(filter_param_factory(DagModel.is_paused, Optional[bool], filter_name="paused")), + FilterParam[bool | None], + Depends(filter_param_factory(DagModel.is_paused, bool | None, filter_name="paused")), ] +QueryFavoriteFilter = Annotated[_FavoriteFilter, Depends(_FavoriteFilter.depends)] QueryExcludeStaleFilter = Annotated[_ExcludeStaleFilter, Depends(_ExcludeStaleFilter.depends)] QueryDagIdPatternSearch = Annotated[ _SearchParam, Depends(search_param_factory(DagModel.dag_id, "dag_id_pattern")) @@ -524,6 +593,14 @@ def depends_float( QueryDagDisplayNamePatternSearch = Annotated[ _SearchParam, Depends(search_param_factory(DagModel.dag_display_name, "dag_display_name_pattern")) ] +QueryBundleNameFilter = Annotated[ + FilterParam[str | None], + Depends(filter_param_factory(DagModel.bundle_name, str | None, filter_name="bundle_name")), +] +QueryBundleVersionFilter = Annotated[ + FilterParam[str | None], + Depends(filter_param_factory(DagModel.bundle_version, str | None, filter_name="bundle_version")), +] QueryDagIdPatternSearchWithNone = Annotated[ _SearchParam, Depends(search_param_factory(DagModel.dag_id, "dag_id_pattern", False)) ] @@ -532,8 +609,8 @@ def depends_float( # DagRun QueryLastDagRunStateFilter = Annotated[ - FilterParam[Optional[DagRunState]], - Depends(filter_param_factory(DagRun.state, Optional[DagRunState], filter_name="last_dag_run_state")), + FilterParam[DagRunState | None], + Depends(filter_param_factory(DagRun.state, DagRunState | None, filter_name="last_dag_run_state")), ] @@ -601,7 +678,7 @@ def _transform_ti_states(states: list[str] | None) -> list[TaskInstanceState | N return None try: - return [None if s in ("none", None) else TaskInstanceState(s) for s in states] + return [None if s in ("no_status", "none", None) else TaskInstanceState(s) for s in states] except ValueError: raise HTTPException( status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, @@ -684,8 +761,8 @@ def _optional_boolean(value: bool | None) -> bool | None: return value if value is not None else False -QueryIncludeUpstream = Annotated[Union[bool], AfterValidator(_optional_boolean)] -QueryIncludeDownstream = Annotated[Union[bool], AfterValidator(_optional_boolean)] +QueryIncludeUpstream = Annotated[bool, AfterValidator(_optional_boolean)] +QueryIncludeDownstream = Annotated[bool, AfterValidator(_optional_boolean)] state_priority: list[None | TaskInstanceState] = [ TaskInstanceState.FAILED, @@ -707,3 +784,93 @@ def _optional_boolean(value: bool | None) -> bool | None: QueryConnectionIdPatternSearch = Annotated[ _SearchParam, Depends(search_param_factory(Connection.conn_id, "connection_id_pattern")) ] + +# Human in the loop +QueryHITLDetailDagIdPatternSearch = Annotated[ + _SearchParam, + Depends( + 
search_param_factory( + TaskInstance.dag_id, + "dag_id_pattern", + ) + ), +] +QueryHITLDetailTaskIdPatternSearch = Annotated[ + _SearchParam, + Depends( + search_param_factory( + TaskInstance.task_id, + "task_id_pattern", + ) + ), +] +QueryHITLDetailDagRunIdFilter = Annotated[ + FilterParam[str], + Depends( + filter_param_factory( + TaskInstance.run_id, + str, + filter_name="dag_run_id", + ), + ), +] +QueryHITLDetailSubjectSearch = Annotated[ + _SearchParam, + Depends( + search_param_factory( + HITLDetail.subject, + "subject_search", + ) + ), +] +QueryHITLDetailBodySearch = Annotated[ + _SearchParam, + Depends( + search_param_factory( + HITLDetail.body, + "body_search", + ) + ), +] +QueryHITLDetailResponseReceivedFilter = Annotated[ + FilterParam[bool | None], + Depends( + filter_param_factory( + HITLDetail.response_received, + bool | None, + filter_name="response_received", + ) + ), +] +QueryHITLDetailUserIdFilter = Annotated[ + FilterParam[list[str]], + Depends( + filter_param_factory( + HITLDetail.user_id, + list[str], + FilterOptionEnum.ANY_EQUAL, + default_factory=list, + filter_name="user_id", + ) + ), +] +QueryHITLDetailDagIdFilter = Annotated[ + FilterParam[str | None], + Depends( + filter_param_factory( + TaskInstance.dag_id, + str | None, + filter_name="dag_id", + ) + ), +] +QueryHITLDetailTaskIdFilter = Annotated[ + FilterParam[str | None], + Depends( + filter_param_factory( + TaskInstance.task_id, + str | None, + filter_name="task_id", + ) + ), +] diff --git a/airflow-core/src/airflow/api_fastapi/common/router.py b/airflow-core/src/airflow/api_fastapi/common/router.py index aeb1fb22452b5..e01b8462ba8bf 100644 --- a/airflow-core/src/airflow/api_fastapi/common/router.py +++ b/airflow-core/src/airflow/api_fastapi/common/router.py @@ -17,17 +17,11 @@ from __future__ import annotations -from collections.abc import Sequence -from enum import Enum -from typing import Any, Callable +from collections.abc import Callable +from typing import Any -from fastapi import APIRouter, params -from fastapi.datastructures import Default -from fastapi.routing import APIRoute -from fastapi.types import DecoratedCallable, IncEx -from fastapi.utils import generate_unique_id -from starlette.responses import JSONResponse, Response -from starlette.routing import BaseRoute +from fastapi import APIRouter +from fastapi.types import DecoratedCallable class AirflowRouter(APIRouter): @@ -36,58 +30,15 @@ class AirflowRouter(APIRouter): def api_route( self, path: str, - *, - response_model: Any = Default(None), - status_code: int | None = None, - tags: list[str | Enum] | None = None, - dependencies: Sequence[params.Depends] | None = None, - summary: str | None = None, - description: str | None = None, - response_description: str = "Successful Response", - responses: dict[int | str, dict[str, Any]] | None = None, - deprecated: bool | None = None, - methods: list[str] | None = None, operation_id: str | None = None, - response_model_include: IncEx | None = None, - response_model_exclude: IncEx | None = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: type[Response] = Default(JSONResponse), - name: str | None = None, - callbacks: list[BaseRoute] | None = None, - openapi_extra: dict[str, Any] | None = None, - generate_unique_id_function: Callable[[APIRoute], str] = Default(generate_unique_id), + **kwargs: Any, ) -> 
Callable[[DecoratedCallable], DecoratedCallable]:
        def decorator(func: DecoratedCallable) -> DecoratedCallable:
            self.add_api_route(
                path,
                func,
-                response_model=response_model,
-                status_code=status_code,
-                tags=tags,
-                dependencies=dependencies,
-                summary=summary,
-                description=description,
-                response_description=response_description,
-                responses=responses,
-                deprecated=deprecated,
-                methods=methods,
                 operation_id=operation_id or func.__name__,
-                response_model_include=response_model_include,
-                response_model_exclude=response_model_exclude,
-                response_model_by_alias=response_model_by_alias,
-                response_model_exclude_unset=response_model_exclude_unset,
-                response_model_exclude_defaults=response_model_exclude_defaults,
-                response_model_exclude_none=response_model_exclude_none,
-                include_in_schema=include_in_schema,
-                response_class=response_class,
-                name=name,
-                callbacks=callbacks,
-                openapi_extra=openapi_extra,
-                generate_unique_id_function=generate_unique_id_function,
+                **kwargs,
             )
             return func
diff --git a/airflow-core/src/airflow/api_fastapi/common/types.py b/airflow-core/src/airflow/api_fastapi/common/types.py
index 0b431dfdef466..c5df6259f4d5b 100644
--- a/airflow-core/src/airflow/api_fastapi/common/types.py
+++ b/airflow-core/src/airflow/api_fastapi/common/types.py
@@ -30,7 +30,7 @@
     ConfigDict,
 )

-from airflow.utils import timezone
+from airflow._shared.timezones import timezone

 UtcDateTime = Annotated[AwareDatetime, AfterValidator(lambda d: d.astimezone(timezone.utc))]
 """UTCDateTime is a datetime with timezone information"""
@@ -72,6 +72,8 @@ class Mimetype(str, Enum):

     TEXT = "text/plain"
     JSON = "application/json"
+    FORM = "application/x-www-form-urlencoded"
+    NDJSON = "application/x-ndjson"
     ANY = "*/*"

@@ -86,6 +88,7 @@ class ExtraMenuItem:
 class MenuItem(Enum):
     """Define all menu items defined in the menu."""

+    REQUIRED_ACTIONS = "Required Actions"
     ASSETS = "Assets"
     AUDIT_LOG = "Audit Log"
     CONFIG = "Config"
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/app.py b/airflow-core/src/airflow/api_fastapi/core_api/app.py
index 49f522994313a..67a3223fbfa8f 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/app.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/app.py
@@ -18,6 +18,7 @@

 import logging
 import os
+import sys
 import warnings
 from pathlib import Path
@@ -30,12 +31,13 @@
 from starlette.templating import Jinja2Templates

 from airflow.api_fastapi.auth.tokens import get_signing_key
-from airflow.api_fastapi.core_api.middleware import FlaskExceptionsMiddleware
 from airflow.exceptions import AirflowException
 from airflow.settings import AIRFLOW_PATH

 log = logging.getLogger(__name__)

+PY313 = sys.version_info >= (3, 13)
+

 def init_views(app: FastAPI) -> None:
     """Init views by registering the different routers."""
@@ -55,6 +57,13 @@ def init_views(app: FastAPI) -> None:

     templates = Jinja2Templates(directory=directory)

+    if dev_mode:
+        app.mount(
+            "/static/i18n/locales",
+            StaticFiles(directory=Path(AIRFLOW_PATH) / "airflow/ui/public/i18n/locales"),
+            name="dev_i18n_static",
+        )
+
     app.mount(
         "/static",
         StaticFiles(
@@ -118,6 +127,13 @@ def init_flask_plugins(app: FastAPI) -> None:
     try:
         from airflow.providers.fab.www.app import create_app
     except ImportError:
+        if PY313:
+            log.info(
+                "Some Airflow 2 plugins have been detected in your environment. The FAB provider does "
+                "not currently support Python 3.13, so Airflow 2 plugins cannot be used with Airflow 3 "
+                "until the FAB provider is compatible with Python 3.13."
+ ) + return raise AirflowException( "Some Airflow 2 plugins have been detected in your environment. " "To run them with Airflow 3, you must install the FAB provider in your Airflow environment." @@ -155,7 +171,7 @@ def init_config(app: FastAPI) -> None: # and 9 (slowest, most compression) app.add_middleware(GZipMiddleware, minimum_size=1024, compresslevel=5) - app.state.secret_key = get_signing_key("webserver", "secret_key") + app.state.secret_key = get_signing_key("api", "secret_key") def init_error_handlers(app: FastAPI) -> None: @@ -167,4 +183,18 @@ def init_error_handlers(app: FastAPI) -> None: def init_middlewares(app: FastAPI) -> None: - app.add_middleware(FlaskExceptionsMiddleware) + from airflow.configuration import conf + + if "SimpleAuthManager" in conf.get("core", "auth_manager") and conf.getboolean( + "core", "simple_auth_manager_all_admins" + ): + from airflow.api_fastapi.auth.managers.simple.middleware import SimpleAllAdminMiddleware + + app.add_middleware(SimpleAllAdminMiddleware) + + +def init_ui_plugins(app: FastAPI) -> None: + """Initialize UI plugins.""" + from airflow import plugins_manager + + plugins_manager.initialize_ui_plugins() diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/assets.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/assets.py index 70e72474d5da1..9199bed078c99 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/assets.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/assets.py @@ -19,7 +19,7 @@ from datetime import datetime -from pydantic import Field, field_validator +from pydantic import AliasPath, Field, NonNegativeInt, field_validator from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel from airflow.sdk.execution_time.secrets_masker import redact @@ -33,6 +33,15 @@ class DagScheduleAssetReference(StrictBaseModel): updated_at: datetime +class TaskInletAssetReference(StrictBaseModel): + """Task inlet reference serializer for assets.""" + + dag_id: str + task_id: str + created_at: datetime + updated_at: datetime + + class TaskOutletAssetReference(StrictBaseModel): """Task outlet reference serializer for assets.""" @@ -42,6 +51,13 @@ class TaskOutletAssetReference(StrictBaseModel): updated_at: datetime +class LastAssetEventResponse(BaseModel): + """Last asset event response serializer.""" + + id: NonNegativeInt | None = None + timestamp: datetime | None = None + + class AssetResponse(BaseModel): """Asset serializer for responses.""" @@ -52,9 +68,11 @@ class AssetResponse(BaseModel): extra: dict | None = None created_at: datetime updated_at: datetime - consuming_dags: list[DagScheduleAssetReference] + scheduled_dags: list[DagScheduleAssetReference] producing_tasks: list[TaskOutletAssetReference] + consuming_tasks: list[TaskInletAssetReference] aliases: list[AssetAliasResponse] + last_asset_event: LastAssetEventResponse | None = None @field_validator("extra", mode="after") @classmethod @@ -132,6 +150,7 @@ class QueuedEventResponse(BaseModel): dag_id: str asset_id: int created_at: datetime + dag_display_name: str = Field(validation_alias=AliasPath("dag_model", "dag_display_name")) class QueuedEventCollectionResponse(BaseModel): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/backfills.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/backfills.py index c74a7e2020313..aa3a7fd0ef9b7 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/backfills.py +++ 
b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/backfills.py @@ -19,6 +19,8 @@ from datetime import datetime +from pydantic import AliasPath, Field, NonNegativeInt + from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel from airflow.models.backfill import ReprocessBehavior @@ -38,7 +40,7 @@ class BackfillPostBody(StrictBaseModel): class BackfillResponse(BaseModel): """Base serializer for Backfill.""" - id: int + id: NonNegativeInt dag_id: str from_date: datetime to_date: datetime @@ -49,6 +51,7 @@ class BackfillResponse(BaseModel): created_at: datetime completed_at: datetime | None updated_at: datetime + dag_display_name: str = Field(validation_alias=AliasPath("dag_model", "dag_display_name")) class BackfillCollectionResponse(BaseModel): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/common.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/common.py index 3c7a04255c963..68cfd10a49f48 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/common.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/common.py @@ -23,7 +23,7 @@ from __future__ import annotations import enum -from typing import Annotated, Any, Generic, TypeVar, Union +from typing import Annotated, Any, Generic, Literal, TypeVar, Union from pydantic import Discriminator, Field, Tag @@ -34,7 +34,7 @@ K = TypeVar("K") -class BulkAction(enum.Enum): +class BulkAction(str, enum.Enum): """Bulk Action to be performed on the used model.""" CREATE = "create" @@ -66,6 +66,7 @@ class BulkBaseAction(StrictBaseModel, Generic[T]): class BulkCreateAction(BulkBaseAction[T]): """Bulk Create entity serializer for request bodies.""" + action: Literal[BulkAction.CREATE] = Field(description="The action to be performed on the entities.") entities: list[T] = Field(..., description="A list of entities to be created.") action_on_existence: BulkActionOnExistence = BulkActionOnExistence.FAIL @@ -73,6 +74,7 @@ class BulkCreateAction(BulkBaseAction[T]): class BulkUpdateAction(BulkBaseAction[T]): """Bulk Update entity serializer for request bodies.""" + action: Literal[BulkAction.UPDATE] = Field(description="The action to be performed on the entities.") entities: list[T] = Field(..., description="A list of entities to be updated.") action_on_non_existence: BulkActionNotOnExistence = BulkActionNotOnExistence.FAIL @@ -80,6 +82,7 @@ class BulkUpdateAction(BulkBaseAction[T]): class BulkDeleteAction(BulkBaseAction[T]): """Bulk Delete entity serializer for request bodies.""" + action: Literal[BulkAction.DELETE] = Field(description="The action to be performed on the entities.") entities: list[str] = Field(..., description="A list of entity id/key to be deleted.") action_on_non_existence: BulkActionNotOnExistence = BulkActionNotOnExistence.FAIL diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py index fee330e1fd1d6..e29d002c26ff4 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py @@ -47,7 +47,7 @@ class ConnectionResponse(BaseModel): def redact_password(cls, v: str | None, field_info: ValidationInfo) -> str | None: if v is None: return None - return redact(v, field_info.field_name) + return str(redact(v, field_info.field_name)) @field_validator("extra", mode="before") @classmethod @@ -136,3 +136,26 @@ class ConnectionBody(StrictBaseModel): port: int | None = 
Field(default=None)
     password: str | None = Field(default=None)
     extra: str | None = Field(default=None)
+
+    @field_validator("extra")
+    @classmethod
+    def validate_extra(cls, v: str | None) -> str | None:
+        """
+        Validate that the `extra` field is a JSON-encoded Python dict.
+
+        `None` is passed through unchanged, `""` is normalized to `"{}"`, and any
+        other value that is not a valid JSON object raises a ValueError.
+        """
+        if v is None:
+            return v
+        if v == "":
+            return "{}"  # Backward compatibility: treat "" as empty JSON object
+        try:
+            extra_dict = json.loads(v)
+            if not isinstance(extra_dict, dict):
+                raise ValueError("The `extra` field must be a valid JSON object (e.g., {'key': 'value'})")
+        except json.JSONDecodeError:
+            raise ValueError(
+                "The `extra` field must be a valid JSON object (e.g., {'key': 'value'}), "
+                "but encountered non-JSON in `extra` field"
+            )
+        return v
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_run.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_run.py
index a6bcf3361adf4..cc128809d4355 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_run.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_run.py
@@ -21,13 +21,13 @@
 from enum import Enum
 from typing import TYPE_CHECKING

-from pydantic import AwareDatetime, Field, NonNegativeInt, model_validator
+from pydantic import AliasPath, AwareDatetime, Field, NonNegativeInt, model_validator

+from airflow._shared.timezones import timezone
 from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel
 from airflow.api_fastapi.core_api.datamodels.dag_versions import DagVersionResponse
 from airflow.models import DagRun
 from airflow.timetables.base import DataInterval
-from airflow.utils import timezone
 from airflow.utils.state import DagRunState
 from airflow.utils.types import DagRunTriggeredByType, DagRunType
@@ -55,6 +55,10 @@ class DAGRunClearBody(StrictBaseModel):

     dry_run: bool = True
     only_failed: bool = False
+    run_on_latest_version: bool = Field(
+        default=False,
+        description="(Experimental) Run on the latest bundle version of the Dag after clearing the Dag Run.",
+    )


 class DAGRunResponse(BaseModel):
@@ -66,6 +70,7 @@ class DAGRunResponse(BaseModel):
     queued_at: datetime | None
     start_date: datetime | None
     end_date: datetime | None
+    duration: float | None
     data_interval_start: datetime | None
     data_interval_end: datetime | None
     run_after: datetime
@@ -73,9 +78,12 @@
     run_type: DagRunType
     state: DagRunState
     triggered_by: DagRunTriggeredByType | None
-    conf: dict
+    triggering_user_name: str | None
+    conf: dict | None
     note: str | None
     dag_versions: list[DagVersionResponse]
+    bundle_version: str | None
+    dag_display_name: str = Field(validation_alias=AliasPath("dag_model", "dag_display_name"))


 class DAGRunCollectionResponse(BaseModel):
@@ -94,7 +102,7 @@ class TriggerDAGRunPostBody(StrictBaseModel):

     logical_date: AwareDatetime | None
     run_after: datetime | None = Field(default_factory=timezone.utcnow)
-    conf: dict = Field(default_factory=dict)
+    conf: dict | None = Field(default_factory=dict)
     note: str | None = None

     @model_validator(mode="after")
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_sources.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_sources.py
index 6db3f334b805c..c41d55f9aa2fb 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_sources.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_sources.py
@@ -16,6 +16,8 @@
 # under the License.
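[Editor's note] A quick illustration of the `extra` validation rules introduced on ConnectionBody above. This is a standalone sketch that mirrors the validator's logic outside pydantic, not the shipped implementation; only the standard-library `json` module is assumed:

    import json

    def validate_extra(v: str | None) -> str | None:
        # None passes through untouched; "" is normalized to an empty JSON object.
        if v is None:
            return v
        if v == "":
            return "{}"
        try:
            extra_dict = json.loads(v)
        except json.JSONDecodeError:
            raise ValueError("The `extra` field must be a valid JSON object")
        if not isinstance(extra_dict, dict):
            # A bare list or scalar is valid JSON but not a JSON *object*.
            raise ValueError("The `extra` field must be a valid JSON object")
        return v

    validate_extra('{"key": "value"}')   # accepted, returned unchanged
    validate_extra("")                   # normalized to "{}"
    # validate_extra("[1, 2]")           # would raise ValueError: not a JSON object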
from __future__ import annotations +from pydantic import AliasPath, Field + from airflow.api_fastapi.core_api.base import BaseModel @@ -25,3 +27,4 @@ class DAGSourceResponse(BaseModel): content: str | None dag_id: str version_number: int | None + dag_display_name: str = Field(validation_alias=AliasPath("dag_model", "dag_display_name")) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_stats.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_stats.py index 1effdd5a94f7a..921886b37c4b6 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_stats.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_stats.py @@ -17,6 +17,8 @@ from __future__ import annotations +from pydantic import AliasPath, Field + from airflow.api_fastapi.core_api.base import BaseModel from airflow.utils.state import DagRunState @@ -32,6 +34,7 @@ class DagStatsResponse(BaseModel): """DAG Stats serializer for responses.""" dag_id: str + dag_display_name: str = Field(validation_alias=AliasPath("dag_model", "dag_display_name")) stats: list[DagStatsStateResponse] diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py index 47a28b7528477..2475d49031fa1 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py @@ -19,7 +19,8 @@ from datetime import datetime from uuid import UUID -from pydantic import computed_field +from pydantic import AliasPath, Field, computed_field +from sqlalchemy import select from airflow.api_fastapi.core_api.base import BaseModel from airflow.dag_processing.bundles.manager import DagBundlesManager @@ -34,13 +35,30 @@ class DagVersionResponse(BaseModel): bundle_name: str | None bundle_version: str | None created_at: datetime + dag_display_name: str = Field(validation_alias=AliasPath("dag_model", "dag_display_name")) # Mypy issue https://github.com/python/mypy/issues/1362 - @computed_field # type: ignore[misc] + @computed_field # type: ignore[prop-decorator] @property def bundle_url(self) -> str | None: if self.bundle_name: - return DagBundlesManager().view_url(self.bundle_name, self.bundle_version) + # Get the bundle model from the database and render the URL + from airflow.models.dagbundle import DagBundleModel + from airflow.utils.session import create_session + + with create_session() as session: + bundle_model = session.scalar( + select(DagBundleModel).where(DagBundleModel.name == self.bundle_name) + ) + + if bundle_model and hasattr(bundle_model, "signed_url_template"): + return bundle_model.render_url(self.bundle_version) + # fallback to the deprecated option if the bundle model does not have a signed_url_template + # attribute + try: + return DagBundlesManager().view_url(self.bundle_name, self.bundle_version) + except ValueError: + return None return None diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py index 8f849a259f23b..572b62b4818ce 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py @@ -17,6 +17,7 @@ from __future__ import annotations +import inspect from collections import abc from collections.abc import Iterable from datetime import datetime, timedelta @@ -62,6 +63,7 @@ class DAGResponse(BaseModel): last_parsed_time: 
datetime | None last_expired: datetime | None bundle_name: str | None + bundle_version: str | None relative_fileloc: str | None fileloc: str description: str | None @@ -89,7 +91,7 @@ def get_owners(cls, v: Any) -> list[str] | None: if v is None: return [] if isinstance(v, str): - return v.split(",") + return [x.strip() for x in v.split(",")] return v @field_validator("timetable_summary", mode="before") @@ -101,11 +103,11 @@ def get_timetable_summary(cls, tts: str | None) -> str | None: return str(tts) # Mypy issue https://github.com/python/mypy/issues/1362 - @computed_field # type: ignore[misc] + @computed_field # type: ignore[prop-decorator] @property def file_token(self) -> str: """Return file token.""" - serializer = URLSafeSerializer(conf.get_mandatory_value("webserver", "secret_key")) + serializer = URLSafeSerializer(conf.get_mandatory_value("api", "secret_key")) payload = { "bundle_name": self.bundle_name, "relative_fileloc": self.relative_fileloc, @@ -153,6 +155,8 @@ class DAGDetailsResponse(DAGResponse): template_search_path: Iterable[str] | None timezone: str | None last_parsed: datetime | None + default_args: abc.Mapping | None + owner_links: dict[str, str] | None = None @field_validator("timezone", mode="before") @classmethod @@ -162,6 +166,14 @@ def get_timezone(cls, tz: Timezone | FixedTimezone) -> str | None: return None return str(tz) + @field_validator("doc_md", mode="before") + @classmethod + def get_doc_md(cls, doc_md: str | None) -> str | None: + """Clean indentation in doc md.""" + if doc_md is None: + return None + return inspect.cleandoc(doc_md) + @field_validator("params", mode="before") @classmethod def get_params(cls, params: abc.MutableMapping | None) -> dict | None: @@ -171,18 +183,18 @@ def get_params(cls, params: abc.MutableMapping | None) -> dict | None: return {k: v.dump() for k, v in params.items()} # Mypy issue https://github.com/python/mypy/issues/1362 - @computed_field # type: ignore[misc] + @computed_field # type: ignore[prop-decorator] @property def concurrency(self) -> int: """Return max_active_tasks as concurrency.""" return self.max_active_tasks # Mypy issue https://github.com/python/mypy/issues/1362 - @computed_field # type: ignore[misc] + @computed_field # type: ignore[prop-decorator] @property def latest_dag_version(self) -> DagVersionResponse | None: """Return the latest DagVersion.""" - latest_dag_version = DagVersion.get_latest_version(self.dag_id) + latest_dag_version = DagVersion.get_latest_version(self.dag_id, load_dag_model=True) if latest_dag_version is None: return latest_dag_version return DagVersionResponse.model_validate(latest_dag_version) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/event_logs.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/event_logs.py index 26a1364a2db98..d7355bc1294b1 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/event_logs.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/event_logs.py @@ -19,7 +19,7 @@ from datetime import datetime -from pydantic import Field +from pydantic import AliasPath, Field from airflow.api_fastapi.core_api.base import BaseModel @@ -38,6 +38,9 @@ class EventLogResponse(BaseModel): logical_date: datetime | None owner: str | None extra: str | None + dag_display_name: str | None = Field( + validation_alias=AliasPath("dag_model", "dag_display_name"), default=None + ) class EventLogCollectionResponse(BaseModel): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py 
b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py new file mode 100644 index 0000000000000..6dd83ff495bcd --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py @@ -0,0 +1,78 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from collections.abc import Mapping +from datetime import datetime +from typing import Any + +from pydantic import Field, field_validator + +from airflow.api_fastapi.core_api.base import BaseModel +from airflow.api_fastapi.core_api.datamodels.task_instances import TaskInstanceResponse +from airflow.sdk import Param + + +class UpdateHITLDetailPayload(BaseModel): + """Schema for updating the content of a Human-in-the-loop detail.""" + + chosen_options: list[str] = Field(min_length=1) + params_input: Mapping = Field(default_factory=dict) + + +class HITLDetailResponse(BaseModel): + """Response of updating a Human-in-the-loop detail.""" + + user_id: str + response_at: datetime + chosen_options: list[str] = Field(min_length=1) + params_input: Mapping = Field(default_factory=dict) + + +class HITLDetail(BaseModel): + """Schema for Human-in-the-loop detail.""" + + task_instance: TaskInstanceResponse + + # User Request Detail + options: list[str] = Field(min_length=1) + subject: str + body: str | None = None + defaults: list[str] | None = None + multiple: bool = False + params: dict[str, Any] = Field(default_factory=dict) + + # Response Content Detail + user_id: str | None = None + response_at: datetime | None = None + chosen_options: list[str] | None = None + params_input: dict[str, Any] = Field(default_factory=dict) + + response_received: bool = False + + @field_validator("params", mode="before") + @classmethod + def get_params(cls, params: dict[str, Any]) -> dict[str, Any]: + """Convert params attribute to dict representation.""" + return {k: v.dump() if isinstance(v, Param) else v for k, v in params.items()} + + +class HITLDetailCollection(BaseModel): + """Schema for a collection of Human-in-the-loop details.""" + + hitl_details: list[HITLDetail] + total_entries: int diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/job.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/job.py index d797464ee907b..c06ac730c2b93 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/job.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/job.py @@ -18,6 +18,8 @@ from datetime import datetime +from pydantic import AliasPath, Field + from airflow.api_fastapi.core_api.base import BaseModel @@ -34,6 +36,9 @@ class JobResponse(BaseModel): executor_class: str | None hostname: str | None unixname: str | None + dag_display_name: str | None = Field( + validation_alias=AliasPath("dag_model", "dag_display_name"), default=None 
+ ) class JobCollectionResponse(BaseModel): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/log.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/log.py index e67264ae3c315..aa1298d174511 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/log.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/log.py @@ -42,3 +42,9 @@ class TaskInstancesLogResponse(BaseModel): content: list[StructuredLogMessage] | list[str] """Either a list of parsed events, or a list of lines on parse error""" continuation_token: str | None + + +class ExternalLogUrlResponse(BaseModel): + """Response for the external log URL endpoint.""" + + url: str diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/plugins.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/plugins.py index 1aab230ac3df5..8a2873e6745a4 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/plugins.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/plugins.py @@ -17,9 +17,9 @@ from __future__ import annotations -from typing import Annotated, Any +from typing import Annotated, Any, Literal -from pydantic import BeforeValidator, ConfigDict, field_validator +from pydantic import BeforeValidator, ConfigDict, Field, field_validator, model_validator from airflow.api_fastapi.core_api.base import BaseModel from airflow.plugins_manager import AirflowPluginSource @@ -65,10 +65,43 @@ class AppBuilderMenuItemResponse(BaseModel): model_config = ConfigDict(extra="allow") name: str - href: str | None = None + href: str category: str | None = None +BaseDestinationLiteral = Literal["nav", "dag", "dag_run", "task", "task_instance"] + + +class BaseUIResponse(BaseModel): + """Base serializer for UI Plugin responses.""" + + model_config = ConfigDict(extra="allow") + + name: str + icon: str | None = None + icon_dark_mode: str | None = None + url_route: str | None = None + category: str | None = None + + +class ExternalViewResponse(BaseUIResponse): + """Serializer for External View Plugin responses.""" + + model_config = ConfigDict(extra="allow") + + href: str + destination: BaseDestinationLiteral = "nav" + + +class ReactAppResponse(BaseUIResponse): + """Serializer for React App Plugin responses.""" + + model_config = ConfigDict(extra="allow") + + bundle_url: str + destination: Literal[BaseDestinationLiteral, "dashboard"] = "nav" + + class PluginResponse(BaseModel): """Plugin serializer.""" @@ -77,8 +110,14 @@ class PluginResponse(BaseModel): flask_blueprints: list[str] fastapi_apps: list[FastAPIAppResponse] fastapi_root_middlewares: list[FastAPIRootMiddlewareResponse] + external_views: list[ExternalViewResponse] = Field( + description="Aggregate all external views. Both 'external_views' and 'appbuilder_menu_items' are included here." 
+ ) + react_apps: list[ReactAppResponse] appbuilder_views: list[AppBuilderViewResponse] - appbuilder_menu_items: list[AppBuilderMenuItemResponse] + appbuilder_menu_items: list[AppBuilderMenuItemResponse] = Field( + deprecated="Kept for backward compatibility, use `external_views` instead.", + ) global_operator_extra_links: list[str] operator_extra_links: list[str] source: Annotated[str, BeforeValidator(coerce_to_string)] @@ -92,9 +131,29 @@ def convert_source(cls, data: Any) -> Any: return str(data) return data + @model_validator(mode="before") + @classmethod + def convert_external_views(cls, data: Any) -> Any: + data["external_views"] = [*data["external_views"], *data["appbuilder_menu_items"]] + return data + class PluginCollectionResponse(BaseModel): """Plugin Collection serializer.""" plugins: list[PluginResponse] total_entries: int + + +class PluginImportErrorResponse(BaseModel): + """Plugin Import Error serializer for responses.""" + + source: str + error: str + + +class PluginImportErrorCollectionResponse(BaseModel): + """Plugin Import Error Collection serializer.""" + + import_errors: list[PluginImportErrorResponse] + total_entries: int diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py index 2e7ae13cfcdb2..4342d04c1e125 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/pools.py @@ -17,7 +17,8 @@ from __future__ import annotations -from typing import Annotated, Callable +from collections.abc import Callable +from typing import Annotated from pydantic import BeforeValidator, Field diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py index d5f561e44ff8b..fb87fb66c0aad 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py @@ -55,6 +55,7 @@ class TaskInstanceResponse(BaseModel): try_number: int max_tries: int task_display_name: str + dag_display_name: str = Field(validation_alias=AliasPath("dag_run", "dag_model", "dag_display_name")) hostname: str | None unixname: str | None pool: str @@ -140,6 +141,7 @@ class TaskInstanceHistoryResponse(BaseModel): try_number: int max_tries: int task_display_name: str + dag_display_name: str = Field(validation_alias=AliasPath("dag_run", "dag_model", "dag_display_name")) hostname: str | None unixname: str | None pool: str @@ -177,6 +179,11 @@ class ClearTaskInstancesBody(StrictBaseModel): include_downstream: bool = False include_future: bool = False include_past: bool = False + run_on_latest_version: bool = Field( + default=False, + description="(Experimental) Run on the latest bundle version of the dag after " + "clearing the task instances.", + ) @model_validator(mode="before") @classmethod @@ -222,3 +229,10 @@ def validate_new_state(cls, ns: str | None) -> str: if ns not in valid_states: raise ValueError(f"'{ns}' is not one of {valid_states}") return ns + + +class BulkTaskInstanceBody(PatchTaskInstanceBody, StrictBaseModel): + """Request body for bulk update, and delete task instances.""" + + task_id: str + map_index: int | None = None diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py index 13a9af7043680..d2e8e285fb3ce 100644 --- 
a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py @@ -100,7 +100,7 @@ def get_params(cls, params: abc.MutableMapping | None) -> dict | None: return {param_name: param_val.dump() for param_name, param_val in params.items()} # Mypy issue https://github.com/python/mypy/issues/1362 - @computed_field # type: ignore[misc] + @computed_field # type: ignore[prop-decorator] @property def extra_links(self) -> list[str]: """Extract and return extra_links.""" diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py new file mode 100644 index 0000000000000..c3cc5a53d8cd1 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime +from typing import Literal + +from pydantic import BaseModel + +from airflow.utils.state import DagRunState + + +class CalendarTimeRangeResponse(BaseModel): + """Represents a summary of DAG runs for a specific calendar time range.""" + + date: datetime + state: Literal[ + DagRunState.QUEUED, + DagRunState.RUNNING, + DagRunState.SUCCESS, + DagRunState.FAILED, + "planned", + ] + count: int + + +class CalendarTimeRangeCollectionResponse(BaseModel): + """Response model for calendar time range results.""" + + total_entries: int + dag_runs: list[CalendarTimeRangeResponse] diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py index cc4d7913b2244..2dee03e6510cc 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py @@ -17,9 +17,15 @@ from __future__ import annotations +from datetime import datetime from typing import Generic, Literal, TypeVar +from pydantic import computed_field + +from airflow._shared.timezones import timezone from airflow.api_fastapi.core_api.base import BaseModel +from airflow.utils.state import TaskInstanceState +from airflow.utils.types import DagRunType class BaseEdgeResponse(BaseModel): @@ -52,6 +58,36 @@ class BaseNodeResponse(BaseModel): N = TypeVar("N", bound=BaseNodeResponse) +class GridNodeResponse(BaseModel): + """Base Node serializer for responses.""" + + id: str + label: str + children: list[GridNodeResponse] | None = None + is_mapped: bool | None + setup_teardown_type: Literal["setup", "teardown"] | None = None + + +class GridRunsResponse(BaseModel): + """Base Node serializer for responses.""" + + dag_id: str + run_id: str + queued_at: datetime | None + start_date: 
datetime | None
+    end_date: datetime | None
+    run_after: datetime
+    state: TaskInstanceState | None
+    run_type: DagRunType
+
+    @computed_field
+    def duration(self) -> int:
+        if self.start_date:
+            end_date = self.end_date or timezone.utcnow()
+            return int((end_date - self.start_date).total_seconds())
+        return 0
+
+
 class BaseGraphResponse(BaseModel, Generic[E, N]):
     """Base Graph serializer for responses."""
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py
index e7c39b3f3a154..5644f49e0cfa2 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py
@@ -24,20 +24,14 @@
 class ConfigResponse(BaseModel):
     """configuration serializer."""

-    navbar_color: str
-    navbar_text_color: str
-    navbar_hover_color: str
-    navbar_text_hover_color: str
     page_size: int
     auto_refresh_interval: int
     hide_paused_dags_by_default: bool
     instance_name: str
-    instance_name_has_markup: bool
     enable_swagger_ui: bool
     require_confirmation_dag_change: bool
     default_wrap: bool
-    warn_deployment_exposure: bool
-    audit_view_excluded_events: str
-    audit_view_included_events: str
     test_connection: str
     dashboard_alert: list[UIAlert]
+    show_external_log_redirect: bool
+    external_log_name: str | None = None
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dag_runs.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dag_runs.py
new file mode 100644
index 0000000000000..d99761f745753
--- /dev/null
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dag_runs.py
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
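[Editor's note] Why `total_seconds()` rather than `.seconds` in the `duration` computed field above: `timedelta.seconds` only carries the sub-day remainder, so any run longer than 24 hours would under-report its duration. A minimal, self-contained demonstration:

    from datetime import datetime, timedelta, timezone

    start = datetime(2025, 1, 1, tzinfo=timezone.utc)
    end = start + timedelta(days=1, seconds=5)

    delta = end - start
    print(delta.seconds)               # 5 -- the day component is silently dropped
    print(int(delta.total_seconds()))  # 86405 -- the full duration of the run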
+from __future__ import annotations + +from datetime import datetime + +from airflow.api_fastapi.core_api.base import BaseModel +from airflow.utils.state import DagRunState + + +class DAGRunLightResponse(BaseModel): + """DAG Run serializer for responses.""" + + id: int + dag_id: str + run_id: str + logical_date: datetime | None + run_after: datetime + start_date: datetime | None + end_date: datetime | None + state: DagRunState diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dashboard.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dashboard.py index ad80685882829..bf2afa9fcd3c8 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dashboard.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dashboard.py @@ -61,3 +61,12 @@ class HistoricalMetricDataResponse(BaseModel): dag_run_types: DAGRunTypes dag_run_states: DAGRunStates task_instance_states: TaskInstanceStateCount + + +class DashboardDagStatsResponse(BaseModel): + """Dashboard DAG Stats serializer for responses.""" + + active_dag_count: int + failed_dag_count: int + running_dag_count: int + queued_dag_count: int diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py index 822eb6f3e1a89..b523dce96ffaa 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py @@ -19,46 +19,24 @@ from datetime import datetime -from pydantic import BaseModel, Field +from pydantic import BaseModel -from airflow.api_fastapi.core_api.datamodels.ui.structure import StructureDataResponse -from airflow.utils.state import DagRunState, TaskInstanceState -from airflow.utils.types import DagRunType +from airflow.utils.state import TaskInstanceState -class GridTaskInstanceSummary(BaseModel): +class LightGridTaskInstanceSummary(BaseModel): """Task Instance Summary model for the Grid UI.""" task_id: str - try_number: int - start_date: datetime | None - end_date: datetime | None - queued_dttm: datetime | None - child_states: dict[str, int] | None - task_count: int state: TaskInstanceState | None - note: str | None + child_states: dict[TaskInstanceState | None, int] | None + min_start_date: datetime | None + max_end_date: datetime | None -class GridDAGRunwithTIs(BaseModel): +class GridTISummaries(BaseModel): """DAG Run model for the Grid UI.""" - run_id: str = Field(serialization_alias="dag_run_id", validation_alias="run_id") - queued_at: datetime | None - start_date: datetime | None - end_date: datetime | None - run_after: datetime - state: DagRunState - run_type: DagRunType - logical_date: datetime | None - data_interval_start: datetime | None - data_interval_end: datetime | None - note: str | None - task_instances: list[GridTaskInstanceSummary] - - -class GridResponse(BaseModel): - """Response model for the Grid UI.""" - - dag_runs: list[GridDAGRunwithTIs] - structure: StructureDataResponse + run_id: str + dag_id: str + task_instances: list[LightGridTaskInstanceSummary] diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py index 2317d8a168b82..d60f052f77705 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py @@ -19,7 +19,7 @@ import json -from pydantic import Field, model_validator +from pydantic 
import Field, JsonValue, model_validator from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel from airflow.models.base import ID_LEN @@ -46,7 +46,7 @@ def redact_val(self) -> Self: return self except json.JSONDecodeError: # value is not a serialized string representation of a dict. - self.val = redact(self.val, self.key) + self.val = str(redact(self.val, self.key)) return self @@ -54,7 +54,7 @@ class VariableBody(StrictBaseModel): """Variable serializer for bodies.""" key: str = Field(max_length=ID_LEN) - value: str = Field(serialization_alias="val") + value: JsonValue = Field(serialization_alias="val") description: str | None = Field(default=None) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/xcom.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/xcom.py index 189fad8290cb9..ec65436955b7f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/xcom.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/xcom.py @@ -19,7 +19,7 @@ from datetime import datetime from typing import Any -from pydantic import field_validator +from pydantic import AliasPath, Field, field_validator from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel @@ -34,6 +34,7 @@ class XComResponse(BaseModel): task_id: str dag_id: str run_id: str + dag_display_name: str = Field(validation_alias=AliasPath("dag_run", "dag_model", "dag_display_name")) class XComResponseNative(XComResponse): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/init_dagbag.py b/airflow-core/src/airflow/api_fastapi/core_api/init_dagbag.py deleted file mode 100644 index 720276d054b94..0000000000000 --- a/airflow-core/src/airflow/api_fastapi/core_api/init_dagbag.py +++ /dev/null @@ -1,29 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import os - -from airflow.models import DagBag -from airflow.settings import DAGS_FOLDER - - -def get_dag_bag() -> DagBag: - """Instantiate the appropriate DagBag based on the ``SKIP_DAGS_PARSING`` environment variable.""" - if os.environ.get("SKIP_DAGS_PARSING") == "True": - return DagBag(os.devnull, include_examples=False) - return DagBag(DAGS_FOLDER, read_dags_from_db=True) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/middleware.py b/airflow-core/src/airflow/api_fastapi/core_api/middleware.py deleted file mode 100644 index e88c9acc5438a..0000000000000 --- a/airflow-core/src/airflow/api_fastapi/core_api/middleware.py +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -from fastapi import HTTPException, Request -from starlette.middleware.base import BaseHTTPMiddleware - - -# Custom Middleware Class -class FlaskExceptionsMiddleware(BaseHTTPMiddleware): - """Middleware that converts exceptions thrown in the Flask application to Fastapi exceptions.""" - - async def dispatch(self, request: Request, call_next): - response = await call_next(request) - - # Check if the WSGI response contains an error - if response.status_code >= 400 and response.media_type == "application/json": - body = await response.json() - if "error" in body: - # Transform the WSGI app's exception into a FastAPI HTTPException - raise HTTPException( - status_code=response.status_code, - detail=body["error"], - ) - return response diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml index 4c1c040e01f80..36f435f31cdff 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml @@ -22,6 +22,7 @@ paths: $ref: '#/components/schemas/MenuItemCollectionResponse' security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] /ui/next_run_assets/{dag_id}: get: tags: @@ -30,6 +31,7 @@ paths: operationId: next_run_assets security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] parameters: - name: dag_id in: path @@ -74,6 +76,7 @@ paths: $ref: '#/components/schemas/HTTPExceptionResponse' security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] /ui/connections/hook_meta: get: tags: @@ -94,15 +97,17 @@ paths: title: Response Hook Meta Data security: - OAuth2PasswordBearer: [] - /ui/dags/recent_dag_runs: + - HTTPBearer: [] + /ui/dags: get: tags: - - Dags - summary: Recent Dag Runs - description: Get recent DAG runs. - operationId: recent_dag_runs + - DAG + summary: Get Dags + description: Get DAGs with recent DagRun. + operationId: get_dags_ui security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] parameters: - name: dag_runs_limit in: query @@ -171,7 +176,11 @@ paths: anyOf: - type: string - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." title: Dag Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." - name: dag_display_name_pattern in: query required: false @@ -179,7 +188,11 @@ paths: anyOf: - type: string - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." title: Dag Display Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." 
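[Editor's note] The `dag_id_pattern` / `dag_display_name_pattern` parameters documented above take SQL LIKE expressions, so clients send `%` / `_` wildcards rather than regular expressions. A hedged sketch of a client call; the base URL, bearer token, and the exact shape of the collection response are assumptions for illustration only:

    import requests

    resp = requests.get(
        "http://localhost:8080/ui/dags",  # assumed local API server
        params={"dag_id_pattern": "%customer_%", "limit": 25},
        headers={"Authorization": "Bearer <token>"},  # token acquisition not shown
    )
    resp.raise_for_status()
    payload = resp.json()
    # Assuming the collection response carries a `dags` list, as other
    # collection endpoints in this API do:
    print([dag["dag_id"] for dag in payload.get("dags", [])])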
- name: exclude_stale in: query required: false @@ -203,6 +216,40 @@ paths: - $ref: '#/components/schemas/DagRunState' - type: 'null' title: Last Dag Run State + - name: bundle_name + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Bundle Name + - name: bundle_version + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - dag_id + title: Order By + - name: is_favorite + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Is Favorite responses: '200': description: Successful Response @@ -216,6 +263,45 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /ui/dags/{dag_id}/latest_run: + get: + tags: + - DAG + summary: Get Latest Run Info + description: Get latest run. + operationId: get_latest_run_info + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + anyOf: + - $ref: '#/components/schemas/DAGRunLightResponse' + - type: 'null' + title: Response Get Latest Run Info + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /ui/dependencies: get: tags: @@ -225,6 +311,7 @@ paths: operationId: get_dependencies security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] parameters: - name: node_id in: query @@ -262,6 +349,7 @@ paths: operationId: historical_metrics security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] parameters: - name: start_date in: query @@ -296,6 +384,23 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /ui/dashboard/dag_stats: + get: + tags: + - Dashboard + summary: Dag Stats + description: Return basic DAG stats with counts of DAGs in various states. + operationId: dag_stats + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DashboardDagStatsResponse' + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] /ui/structure/structure_data: get: tags: @@ -305,6 +410,7 @@ paths: operationId: structure_data security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] parameters: - name: dag_id in: query @@ -372,10 +478,11 @@ paths: get: tags: - Backfill - summary: List Backfills - operationId: list_backfills + summary: List Backfills Ui + operationId: list_backfills_ui security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] parameters: - name: limit in: query @@ -397,8 +504,11 @@ paths: in: query required: false schema: - type: string - default: id + type: array + items: + type: string + default: + - id title: Order By - name: dag_id in: query @@ -435,15 +545,16 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /ui/grid/{dag_id}: + /ui/grid/structure/{dag_id}: get: tags: - Grid - summary: Grid Data - description: Return grid data. - operationId: grid_data + summary: Get Dag Structure + description: Return dag structure for grid view. 
+ operationId: get_dag_structure security: - OAuth2PasswordBearer: [] + - HTTPBearer: [] parameters: - name: dag_id in: path @@ -451,28 +562,95 @@ paths: schema: type: string title: Dag Id - - name: include_upstream + - name: offset in: query required: false schema: - type: boolean - default: false - title: Include Upstream - - name: include_downstream + type: integer + minimum: 0 + default: 0 + title: Offset + - name: limit in: query required: false schema: - type: boolean - default: false - title: Include Downstream - - name: root + type: integer + minimum: 0 + default: 50 + title: Limit + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + - name: run_after_gte in: query required: false schema: anyOf: - type: string + format: date-time - type: 'null' - title: Root + title: Run After Gte + - name: run_after_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Lte + responses: + '200': + description: Successful Response + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/GridNodeResponse' + title: Response Get Dag Structure + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /ui/grid/runs/{dag_id}: + get: + tags: + - Grid + summary: Get Grid Runs + description: Get info about a run for the grid. + operationId: get_grid_runs + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id - name: offset in: query required: false @@ -481,22 +659,6 @@ paths: minimum: 0 default: 0 title: Offset - - name: run_type - in: query - required: false - schema: - type: array - items: - type: string - title: Run Type - - name: state - in: query - required: false - schema: - type: array - items: - type: string - title: State - name: limit in: query required: false @@ -509,8 +671,11 @@ paths: in: query required: false schema: - type: string - default: id + type: array + items: + type: string + default: + - id title: Order By - name: run_after_gte in: query @@ -530,6 +695,132 @@ paths: format: date-time - type: 'null' title: Run After Lte + responses: + '200': + description: Successful Response + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/GridRunsResponse' + title: Response Get Grid Runs + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /ui/grid/ti_summaries/{dag_id}/{run_id}: + get: + tags: + - Grid + summary: Get Grid Ti Summaries + description: 'Get states for TIs / "groups" of TIs. + + + Essentially this is to know what color to put in the squares in the grid. + + + The tricky part here is that we aggregate the state for groups and mapped + tasks. 
+ + + We don''t add all the TIs for mapped TIs -- we only add one entry for the + mapped task and + + its state is an aggregate of its TI states. + + + And for task groups, we add a "task" for that which is not really a task but + is just + + an entry that represents the group (so that we can show a filled in box when + the group + + is not expanded) and its state is an agg of those within it.' + operationId: get_grid_ti_summaries + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: run_id + in: path + required: true + schema: + type: string + title: Run Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/GridTISummaries' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /ui/calendar/{dag_id}: + get: + tags: + - Calendar + summary: Get Calendar + description: Get calendar data for a DAG including historical and planned DAG + runs. + operationId: get_calendar + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: granularity + in: query + required: false + schema: + enum: + - hourly + - daily + type: string + default: daily + title: Granularity - name: logical_date_gte in: query required: false @@ -554,19 +845,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/GridResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found + $ref: '#/components/schemas/CalendarTimeRangeCollectionResponse' '422': description: Validation Error content: @@ -595,6 +874,7 @@ components: properties: id: type: integer + minimum: 0.0 title: Id dag_id: type: string @@ -633,6 +913,9 @@ components: type: string format: date-time title: Updated At + dag_display_name: + type: string + title: Dag Display Name type: object required: - id @@ -646,6 +929,7 @@ components: - created_at - completed_at - updated_at + - dag_display_name title: BackfillResponse description: Base serializer for Backfill. BaseEdgeResponse: @@ -709,20 +993,49 @@ components: - type title: BaseNodeResponse description: Base Node serializer for responses. - ConfigResponse: + CalendarTimeRangeCollectionResponse: properties: - navbar_color: - type: string - title: Navbar Color - navbar_text_color: - type: string - title: Navbar Text Color - navbar_hover_color: + total_entries: + type: integer + title: Total Entries + dag_runs: + items: + $ref: '#/components/schemas/CalendarTimeRangeResponse' + type: array + title: Dag Runs + type: object + required: + - total_entries + - dag_runs + title: CalendarTimeRangeCollectionResponse + description: Response model for calendar time range results. 
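[Editor's note] The `get_grid_ti_summaries` description above says group and mapped-task entries carry an aggregate of their children's states. The actual aggregation lives server-side and is not shown in this diff; the following is only an illustrative sketch of the idea, using a simplified priority list (the real ordering is the `state_priority` list defined in parameters.py earlier in this patch):

    # Highest-priority state present among the children wins; this ordering is
    # a simplified subset chosen for illustration.
    STATE_PRIORITY: list[str | None] = ["failed", "upstream_failed", "running", "queued", "success", None]

    def aggregate_state(child_states: set[str | None]) -> str | None:
        for state in STATE_PRIORITY:
            if state in child_states:
                return state
        return None

    print(aggregate_state({"success", "failed"}))   # "failed" outranks "success"
    print(aggregate_state({"success", "running"}))  # "running" outranks "success"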
+ CalendarTimeRangeResponse: + properties: + date: type: string - title: Navbar Hover Color - navbar_text_hover_color: + format: date-time + title: Date + state: type: string - title: Navbar Text Hover Color + enum: + - queued + - running + - success + - failed + - planned + title: State + count: + type: integer + title: Count + type: object + required: + - date + - state + - count + title: CalendarTimeRangeResponse + description: Represents a summary of DAG runs for a specific calendar time range. + ConfigResponse: + properties: page_size: type: integer title: Page Size @@ -735,9 +1048,6 @@ components: instance_name: type: string title: Instance Name - instance_name_has_markup: - type: boolean - title: Instance Name Has Markup enable_swagger_ui: type: boolean title: Enable Swagger Ui @@ -747,15 +1057,6 @@ components: default_wrap: type: boolean title: Default Wrap - warn_deployment_exposure: - type: boolean - title: Warn Deployment Exposure - audit_view_excluded_events: - type: string - title: Audit View Excluded Events - audit_view_included_events: - type: string - title: Audit View Included Events test_connection: type: string title: Test Connection @@ -764,25 +1065,26 @@ components: $ref: '#/components/schemas/UIAlert' type: array title: Dashboard Alert + show_external_log_redirect: + type: boolean + title: Show External Log Redirect + external_log_name: + anyOf: + - type: string + - type: 'null' + title: External Log Name type: object required: - - navbar_color - - navbar_text_color - - navbar_hover_color - - navbar_text_hover_color - page_size - auto_refresh_interval - hide_paused_dags_by_default - instance_name - - instance_name_has_markup - enable_swagger_ui - require_confirmation_dag_change - default_wrap - - warn_deployment_exposure - - audit_view_excluded_events - - audit_view_included_events - test_connection - dashboard_alert + - show_external_log_redirect title: ConfigResponse description: configuration serializer. ConnectionHookFieldBehavior: @@ -854,6 +1156,53 @@ components: that the API server/Web UI can use this data to render connection form UI.' + DAGRunLightResponse: + properties: + id: + type: integer + title: Id + dag_id: + type: string + title: Dag Id + run_id: + type: string + title: Run Id + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + run_after: + type: string + format: date-time + title: Run After + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + state: + $ref: '#/components/schemas/DagRunState' + type: object + required: + - id + - dag_id + - run_id + - logical_date + - run_after + - start_date + - end_date + - state + title: DAGRunLightResponse + description: DAG Run serializer for responses. 
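Editor's note: for orientation, a hypothetical client call against the new calendar endpoint. The base URL, token, and DAG id are placeholders, and `/ui` endpoints are dedicated to the UI, so they may change without notice:

```python
import requests

BASE_URL = "http://localhost:8080"             # placeholder deployment
HEADERS = {"Authorization": "Bearer <token>"}  # HTTPBearer scheme from this spec

resp = requests.get(
    f"{BASE_URL}/ui/calendar/example_dag",     # 'example_dag' is hypothetical
    params={
        "granularity": "daily",                # the only other value is "hourly"
        "logical_date_gte": "2025-01-01T00:00:00Z",
    },
    headers=HEADERS,
)
resp.raise_for_status()
for cell in resp.json()["dag_runs"]:
    # Each CalendarTimeRangeResponse bucket carries a date, a state
    # (queued/running/success/failed/planned), and a count.
    print(cell["date"], cell["state"], cell["count"])
```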
DAGRunResponse: properties: dag_run_id: @@ -886,6 +1235,11 @@ components: format: date-time - type: 'null' title: End Date + duration: + anyOf: + - type: number + - type: 'null' + title: Duration data_interval_start: anyOf: - type: string @@ -916,9 +1270,16 @@ components: anyOf: - $ref: '#/components/schemas/DagRunTriggeredByType' - type: 'null' + triggering_user_name: + anyOf: + - type: string + - type: 'null' + title: Triggering User Name conf: - additionalProperties: true - type: object + anyOf: + - additionalProperties: true + type: object + - type: 'null' title: Conf note: anyOf: @@ -930,6 +1291,14 @@ components: $ref: '#/components/schemas/DagVersionResponse' type: array title: Dag Versions + bundle_version: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + dag_display_name: + type: string + title: Dag Display Name type: object required: - dag_run_id @@ -938,6 +1307,7 @@ components: - queued_at - start_date - end_date + - duration - data_interval_start - data_interval_end - run_after @@ -945,9 +1315,12 @@ components: - run_type - state - triggered_by + - triggering_user_name - conf - note - dag_versions + - bundle_version + - dag_display_name title: DAGRunResponse description: DAG Run serializer for responses. DAGRunStates: @@ -1041,6 +1414,11 @@ components: - type: string - type: 'null' title: Bundle Name + bundle_version: + anyOf: + - type: string + - type: 'null' + title: Bundle Version relative_fileloc: anyOf: - type: string @@ -1140,6 +1518,7 @@ components: - last_parsed_time - last_expired - bundle_name + - bundle_version - relative_fileloc - fileloc - description @@ -1239,6 +1618,9 @@ components: type: string format: date-time title: Created At + dag_display_name: + type: string + title: Dag Display Name bundle_url: anyOf: - type: string @@ -1253,9 +1635,32 @@ components: - bundle_name - bundle_version - created_at + - dag_display_name - bundle_url title: DagVersionResponse description: Dag Version serializer for responses. + DashboardDagStatsResponse: + properties: + active_dag_count: + type: integer + title: Active Dag Count + failed_dag_count: + type: integer + title: Failed Dag Count + running_dag_count: + type: integer + title: Running Dag Count + queued_dag_count: + type: integer + title: Queued Dag Count + type: object + required: + - active_dag_count + - failed_dag_count + - running_dag_count + - queued_dag_count + title: DashboardDagStatsResponse + description: Dashboard DAG Stats serializer for responses. EdgeResponse: properties: source_id: @@ -1298,11 +1703,49 @@ components: - text - href title: ExtraMenuItem - GridDAGRunwithTIs: + GridNodeResponse: properties: - dag_run_id: + id: type: string - title: Dag Run Id + title: Id + label: + type: string + title: Label + children: + anyOf: + - items: + $ref: '#/components/schemas/GridNodeResponse' + type: array + - type: 'null' + title: Children + is_mapped: + anyOf: + - type: boolean + - type: 'null' + title: Is Mapped + setup_teardown_type: + anyOf: + - type: string + enum: + - setup + - teardown + - type: 'null' + title: Setup Teardown Type + type: object + required: + - id + - label + - is_mapped + title: GridNodeResponse + description: Base Node serializer for responses. 
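Editor's note: `GridNodeResponse` references itself through `children`, so a DAG structure payload is a tree. A hypothetical node (values invented, shape per the schema above) and a depth-first walk over it:

```python
# Shape follows GridNodeResponse; ids and labels are invented.
node = {
    "id": "extract_group",
    "label": "extract_group",
    "is_mapped": None,                 # the group itself is not a mapped task
    "setup_teardown_type": None,
    "children": [
        {"id": "extract_group.fetch", "label": "fetch",
         "is_mapped": True, "children": None},   # mapped task: one node, many TIs
        {"id": "extract_group.cleanup", "label": "cleanup",
         "is_mapped": False, "setup_teardown_type": "teardown", "children": None},
    ],
}

def walk(n: dict, depth: int = 0) -> None:
    """Print a grid structure tree, one indented label per node."""
    print("  " * depth + n["label"])
    for child in n.get("children") or []:
        walk(child, depth + 1)

walk(node)
```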
+ GridRunsResponse:
+ properties:
+ dag_id:
+ type: string
+ title: Dag Id
+ run_id:
+ type: string
+ title: Run Id
queued_at:
anyOf:
- type: string
@@ -1326,126 +1769,48 @@
format: date-time
title: Run After
state:
- $ref: '#/components/schemas/DagRunState'
- run_type:
- $ref: '#/components/schemas/DagRunType'
- logical_date:
- anyOf:
- - type: string
- format: date-time
- - type: 'null'
- title: Logical Date
- data_interval_start:
- anyOf:
- - type: string
- format: date-time
- - type: 'null'
- title: Data Interval Start
- data_interval_end:
- anyOf:
- - type: string
- format: date-time
- - type: 'null'
- title: Data Interval End
- note:
anyOf:
- - type: string
+ - $ref: '#/components/schemas/TaskInstanceState'
- type: 'null'
- title: Note
- task_instances:
- items:
- $ref: '#/components/schemas/GridTaskInstanceSummary'
- type: array
- title: Task Instances
+ run_type:
+ $ref: '#/components/schemas/DagRunType'
+ duration:
+ type: integer
+ title: Duration
+ readOnly: true
type: object
required:
- - dag_run_id
+ - dag_id
+ - run_id
- queued_at
- start_date
- end_date
- run_after
- state
- run_type
- - logical_date
- - data_interval_start
- - data_interval_end
- - note
- - task_instances
- title: GridDAGRunwithTIs
- description: DAG Run model for the Grid UI.
- GridResponse:
+ - duration
+ title: GridRunsResponse
+ description: DAG run summary serializer for the Grid UI.
+ GridTISummaries:
properties:
- dag_runs:
+ run_id:
+ type: string
+ title: Run Id
+ dag_id:
+ type: string
+ title: Dag Id
+ task_instances:
items:
- $ref: '#/components/schemas/GridDAGRunwithTIs'
+ $ref: '#/components/schemas/LightGridTaskInstanceSummary'
type: array
- title: Dag Runs
- structure:
- $ref: '#/components/schemas/StructureDataResponse'
- type: object
- required:
- - dag_runs
- - structure
- title: GridResponse
- description: Response model for the Grid UI.
- GridTaskInstanceSummary:
- properties:
- task_id:
- type: string
- title: Task Id
- try_number:
- type: integer
- title: Try Number
- start_date:
- anyOf:
- - type: string
- format: date-time
- - type: 'null'
- title: Start Date
- end_date:
- anyOf:
- - type: string
- format: date-time
- - type: 'null'
- title: End Date
- queued_dttm:
- anyOf:
- - type: string
- format: date-time
- - type: 'null'
- title: Queued Dttm
- child_states:
- anyOf:
- - additionalProperties:
- type: integer
- type: object
- - type: 'null'
- title: Child States
- task_count:
- type: integer
- title: Task Count
- state:
- anyOf:
- - $ref: '#/components/schemas/TaskInstanceState'
- - type: 'null'
- note:
- anyOf:
- - type: string
- - type: 'null'
- title: Note
+ title: Task Instances
type: object
required:
- - task_id
- - try_number
- - start_date
- - end_date
- - queued_dttm
- - child_states
- - task_count
- - state
- - note
- title: GridTaskInstanceSummary
- description: Task Instance Summary model for the Grid UI.
+ - run_id
+ - dag_id
+ - task_instances
+ title: GridTISummaries
+ description: Per-run task instance summaries for the Grid UI.
HTTPExceptionResponse:
properties:
detail:
@@ -1483,9 +1848,47 @@
- task_instance_states
title: HistoricalMetricDataResponse
description: Historical Metric Data serializer for responses.
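Editor's note: this hunk replaces the monolithic GridResponse (runs, TIs, and structure in one payload) with the lighter GridRunsResponse/GridTISummaries pair, so a client pages runs first and pulls per-run summaries on demand. A sketch of that two-step flow, where the URL, token, DAG id, and the `order_by` value are all assumptions:

```python
import requests

BASE_URL = "http://localhost:8080"             # placeholder
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder
DAG_ID = "example_dag"                         # hypothetical DAG

# Step 1: page through the runs that become grid columns.
runs = requests.get(
    f"{BASE_URL}/ui/grid/runs/{DAG_ID}",
    params={"limit": 25, "order_by": "-run_after"},  # "-run_after" assumed valid
    headers=HEADERS,
).json()

# Step 2: fetch the LightGridTaskInstanceSummary rows per run, e.g. only
# for the columns currently scrolled into view.
for run in runs:
    summary = requests.get(
        f"{BASE_URL}/ui/grid/ti_summaries/{DAG_ID}/{run['run_id']}",
        headers=HEADERS,
    ).json()
    for ti in summary["task_instances"]:
        print(run["run_id"], ti["task_id"], ti["state"])
```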
+ LightGridTaskInstanceSummary: + properties: + task_id: + type: string + title: Task Id + state: + anyOf: + - $ref: '#/components/schemas/TaskInstanceState' + - type: 'null' + child_states: + anyOf: + - additionalProperties: + type: integer + type: object + - type: 'null' + title: Child States + min_start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Min Start Date + max_end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Max End Date + type: object + required: + - task_id + - state + - child_states + - min_start_date + - max_end_date + title: LightGridTaskInstanceSummary + description: Task Instance Summary model for the Grid UI. MenuItem: type: string enum: + - Required Actions - Assets - Audit Log - Config @@ -1781,3 +2184,6 @@ components: password: scopes: {} tokenUrl: /auth/token + HTTPBearer: + type: http + scheme: bearer diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v1-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v1-rest-api-generated.yaml deleted file mode 100644 index dce59cbefa2f1..0000000000000 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v1-rest-api-generated.yaml +++ /dev/null @@ -1,10481 +0,0 @@ -openapi: 3.1.0 -info: - title: Airflow API - description: Airflow API. All endpoints located under ``/api/v2`` can be used safely, - are stable and backward compatible. Endpoints located under ``/ui`` are dedicated - to the UI and are subject to breaking change depending on the need of the frontend. - Users should not rely on those but use the public ones instead. - version: '2' -paths: - /api/v2/assets: - get: - tags: - - Asset - summary: Get Assets - description: Get assets. - operationId: get_assets - security: - - OAuth2PasswordBearer: [] - parameters: - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: name_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Name Pattern - - name: uri_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Uri Pattern - - name: dag_ids - in: query - required: false - schema: - type: array - items: - type: string - title: Dag Ids - - name: only_active - in: query - required: false - schema: - type: boolean - default: true - title: Only Active - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/AssetCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/assets/aliases: - get: - tags: - - Asset - summary: Get Asset Aliases - description: Get asset aliases. 
- operationId: get_asset_aliases - security: - - OAuth2PasswordBearer: [] - parameters: - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: name_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Name Pattern - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/AssetAliasCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/assets/aliases/{asset_alias_id}: - get: - tags: - - Asset - summary: Get Asset Alias - description: Get an asset alias. - operationId: get_asset_alias - security: - - OAuth2PasswordBearer: [] - parameters: - - name: asset_alias_id - in: path - required: true - schema: - type: integer - title: Asset Alias Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: {} - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/assets/events: - get: - tags: - - Asset - summary: Get Asset Events - description: Get asset events. 
- operationId: get_asset_events - security: - - OAuth2PasswordBearer: [] - parameters: - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: - type: string - default: timestamp - title: Order By - - name: asset_id - in: query - required: false - schema: - anyOf: - - type: integer - - type: 'null' - title: Asset Id - - name: source_dag_id - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Source Dag Id - - name: source_task_id - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Source Task Id - - name: source_run_id - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Source Run Id - - name: source_map_index - in: query - required: false - schema: - anyOf: - - type: integer - - type: 'null' - title: Source Map Index - - name: timestamp_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Timestamp Gte - - name: timestamp_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Timestamp Lte - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/AssetEventCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - post: - tags: - - Asset - summary: Create Asset Event - description: Create asset events. - operationId: create_asset_event - security: - - OAuth2PasswordBearer: [] - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/CreateAssetEventsBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/AssetEventResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/assets/{asset_id}/materialize: - post: - tags: - - Asset - summary: Materialize Asset - description: Materialize an asset by triggering a DAG run that produces it. 
- operationId: materialize_asset - security: - - OAuth2PasswordBearer: [] - parameters: - - name: asset_id - in: path - required: true - schema: - type: integer - title: Asset Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/assets/{asset_id}/queuedEvents: - get: - tags: - - Asset - summary: Get Asset Queued Events - description: Get queued asset events for an asset. - operationId: get_asset_queued_events - security: - - OAuth2PasswordBearer: [] - parameters: - - name: asset_id - in: path - required: true - schema: - type: integer - title: Asset Id - - name: before - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Before - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/QueuedEventCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - delete: - tags: - - Asset - summary: Delete Asset Queued Events - description: Delete queued asset events for an asset. - operationId: delete_asset_queued_events - security: - - OAuth2PasswordBearer: [] - parameters: - - name: asset_id - in: path - required: true - schema: - type: integer - title: Asset Id - - name: before - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Before - responses: - '204': - description: Successful Response - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/assets/{asset_id}: - get: - tags: - - Asset - summary: Get Asset - description: Get an asset. 
- operationId: get_asset - security: - - OAuth2PasswordBearer: [] - parameters: - - name: asset_id - in: path - required: true - schema: - type: integer - title: Asset Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/AssetResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/assets/queuedEvents: - get: - tags: - - Asset - summary: Get Dag Asset Queued Events - description: Get queued asset events for a DAG. - operationId: get_dag_asset_queued_events - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: before - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Before - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/QueuedEventCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - delete: - tags: - - Asset - summary: Delete Dag Asset Queued Events - operationId: delete_dag_asset_queued_events - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: before - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Before - responses: - '204': - description: Successful Response - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents: - get: - tags: - - Asset - summary: Get Dag Asset Queued Event - description: Get a queued asset event for a DAG. 
- operationId: get_dag_asset_queued_event - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: asset_id - in: path - required: true - schema: - type: integer - title: Asset Id - - name: before - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Before - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/QueuedEventResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - delete: - tags: - - Asset - summary: Delete Dag Asset Queued Event - description: Delete a queued asset event for a DAG. - operationId: delete_dag_asset_queued_event - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: asset_id - in: path - required: true - schema: - type: integer - title: Asset Id - - name: before - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Before - responses: - '204': - description: Successful Response - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/backfills: - get: - tags: - - Backfill - summary: List Backfills - operationId: list_backfills - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: query - required: true - schema: - type: string - title: Dag Id - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - post: - tags: - - Backfill - summary: Create Backfill - operationId: create_backfill - security: - - OAuth2PasswordBearer: [] - requestBody: 
- required: true - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillPostBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/backfills/{backfill_id}: - get: - tags: - - Backfill - summary: Get Backfill - operationId: get_backfill - security: - - OAuth2PasswordBearer: [] - parameters: - - name: backfill_id - in: path - required: true - schema: - type: string - title: Backfill Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/backfills/{backfill_id}/pause: - put: - tags: - - Backfill - summary: Pause Backfill - operationId: pause_backfill - security: - - OAuth2PasswordBearer: [] - parameters: - - name: backfill_id - in: path - required: true - schema: - title: Backfill Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/backfills/{backfill_id}/unpause: - put: - tags: - - Backfill - summary: Unpause Backfill - operationId: unpause_backfill - security: - - OAuth2PasswordBearer: [] - parameters: - - name: backfill_id - in: path - required: true - schema: - title: Backfill Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: 
Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/backfills/{backfill_id}/cancel: - put: - tags: - - Backfill - summary: Cancel Backfill - operationId: cancel_backfill - security: - - OAuth2PasswordBearer: [] - parameters: - - name: backfill_id - in: path - required: true - schema: - title: Backfill Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/backfills/dry_run: - post: - tags: - - Backfill - summary: Create Backfill Dry Run - operationId: create_backfill_dry_run - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/BackfillPostBody' - required: true - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DryRunBackfillCollectionResponse' - '401': - description: Unauthorized - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '403': - description: Forbidden - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '404': - description: Not Found - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '409': - description: Conflict - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - security: - - OAuth2PasswordBearer: [] - /api/v2/connections/{connection_id}: - delete: - tags: - - Connection - summary: Delete Connection - description: Delete a connection entry. 
- operationId: delete_connection - security: - - OAuth2PasswordBearer: [] - parameters: - - name: connection_id - in: path - required: true - schema: - type: string - title: Connection Id - responses: - '204': - description: Successful Response - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - get: - tags: - - Connection - summary: Get Connection - description: Get a connection entry. - operationId: get_connection - security: - - OAuth2PasswordBearer: [] - parameters: - - name: connection_id - in: path - required: true - schema: - type: string - title: Connection Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - patch: - tags: - - Connection - summary: Patch Connection - description: Update a connection entry. - operationId: patch_connection - security: - - OAuth2PasswordBearer: [] - parameters: - - name: connection_id - in: path - required: true - schema: - type: string - title: Connection Id - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/connections: - get: - tags: - - Connection - summary: Get Connections - description: Get all connection entries. 
- operationId: get_connections - security: - - OAuth2PasswordBearer: [] - parameters: - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - - name: connection_id_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Connection Id Pattern - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - post: - tags: - - Connection - summary: Post Connection - description: Create connection entry. - operationId: post_connection - security: - - OAuth2PasswordBearer: [] - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionBody' - responses: - '201': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - patch: - tags: - - Connection - summary: Bulk Connections - description: Bulk create, update, and delete connections. - operationId: bulk_connections - security: - - OAuth2PasswordBearer: [] - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/BulkBody_ConnectionBody_' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BulkResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/connections/test: - post: - tags: - - Connection - summary: Test Connection - description: 'Test an API connection. - - - This method first creates an in-memory transient conn_id & exports that to - an env var, - - as some hook classes tries to find out the `conn` from their __init__ method - & errors out if not found. - - It also deletes the conn id env connection after the test.' 
- operationId: test_connection - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionBody' - required: true - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/ConnectionTestResponse' - '401': - description: Unauthorized - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '403': - description: Forbidden - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - security: - - OAuth2PasswordBearer: [] - /api/v2/connections/defaults: - post: - tags: - - Connection - summary: Create Default Connections - description: Create default connections. - operationId: create_default_connections - responses: - '204': - description: Successful Response - '401': - description: Unauthorized - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - '403': - description: Forbidden - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - security: - - OAuth2PasswordBearer: [] - /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}: - get: - tags: - - DagRun - summary: Get Dag Run - operationId: get_dag_run - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - delete: - tags: - - DagRun - summary: Delete Dag Run - description: Delete a DAG Run entry. - operationId: delete_dag_run - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - responses: - '204': - description: Successful Response - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - patch: - tags: - - DagRun - summary: Patch Dag Run - description: Modify a DAG Run. 
- operationId: patch_dag_run - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunPatchBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamAssetEvents: - get: - tags: - - DagRun - summary: Get Upstream Asset Events - description: If dag run is asset-triggered, return the asset events that triggered - it. - operationId: get_upstream_asset_events - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/AssetEventCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/clear: - post: - tags: - - DagRun - summary: Clear Dag Run - operationId: clear_dag_run - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunClearBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - anyOf: - - $ref: '#/components/schemas/TaskInstanceCollectionResponse' - - $ref: '#/components/schemas/DAGRunResponse' - title: Response Clear Dag Run - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - 
$ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagRuns: - get: - tags: - - DagRun - summary: Get Dag Runs - description: 'Get all DAG Runs. - - - This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for - all DAGs.' - operationId: get_dag_runs - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: run_after_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Gte - - name: run_after_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Lte - - name: logical_date_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Gte - - name: logical_date_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Lte - - name: start_date_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date Gte - - name: start_date_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date Lte - - name: end_date_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date Gte - - name: end_date_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date Lte - - name: updated_at_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Updated At Gte - - name: updated_at_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Updated At Lte - - name: run_type - in: query - required: false - schema: - type: array - items: - type: string - title: Run Type - - name: state - in: query - required: false - schema: - type: array - items: - type: string - title: State - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - post: - tags: - - DagRun - summary: Trigger Dag Run - description: Trigger a DAG. 
- operationId: trigger_dag_run - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - title: Dag Id - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/TriggerDAGRunPostBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagRuns/list: - post: - tags: - - DagRun - summary: Get List Dag Runs Batch - description: Get a list of DAG Runs. - operationId: get_list_dag_runs_batch - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - const: '~' - type: string - title: Dag Id - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunsBatchBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGRunCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dagSources/{dag_id}: - get: - tags: - - DagSource - summary: Get Dag Source - description: Get source code using file token. 
- operationId: get_dag_source - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: version_number - in: query - required: false - schema: - anyOf: - - type: integer - - type: 'null' - title: Version Number - - name: accept - in: header - required: false - schema: - type: string - enum: - - application/json - - text/plain - - '*/*' - default: '*/*' - title: Accept - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGSourceResponse' - text/plain: - schema: - type: string - example: dag code - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '406': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Acceptable - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dagStats: - get: - tags: - - DagStats - summary: Get Dag Stats - description: Get Dag statistics. - operationId: get_dag_stats - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_ids - in: query - required: false - schema: - type: array - items: - type: string - title: Dag Ids - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DagStatsCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dagReports: - get: - tags: - - DagReport - summary: Get Dag Reports - description: Get DAG report. 
- operationId: get_dag_reports - security: - - OAuth2PasswordBearer: [] - parameters: - - name: subdir - in: query - required: true - schema: - type: string - title: Subdir - responses: - '200': - description: Successful Response - content: - application/json: - schema: {} - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/config: - get: - tags: - - Config - summary: Get Config - operationId: get_config - security: - - OAuth2PasswordBearer: [] - parameters: - - name: section - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Section - - name: accept - in: header - required: false - schema: - type: string - enum: - - application/json - - text/plain - - '*/*' - default: '*/*' - title: Accept - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Config' - text/plain: - schema: - type: string - example: '[core] - - dags_folder = /opt/airflow/dags - - base_log_folder = /opt/airflow/logs - - - [smtp] - - smtp_host = localhost - - smtp_mail_from = airflow@example.com - - ' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '406': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Acceptable - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/config/section/{section}/option/{option}: - get: - tags: - - Config - summary: Get Config Value - operationId: get_config_value - security: - - OAuth2PasswordBearer: [] - parameters: - - name: section - in: path - required: true - schema: - type: string - title: Section - - name: option - in: path - required: true - schema: - type: string - title: Option - - name: accept - in: header - required: false - schema: - type: string - enum: - - application/json - - text/plain - - '*/*' - default: '*/*' - title: Accept - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/Config' - text/plain: - schema: - type: string - example: '[core] - - dags_folder = /opt/airflow/dags - - base_log_folder = /opt/airflow/logs - - ' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '406': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Acceptable - 
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dagWarnings:
-    get:
-      tags:
-      - DagWarning
-      summary: List Dag Warnings
-      description: Get a list of DAG warnings.
-      operationId: list_dag_warnings
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Dag Id
-      - name: warning_type
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - $ref: '#/components/schemas/DagWarningType'
-          - type: 'null'
-          title: Warning Type
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: dag_id
-          title: Order By
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/DAGWarningCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags:
-    get:
-      tags:
-      - DAG
-      summary: Get Dags
-      description: Get all DAGs.
-      operationId: get_dags
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: tags
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Tags
-      - name: tags_match_mode
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - enum:
-            - any
-            - all
-            type: string
-          - type: 'null'
-          title: Tags Match Mode
-      - name: owners
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Owners
-      - name: dag_id_pattern
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Dag Id Pattern
-      - name: dag_display_name_pattern
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Dag Display Name Pattern
-      - name: exclude_stale
-        in: query
-        required: false
-        schema:
-          type: boolean
-          default: true
-          title: Exclude Stale
-      - name: paused
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: boolean
-          - type: 'null'
-          title: Paused
-      - name: last_dag_run_state
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - $ref: '#/components/schemas/DagRunState'
-          - type: 'null'
-          title: Last Dag Run State
-      - name: dag_run_start_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Dag Run Start Date Gte
-      - name: dag_run_start_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Dag Run Start Date Lte
-      - name: dag_run_end_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Dag Run End Date Gte
-      - name: dag_run_end_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Dag Run End Date Lte
-      - name: dag_run_state
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Dag Run State
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: dag_id
-          title: Order By
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/DAGCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    patch:
-      tags:
-      - DAG
-      summary: Patch Dags
-      description: Patch multiple DAGs.
-      operationId: patch_dags
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: update_mask
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: array
-            items:
-              type: string
-          - type: 'null'
-          title: Update Mask
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: tags
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Tags
-      - name: tags_match_mode
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - enum:
-            - any
-            - all
-            type: string
-          - type: 'null'
-          title: Tags Match Mode
-      - name: owners
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Owners
-      - name: dag_id_pattern
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Dag Id Pattern
-      - name: exclude_stale
-        in: query
-        required: false
-        schema:
-          type: boolean
-          default: true
-          title: Exclude Stale
-      - name: paused
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: boolean
-          - type: 'null'
-          title: Paused
-      - name: last_dag_run_state
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - $ref: '#/components/schemas/DagRunState'
-          - type: 'null'
-          title: Last Dag Run State
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/DAGPatchBody'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/DAGCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}:
-    get:
-      tags:
-      - DAG
-      summary: Get Dag
-      description: Get basic information about a DAG.
-      operationId: get_dag
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/DAGResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unprocessable Entity
-    patch:
-      tags:
-      - DAG
-      summary: Patch Dag
-      description: Patch the specific DAG.
-      operationId: patch_dag
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: update_mask
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: array
-            items:
-              type: string
-          - type: 'null'
-          title: Update Mask
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/DAGPatchBody'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/DAGResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    delete:
-      tags:
-      - DAG
-      summary: Delete Dag
-      description: Delete the specific DAG.
-      operationId: delete_dag
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema: {}
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unprocessable Entity
-  /api/v2/dags/{dag_id}/details:
-    get:
-      tags:
-      - DAG
-      summary: Get Dag Details
-      description: Get details of DAG.
-      operationId: get_dag_details
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/DAGDetailsResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/eventLogs/{event_log_id}:
-    get:
-      tags:
-      - Event Log
-      summary: Get Event Log
-      operationId: get_event_log
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: event_log_id
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Event Log Id
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/EventLogResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/eventLogs:
-    get:
-      tags:
-      - Event Log
-      summary: Get Event Logs
-      description: Get all Event Logs.
-      operationId: get_event_logs
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: id
-          title: Order By
-      - name: dag_id
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Dag Id
-      - name: task_id
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Task Id
-      - name: run_id
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Run Id
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: integer
-          - type: 'null'
-          title: Map Index
-      - name: try_number
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: integer
-          - type: 'null'
-          title: Try Number
-      - name: owner
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Owner
-      - name: event
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Event
-      - name: excluded_events
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: array
-            items:
-              type: string
-          - type: 'null'
-          title: Excluded Events
-      - name: included_events
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: array
-            items:
-              type: string
-          - type: 'null'
-          title: Included Events
-      - name: before
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Before
-      - name: after
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: After
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/EventLogCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links:
-    get:
-      tags:
-      - Extra Links
-      - Task Instance
-      summary: Get Extra Links
-      description: Get extra links for task instance.
-      operationId: get_extra_links
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          type: integer
-          default: -1
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ExtraLinkCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/importErrors/{import_error_id}:
-    get:
-      tags:
-      - Import Error
-      summary: Get Import Error
-      description: Get an import error.
-      operationId: get_import_error
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: import_error_id
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Import Error Id
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ImportErrorResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/importErrors:
-    get:
-      tags:
-      - Import Error
-      summary: Get Import Errors
-      description: Get all import errors.
-      operationId: get_import_errors
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: id
-          title: Order By
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ImportErrorCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/jobs:
-    get:
-      tags:
-      - Job
-      summary: Get Jobs
-      description: Get all jobs.
-      operationId: get_jobs
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: is_alive
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: boolean
-          - type: 'null'
-          title: Is Alive
-      - name: start_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Start Date Gte
-      - name: start_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Start Date Lte
-      - name: end_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: End Date Gte
-      - name: end_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: End Date Lte
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: id
-          title: Order By
-      - name: job_state
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Job State
-      - name: job_type
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Job Type
-      - name: hostname
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Hostname
-      - name: executor_class
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Executor Class
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/JobCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/plugins:
-    get:
-      tags:
-      - Plugin
-      summary: Get Plugins
-      operationId: get_plugins
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/PluginCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/pools/{pool_name}:
-    delete:
-      tags:
-      - Pool
-      summary: Delete Pool
-      description: Delete a pool entry.
-      operationId: delete_pool
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: pool_name
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Pool Name
-      responses:
-        '204':
-          description: Successful Response
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    get:
-      tags:
-      - Pool
-      summary: Get Pool
-      description: Get a pool.
-      operationId: get_pool
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: pool_name
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Pool Name
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/PoolResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    patch:
-      tags:
-      - Pool
-      summary: Patch Pool
-      description: Update a Pool.
-      operationId: patch_pool
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: pool_name
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Pool Name
-      - name: update_mask
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: array
-            items:
-              type: string
-          - type: 'null'
-          title: Update Mask
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/PoolPatchBody'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/PoolResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/pools:
-    get:
-      tags:
-      - Pool
-      summary: Get Pools
-      description: Get all pools entries.
-      operationId: get_pools
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: id
-          title: Order By
-      - name: pool_name_pattern
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Pool Name Pattern
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/PoolCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    post:
-      tags:
-      - Pool
-      summary: Post Pool
-      description: Create a Pool.
-      operationId: post_pool
-      security:
-      - OAuth2PasswordBearer: []
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/PoolBody'
-      responses:
-        '201':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/PoolResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '409':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Conflict
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    patch:
-      tags:
-      - Pool
-      summary: Bulk Pools
-      description: Bulk create, update, and delete pools.
-      operationId: bulk_pools
-      security:
-      - OAuth2PasswordBearer: []
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/BulkBody_PoolBody_'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/BulkResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/providers:
-    get:
-      tags:
-      - Provider
-      summary: Get Providers
-      description: Get providers.
-      operationId: get_providers
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ProviderCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}:
-    get:
-      tags:
-      - XCom
-      summary: Get Xcom Entry
-      description: Get an XCom entry.
-      operationId: get_xcom_entry
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: xcom_key
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Xcom Key
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: -1
-          default: -1
-          title: Map Index
-      - name: deserialize
-        in: query
-        required: false
-        schema:
-          type: boolean
-          default: false
-          title: Deserialize
-      - name: stringify
-        in: query
-        required: false
-        schema:
-          type: boolean
-          default: false
-          title: Stringify
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                anyOf:
-                - $ref: '#/components/schemas/XComResponseNative'
-                - $ref: '#/components/schemas/XComResponseString'
-                title: Response Get Xcom Entry
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    patch:
-      tags:
-      - XCom
-      summary: Update Xcom Entry
-      description: Update an existing XCom entry.
-      operationId: update_xcom_entry
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: xcom_key
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Xcom Key
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/XComUpdateBody'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/XComResponseNative'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries:
-    get:
-      tags:
-      - XCom
-      summary: Get Xcom Entries
-      description: 'Get all XCom entries.
-
-
-        This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to
-        retrieve XCom entries for all DAGs.'
-      operationId: get_xcom_entries
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: xcom_key
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Xcom Key
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: integer
-            minimum: -1
-          - type: 'null'
-          title: Map Index
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/XComCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    post:
-      tags:
-      - XCom
-      summary: Create Xcom Entry
-      description: Create an XCom entry.
-      operationId: create_xcom_entry
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/XComCreateBody'
-      responses:
-        '201':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/XComResponseNative'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Task Instance
-      description: Get task instance.
-      operationId: get_task_instance
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    patch:
-      tags:
-      - Task Instance
-      summary: Patch Task Instance
-      description: Update a task instance.
-      operationId: patch_task_instance
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          type: integer
-          default: -1
-          title: Map Index
-      - name: update_mask
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: array
-            items:
-              type: string
-          - type: 'null'
-          title: Update Mask
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/PatchTaskInstanceBody'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '409':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Conflict
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Mapped Task Instances
-      description: Get list of mapped task instances.
-      operationId: get_mapped_task_instances
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: run_after_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Run After Gte
-      - name: run_after_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Run After Lte
-      - name: logical_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Logical Date Gte
-      - name: logical_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Logical Date Lte
-      - name: start_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Start Date Gte
-      - name: start_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Start Date Lte
-      - name: end_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: End Date Gte
-      - name: end_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: End Date Lte
-      - name: updated_at_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Updated At Gte
-      - name: updated_at_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Updated At Lte
-      - name: duration_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: number
-          - type: 'null'
-          title: Duration Gte
-      - name: duration_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: number
-          - type: 'null'
-          title: Duration Lte
-      - name: state
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: State
-      - name: pool
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Pool
-      - name: queue
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Queue
-      - name: executor
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Executor
-      - name: version_number
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: integer
-          title: Version Number
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: map_index
-          title: Order By
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Task Instance Dependencies
-      description: Get dependencies blocking task from getting scheduled.
-      operationId: get_task_instance_dependencies
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskDependencyCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Task Instance Dependencies
-      description: Get dependencies blocking task from getting scheduled.
-      operationId: get_task_instance_dependencies
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          type: integer
-          default: -1
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskDependencyCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Task Instance Tries
-      description: Get list of task instances history.
-      operationId: get_task_instance_tries
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          type: integer
-          default: -1
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceHistoryCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Mapped Task Instance Tries
-      operationId: get_mapped_task_instance_tries
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceHistoryCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Mapped Task Instance
-      description: Get task instance.
-      operationId: get_mapped_task_instance
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-    patch:
-      tags:
-      - Task Instance
-      summary: Patch Task Instance
-      description: Update a task instance.
-      operationId: patch_task_instance
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: map_index
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Map Index
-      - name: update_mask
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: array
-            items:
-              type: string
-          - type: 'null'
-          title: Update Mask
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/PatchTaskInstanceBody'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '400':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Bad Request
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '409':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Conflict
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Task Instances
-      description: 'Get list of task instances.
-
-
-        This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve
-        Task Instances for all DAGs
-
-        and DAG runs.'
-      operationId: get_task_instances
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Task Id
-      - name: run_after_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Run After Gte
-      - name: run_after_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Run After Lte
-      - name: logical_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Logical Date Gte
-      - name: logical_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Logical Date Lte
-      - name: start_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Start Date Gte
-      - name: start_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Start Date Lte
-      - name: end_date_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: End Date Gte
-      - name: end_date_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: End Date Lte
-      - name: updated_at_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Updated At Gte
-      - name: updated_at_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-            format: date-time
-          - type: 'null'
-          title: Updated At Lte
-      - name: duration_gte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: number
-          - type: 'null'
-          title: Duration Gte
-      - name: duration_lte
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: number
-          - type: 'null'
-          title: Duration Lte
-      - name: task_display_name_pattern
-        in: query
-        required: false
-        schema:
-          anyOf:
-          - type: string
-          - type: 'null'
-          title: Task Display Name Pattern
-      - name: state
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: State
-      - name: pool
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Pool
-      - name: queue
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Queue
-      - name: executor
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: string
-          title: Executor
-      - name: version_number
-        in: query
-        required: false
-        schema:
-          type: array
-          items:
-            type: integer
-          title: Version Number
-      - name: limit
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 50
-          title: Limit
-      - name: offset
-        in: query
-        required: false
-        schema:
-          type: integer
-          minimum: 0
-          default: 0
-          title: Offset
-      - name: order_by
-        in: query
-        required: false
-        schema:
-          type: string
-          default: map_index
-          title: Order By
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list:
-    post:
-      tags:
-      - Task Instance
-      summary: Get Task Instances Batch
-      description: Get list of task instances.
-      operationId: get_task_instances_batch
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          const: '~'
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          const: '~'
-          type: string
-          title: Dag Run Id
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/TaskInstancesBatchBody'
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceCollectionResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Task Instance Try Details
-      description: Get task instance details by try number.
-      operationId: get_task_instance_try_details
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: task_try_number
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Task Try Number
-      - name: map_index
-        in: query
-        required: false
-        schema:
-          type: integer
-          default: -1
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceHistoryResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}:
-    get:
-      tags:
-      - Task Instance
-      summary: Get Mapped Task Instance Try Details
-      operationId: get_mapped_task_instance_try_details
-      security:
-      - OAuth2PasswordBearer: []
-      parameters:
-      - name: dag_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Id
-      - name: dag_run_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Dag Run Id
-      - name: task_id
-        in: path
-        required: true
-        schema:
-          type: string
-          title: Task Id
-      - name: task_try_number
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Task Try Number
-      - name: map_index
-        in: path
-        required: true
-        schema:
-          type: integer
-          title: Map Index
-      responses:
-        '200':
-          description: Successful Response
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TaskInstanceHistoryResponse'
-        '401':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Unauthorized
-        '403':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Forbidden
-        '404':
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPExceptionResponse'
-          description: Not Found
-        '422':
-          description: Validation Error
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/HTTPValidationError'
-  /api/v2/dags/{dag_id}/clearTaskInstances:
-    post:
-      tags:
-      - Task Instance
-      summary: Post Clear Task Instances
-      description: Clear task instances.
- operationId: post_clear_task_instances - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/ClearTaskInstancesBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/TaskInstanceCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dry_run: - patch: - tags: - - Task Instance - summary: Patch Task Instance Dry Run - description: Update a task instance dry_run mode. - operationId: patch_task_instance_dry_run - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - - name: task_id - in: path - required: true - schema: - type: string - title: Task Id - - name: map_index - in: path - required: true - schema: - type: integer - title: Map Index - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/PatchTaskInstanceBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/TaskInstanceCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dry_run: - patch: - tags: - - Task Instance - summary: Patch Task Instance Dry Run - description: Update a task instance dry_run mode. 
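Because ClearTaskInstancesBody (defined further down in the components section) defaults dry_run to true, the clear endpoint is naturally called twice: once to preview, once to act. A minimal sketch, same assumed server and token as above:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

# First call: dry_run defaults to true, so this only reports what *would* be cleared.
body = {"dag_run_id": "manual__2025-01-01", "only_failed": True, "include_downstream": True}
preview = requests.post(f"{BASE}/dags/example_dag/clearTaskInstances", json=body, headers=HEADERS)
print([ti["task_id"] for ti in preview.json()["task_instances"]])

# Second call: disable dry_run to actually clear the instances.
body["dry_run"] = False
requests.post(f"{BASE}/dags/example_dag/clearTaskInstances", json=body, headers=HEADERS)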
- operationId: patch_task_instance_dry_run - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - - name: task_id - in: path - required: true - schema: - type: string - title: Task Id - - name: map_index - in: query - required: false - schema: - type: integer - default: -1 - title: Map Index - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/PatchTaskInstanceBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/TaskInstanceCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/tasks: - get: - tags: - - Task - summary: Get Tasks - description: Get tasks for DAG. - operationId: get_tasks - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: order_by - in: query - required: false - schema: - type: string - default: task_id - title: Order By - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/TaskCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/tasks/{task_id}: - get: - tags: - - Task - summary: Get Task - description: Get simplified representation of a task. 
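The dry_run PATCH variants above accept the same update_mask query parameter as the real patch and return the task instances the change would touch, without applying it. A sketch for the unmapped route; the PatchTaskInstanceBody field name used here is an assumption, since that schema is defined elsewhere:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

url = f"{BASE}/dags/example_dag/dagRuns/manual__2025-01-01/taskInstances/extract/dry_run"
resp = requests.patch(
    url,
    params={"update_mask": ["new_state"], "map_index": -1},  # "new_state" assumed from PatchTaskInstanceBody
    json={"new_state": "success"},
    headers=HEADERS,
)
# The dry_run route only previews; nothing is persisted.
print(resp.json()["total_entries"])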
- operationId: get_task - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: task_id - in: path - required: true - schema: - title: Task Id - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/TaskResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/variables/{variable_key}: - delete: - tags: - - Variable - summary: Delete Variable - description: Delete a variable entry. - operationId: delete_variable - security: - - OAuth2PasswordBearer: [] - parameters: - - name: variable_key - in: path - required: true - schema: - type: string - title: Variable Key - responses: - '204': - description: Successful Response - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - get: - tags: - - Variable - summary: Get Variable - description: Get a variable entry. - operationId: get_variable - security: - - OAuth2PasswordBearer: [] - parameters: - - name: variable_key - in: path - required: true - schema: - type: string - title: Variable Key - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VariableResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - patch: - tags: - - Variable - summary: Patch Variable - description: Update a variable by key. 
- operationId: patch_variable - security: - - OAuth2PasswordBearer: [] - parameters: - - name: variable_key - in: path - required: true - schema: - type: string - title: Variable Key - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/VariableBody' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VariableResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/variables: - get: - tags: - - Variable - summary: Get Variables - description: Get all Variables entries. - operationId: get_variables - security: - - OAuth2PasswordBearer: [] - parameters: - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - - name: variable_key_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Variable Key Pattern - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VariableCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - post: - tags: - - Variable - summary: Post Variable - description: Create a variable. - operationId: post_variable - security: - - OAuth2PasswordBearer: [] - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/VariableBody' - responses: - '201': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VariableResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Conflict - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - patch: - tags: - - Variable - summary: Bulk Variables - description: Bulk create, update, and delete variables. 
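Together, the variables routes above give full CRUD plus server-side key filtering. A sketch of create, partial update via update_mask, and filtered listing; the VariableBody field names (key, value, description) are assumptions here, as that schema sits outside this section:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

# Create: 201 on success, 409 Conflict if the key already exists.
var = {"key": "etl_batch_size", "value": "500", "description": "rows per batch"}
requests.post(f"{BASE}/variables", json=var, headers=HEADERS).raise_for_status()

# Patch only the value; update_mask leaves the description untouched.
requests.patch(
    f"{BASE}/variables/etl_batch_size",
    params={"update_mask": ["value"]},
    json={"key": "etl_batch_size", "value": "1000"},
    headers=HEADERS,
)

# List with server-side key filtering.
page = requests.get(f"{BASE}/variables", params={"variable_key_pattern": "etl_"}, headers=HEADERS)
print(page.json()["total_entries"])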
- operationId: bulk_variables - security: - - OAuth2PasswordBearer: [] - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/BulkBody_VariableBody_' - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/BulkResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}: - get: - tags: - - Task Instance - summary: Get Log - description: Get logs for a specific task instance. - operationId: get_log - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: dag_run_id - in: path - required: true - schema: - type: string - title: Dag Run Id - - name: task_id - in: path - required: true - schema: - type: string - title: Task Id - - name: try_number - in: path - required: true - schema: - type: integer - exclusiveMinimum: 0 - title: Try Number - - name: full_content - in: query - required: false - schema: - type: boolean - default: false - title: Full Content - - name: map_index - in: query - required: false - schema: - type: integer - default: -1 - title: Map Index - - name: token - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Token - - name: accept - in: header - required: false - schema: - type: string - enum: - - application/json - - text/plain - - '*/*' - default: '*/*' - title: Accept - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/TaskInstancesLogResponse' - text/plain: - schema: - type: string - example: 'content - - ' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/parseDagFile/{file_token}: - put: - tags: - - DAG Parsing - summary: Reparse Dag File - description: Request re-parsing a DAG file. 
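The log endpoint above content-negotiates on the Accept header: text/plain streams the raw log, while application/json returns the structured TaskInstancesLogResponse. A brief sketch, same assumed setup:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

url = f"{BASE}/dags/example_dag/dagRuns/manual__2025-01-01/taskInstances/extract/logs/1"

# Raw text, with the full (non-truncated) content requested.
text = requests.get(url, headers={**HEADERS, "Accept": "text/plain"},
                    params={"full_content": True}).text

# Or keep JSON and get the structured payload instead.
js = requests.get(url, headers={**HEADERS, "Accept": "application/json"}).json()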
- operationId: reparse_dag_file - security: - - OAuth2PasswordBearer: [] - parameters: - - name: file_token - in: path - required: true - schema: - type: string - title: File Token - responses: - '201': - description: Successful Response - content: - application/json: - schema: - type: 'null' - title: Response Reparse Dag File - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dagTags: - get: - tags: - - DAG - summary: Get Dag Tags - description: Get all DAG tags. - operationId: get_dag_tags - security: - - OAuth2PasswordBearer: [] - parameters: - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: order_by - in: query - required: false - schema: - type: string - default: name - title: Order By - - name: tag_name_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Tag Name Pattern - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGTagCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagVersions/{version_number}: - get: - tags: - - DagVersion - summary: Get Dag Version - description: Get one Dag Version. - operationId: get_dag_version - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: version_number - in: path - required: true - schema: - type: integer - title: Version Number - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DagVersionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/dags/{dag_id}/dagVersions: - get: - tags: - - DagVersion - summary: Get Dag Versions - description: 'Get all DAG Versions. - - - This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions - for all DAGs.' 
- operationId: get_dag_versions - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: version_number - in: query - required: false - schema: - type: integer - title: Version Number - - name: bundle_name - in: query - required: false - schema: - type: string - title: Bundle Name - - name: bundle_version - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Bundle Version - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/DAGVersionCollectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/monitor/health: - get: - tags: - - Monitor - summary: Get Health - operationId: get_health - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/HealthInfoResponse' - /api/v2/version: - get: - tags: - - Version - summary: Get Version - description: Get version information. - operationId: get_version - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/VersionInfo' - /api/v2/auth/login: - get: - tags: - - Login - summary: Login - description: Redirect to the login URL depending on the AuthManager configured. - operationId: login - parameters: - - name: next - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Next - responses: - '200': - description: Successful Response - content: - application/json: - schema: {} - '307': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Temporary Redirect - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - /api/v2/auth/logout: - get: - tags: - - Login - summary: Logout - description: Logout the user. 
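Note that get_health and get_version above are the only operations in this area without a declared security requirement, which makes them suitable for unauthenticated probes. A minimal sketch against an assumed local server:

import requests

BASE = "http://localhost:8080/api/v2"  # assumed; no token needed for these two routes

health = requests.get(f"{BASE}/monitor/health").json()   # HealthInfoResponse
print(health["metadatabase"]["status"], health["scheduler"]["status"])

version = requests.get(f"{BASE}/version").json()          # VersionInfo
print(version)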
- operationId: logout - parameters: - - name: next - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Next - responses: - '200': - description: Successful Response - content: - application/json: - schema: {} - '307': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Temporary Redirect - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' -components: - schemas: - AppBuilderMenuItemResponse: - properties: - name: - type: string - title: Name - href: - anyOf: - - type: string - - type: 'null' - title: Href - category: - anyOf: - - type: string - - type: 'null' - title: Category - additionalProperties: true - type: object - required: - - name - title: AppBuilderMenuItemResponse - description: Serializer for AppBuilder Menu Item responses. - AppBuilderViewResponse: - properties: - name: - anyOf: - - type: string - - type: 'null' - title: Name - category: - anyOf: - - type: string - - type: 'null' - title: Category - view: - anyOf: - - type: string - - type: 'null' - title: View - label: - anyOf: - - type: string - - type: 'null' - title: Label - additionalProperties: true - type: object - title: AppBuilderViewResponse - description: Serializer for AppBuilder View responses. - AssetAliasCollectionResponse: - properties: - asset_aliases: - items: - $ref: '#/components/schemas/AssetAliasResponse' - type: array - title: Asset Aliases - total_entries: - type: integer - title: Total Entries - type: object - required: - - asset_aliases - - total_entries - title: AssetAliasCollectionResponse - description: Asset alias collection response. - AssetAliasResponse: - properties: - id: - type: integer - title: Id - name: - type: string - title: Name - group: - type: string - title: Group - type: object - required: - - id - - name - - group - title: AssetAliasResponse - description: Asset alias serializer for responses. - AssetCollectionResponse: - properties: - assets: - items: - $ref: '#/components/schemas/AssetResponse' - type: array - title: Assets - total_entries: - type: integer - title: Total Entries - type: object - required: - - assets - - total_entries - title: AssetCollectionResponse - description: Asset collection response. - AssetEventCollectionResponse: - properties: - asset_events: - items: - $ref: '#/components/schemas/AssetEventResponse' - type: array - title: Asset Events - total_entries: - type: integer - title: Total Entries - type: object - required: - - asset_events - - total_entries - title: AssetEventCollectionResponse - description: Asset event collection response. 
- AssetEventResponse: - properties: - id: - type: integer - title: Id - asset_id: - type: integer - title: Asset Id - uri: - anyOf: - - type: string - - type: 'null' - title: Uri - name: - anyOf: - - type: string - - type: 'null' - title: Name - group: - anyOf: - - type: string - - type: 'null' - title: Group - extra: - anyOf: - - additionalProperties: true - type: object - - type: 'null' - title: Extra - source_task_id: - anyOf: - - type: string - - type: 'null' - title: Source Task Id - source_dag_id: - anyOf: - - type: string - - type: 'null' - title: Source Dag Id - source_run_id: - anyOf: - - type: string - - type: 'null' - title: Source Run Id - source_map_index: - type: integer - title: Source Map Index - created_dagruns: - items: - $ref: '#/components/schemas/DagRunAssetReference' - type: array - title: Created Dagruns - timestamp: - type: string - format: date-time - title: Timestamp - type: object - required: - - id - - asset_id - - source_map_index - - created_dagruns - - timestamp - title: AssetEventResponse - description: Asset event serializer for responses. - AssetResponse: - properties: - id: - type: integer - title: Id - name: - type: string - title: Name - uri: - type: string - title: Uri - group: - type: string - title: Group - extra: - anyOf: - - additionalProperties: true - type: object - - type: 'null' - title: Extra - created_at: - type: string - format: date-time - title: Created At - updated_at: - type: string - format: date-time - title: Updated At - consuming_dags: - items: - $ref: '#/components/schemas/DagScheduleAssetReference' - type: array - title: Consuming Dags - producing_tasks: - items: - $ref: '#/components/schemas/TaskOutletAssetReference' - type: array - title: Producing Tasks - aliases: - items: - $ref: '#/components/schemas/AssetAliasResponse' - type: array - title: Aliases - type: object - required: - - id - - name - - uri - - group - - created_at - - updated_at - - consuming_dags - - producing_tasks - - aliases - title: AssetResponse - description: Asset serializer for responses. - BackfillCollectionResponse: - properties: - backfills: - items: - $ref: '#/components/schemas/BackfillResponse' - type: array - title: Backfills - total_entries: - type: integer - title: Total Entries - type: object - required: - - backfills - - total_entries - title: BackfillCollectionResponse - description: Backfill Collection serializer for responses. - BackfillPostBody: - properties: - dag_id: - type: string - title: Dag Id - from_date: - type: string - format: date-time - title: From Date - to_date: - type: string - format: date-time - title: To Date - run_backwards: - type: boolean - title: Run Backwards - default: false - dag_run_conf: - additionalProperties: true - type: object - title: Dag Run Conf - default: {} - reprocess_behavior: - $ref: '#/components/schemas/ReprocessBehavior' - default: none - max_active_runs: - type: integer - title: Max Active Runs - default: 10 - additionalProperties: false - type: object - required: - - dag_id - - from_date - - to_date - title: BackfillPostBody - description: Object used for create backfill request. 
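BackfillPostBody above requires only dag_id, from_date, and to_date; everything else falls back to the defaults shown (run_backwards=false, max_active_runs=10, reprocess_behavior=none). A sketch of a create request; the POST /api/v2/backfills path is assumed from earlier in this spec, outside the excerpt:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

body = {
    "dag_id": "example_dag",
    "from_date": "2025-01-01T00:00:00Z",
    "to_date": "2025-01-07T00:00:00Z",
    "reprocess_behavior": "none",  # explicit, though it is also the default
}
resp = requests.post(f"{BASE}/backfills", json=body, headers=HEADERS)  # path assumed
print(resp.status_code, resp.json())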
- BackfillResponse: - properties: - id: - type: integer - title: Id - dag_id: - type: string - title: Dag Id - from_date: - type: string - format: date-time - title: From Date - to_date: - type: string - format: date-time - title: To Date - dag_run_conf: - additionalProperties: true - type: object - title: Dag Run Conf - is_paused: - type: boolean - title: Is Paused - reprocess_behavior: - $ref: '#/components/schemas/ReprocessBehavior' - max_active_runs: - type: integer - title: Max Active Runs - created_at: - type: string - format: date-time - title: Created At - completed_at: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Completed At - updated_at: - type: string - format: date-time - title: Updated At - type: object - required: - - id - - dag_id - - from_date - - to_date - - dag_run_conf - - is_paused - - reprocess_behavior - - max_active_runs - - created_at - - completed_at - - updated_at - title: BackfillResponse - description: Base serializer for Backfill. - BaseInfoResponse: - properties: - status: - anyOf: - - type: string - - type: 'null' - title: Status - type: object - required: - - status - title: BaseInfoResponse - description: Base info serializer for responses. - BulkAction: - type: string - enum: - - create - - delete - - update - title: BulkAction - description: Bulk Action to be performed on the used model. - BulkActionNotOnExistence: - type: string - enum: - - fail - - skip - title: BulkActionNotOnExistence - description: Bulk Action to be taken if the entity does not exist. - BulkActionOnExistence: - type: string - enum: - - fail - - skip - - overwrite - title: BulkActionOnExistence - description: Bulk Action to be taken if the entity already exists or not. - BulkActionResponse: - properties: - success: - items: - type: string - type: array - title: Success - description: A list of unique id/key representing successful operations. - default: [] - errors: - items: - additionalProperties: true - type: object - type: array - title: Errors - description: A list of errors encountered during the operation, each containing - details about the issue. - default: [] - type: object - title: BulkActionResponse - description: 'Serializer for individual bulk action responses. - - - Represents the outcome of a single bulk operation (create, update, or delete). - - The response includes a list of successful keys and any errors encountered - during the operation. - - This structure helps users understand which key actions succeeded and which - failed.' 
- BulkBody_ConnectionBody_: - properties: - actions: - items: - oneOf: - - $ref: '#/components/schemas/BulkCreateAction_ConnectionBody_' - - $ref: '#/components/schemas/BulkUpdateAction_ConnectionBody_' - - $ref: '#/components/schemas/BulkDeleteAction_ConnectionBody_' - type: array - title: Actions - additionalProperties: false - type: object - required: - - actions - title: BulkBody[ConnectionBody] - BulkBody_PoolBody_: - properties: - actions: - items: - oneOf: - - $ref: '#/components/schemas/BulkCreateAction_PoolBody_' - - $ref: '#/components/schemas/BulkUpdateAction_PoolBody_' - - $ref: '#/components/schemas/BulkDeleteAction_PoolBody_' - type: array - title: Actions - additionalProperties: false - type: object - required: - - actions - title: BulkBody[PoolBody] - BulkBody_VariableBody_: - properties: - actions: - items: - oneOf: - - $ref: '#/components/schemas/BulkCreateAction_VariableBody_' - - $ref: '#/components/schemas/BulkUpdateAction_VariableBody_' - - $ref: '#/components/schemas/BulkDeleteAction_VariableBody_' - type: array - title: Actions - additionalProperties: false - type: object - required: - - actions - title: BulkBody[VariableBody] - BulkCreateAction_ConnectionBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - $ref: '#/components/schemas/ConnectionBody' - type: array - title: Entities - description: A list of entities to be created. - action_on_existence: - $ref: '#/components/schemas/BulkActionOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkCreateAction[ConnectionBody] - BulkCreateAction_PoolBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - $ref: '#/components/schemas/PoolBody' - type: array - title: Entities - description: A list of entities to be created. - action_on_existence: - $ref: '#/components/schemas/BulkActionOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkCreateAction[PoolBody] - BulkCreateAction_VariableBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - $ref: '#/components/schemas/VariableBody' - type: array - title: Entities - description: A list of entities to be created. - action_on_existence: - $ref: '#/components/schemas/BulkActionOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkCreateAction[VariableBody] - BulkDeleteAction_ConnectionBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - type: string - type: array - title: Entities - description: A list of entity id/key to be deleted. - action_on_non_existence: - $ref: '#/components/schemas/BulkActionNotOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkDeleteAction[ConnectionBody] - BulkDeleteAction_PoolBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - type: string - type: array - title: Entities - description: A list of entity id/key to be deleted. 
- action_on_non_existence: - $ref: '#/components/schemas/BulkActionNotOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkDeleteAction[PoolBody] - BulkDeleteAction_VariableBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - type: string - type: array - title: Entities - description: A list of entity id/key to be deleted. - action_on_non_existence: - $ref: '#/components/schemas/BulkActionNotOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkDeleteAction[VariableBody] - BulkResponse: - properties: - create: - anyOf: - - $ref: '#/components/schemas/BulkActionResponse' - - type: 'null' - description: Details of the bulk create operation, including successful - keys and errors. - update: - anyOf: - - $ref: '#/components/schemas/BulkActionResponse' - - type: 'null' - description: Details of the bulk update operation, including successful - keys and errors. - delete: - anyOf: - - $ref: '#/components/schemas/BulkActionResponse' - - type: 'null' - description: Details of the bulk delete operation, including successful - keys and errors. - type: object - title: BulkResponse - description: 'Serializer for responses to bulk entity operations. - - - This represents the results of create, update, and delete actions performed - on entity in bulk. - - Each action (if requested) is represented as a field containing details about - successful keys and any encountered errors. - - Fields are populated in the response only if the respective action was part - of the request, else are set None.' - BulkUpdateAction_ConnectionBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - $ref: '#/components/schemas/ConnectionBody' - type: array - title: Entities - description: A list of entities to be updated. - action_on_non_existence: - $ref: '#/components/schemas/BulkActionNotOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkUpdateAction[ConnectionBody] - BulkUpdateAction_PoolBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - $ref: '#/components/schemas/PoolBody' - type: array - title: Entities - description: A list of entities to be updated. - action_on_non_existence: - $ref: '#/components/schemas/BulkActionNotOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkUpdateAction[PoolBody] - BulkUpdateAction_VariableBody_: - properties: - action: - $ref: '#/components/schemas/BulkAction' - description: The action to be performed on the entities. - entities: - items: - $ref: '#/components/schemas/VariableBody' - type: array - title: Entities - description: A list of entities to be updated. 
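The Bulk* schemas above compose into one PATCH request that mixes create, update, and delete actions: delete entities are plain keys, while create/update entities are full bodies. A sketch against the bulk variables route; the VariableBody field names remain an assumption:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

body = {
    "actions": [
        {"action": "create",
         "entities": [{"key": "a", "value": "1"}, {"key": "b", "value": "2"}],
         "action_on_existence": "overwrite"},   # fail | skip | overwrite
        {"action": "delete",
         "entities": ["obsolete_key"],          # keys only, per BulkDeleteAction
         "action_on_non_existence": "skip"},    # fail | skip
    ]
}
result = requests.patch(f"{BASE}/variables", json=body, headers=HEADERS).json()
# BulkResponse populates only the actions that were requested; others come back null.
print(result["create"]["success"], result["delete"]["errors"])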
- action_on_non_existence: - $ref: '#/components/schemas/BulkActionNotOnExistence' - default: fail - additionalProperties: false - type: object - required: - - action - - entities - title: BulkUpdateAction[VariableBody] - ClearTaskInstancesBody: - properties: - dry_run: - type: boolean - title: Dry Run - default: true - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - only_failed: - type: boolean - title: Only Failed - default: true - only_running: - type: boolean - title: Only Running - default: false - reset_dag_runs: - type: boolean - title: Reset Dag Runs - default: true - task_ids: - anyOf: - - items: - anyOf: - - type: string - - prefixItems: - - type: string - - type: integer - type: array - maxItems: 2 - minItems: 2 - type: array - - type: 'null' - title: Task Ids - dag_run_id: - anyOf: - - type: string - - type: 'null' - title: Dag Run Id - include_upstream: - type: boolean - title: Include Upstream - default: false - include_downstream: - type: boolean - title: Include Downstream - default: false - include_future: - type: boolean - title: Include Future - default: false - include_past: - type: boolean - title: Include Past - default: false - additionalProperties: false - type: object - title: ClearTaskInstancesBody - description: Request body for Clear Task Instances endpoint. - Config: - properties: - sections: - items: - $ref: '#/components/schemas/ConfigSection' - type: array - title: Sections - additionalProperties: false - type: object - required: - - sections - title: Config - description: List of config sections with their options. - ConfigOption: - properties: - key: - type: string - title: Key - value: - anyOf: - - type: string - - prefixItems: - - type: string - - type: string - type: array - maxItems: 2 - minItems: 2 - title: Value - additionalProperties: false - type: object - required: - - key - - value - title: ConfigOption - description: Config option. - ConfigSection: - properties: - name: - type: string - title: Name - options: - items: - $ref: '#/components/schemas/ConfigOption' - type: array - title: Options - additionalProperties: false - type: object - required: - - name - - options - title: ConfigSection - description: Config Section Schema. - ConnectionBody: - properties: - connection_id: - type: string - maxLength: 200 - pattern: ^[\w.-]+$ - title: Connection Id - conn_type: - type: string - title: Conn Type - description: - anyOf: - - type: string - - type: 'null' - title: Description - host: - anyOf: - - type: string - - type: 'null' - title: Host - login: - anyOf: - - type: string - - type: 'null' - title: Login - schema: - anyOf: - - type: string - - type: 'null' - title: Schema - port: - anyOf: - - type: integer - - type: 'null' - title: Port - password: - anyOf: - - type: string - - type: 'null' - title: Password - extra: - anyOf: - - type: string - - type: 'null' - title: Extra - additionalProperties: false - type: object - required: - - connection_id - - conn_type - title: ConnectionBody - description: Connection Serializer for requests body. - ConnectionCollectionResponse: - properties: - connections: - items: - $ref: '#/components/schemas/ConnectionResponse' - type: array - title: Connections - total_entries: - type: integer - title: Total Entries - type: object - required: - - connections - - total_entries - title: ConnectionCollectionResponse - description: Connection Collection serializer for responses. 
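ConnectionBody above constrains connection_id to the pattern ^[\w.-]+$ (max 200 chars) and models extra as a JSON-encoded string rather than an object. A sketch of building a valid payload; the POST /api/v2/connections path is assumed from earlier in the spec:

import re
import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

conn = {
    "connection_id": "warehouse.prod",   # must match ^[\w.-]+$, <= 200 chars
    "conn_type": "postgres",
    "host": "db.internal",
    "port": 5432,
    "login": "etl",
    "password": "s3cret",
    "extra": '{"sslmode": "require"}',   # a JSON string, not a nested object
}
assert re.fullmatch(r"[\w.-]+", conn["connection_id"])
requests.post(f"{BASE}/connections", json=conn, headers=HEADERS)  # path assumed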
- ConnectionResponse: - properties: - connection_id: - type: string - title: Connection Id - conn_type: - type: string - title: Conn Type - description: - anyOf: - - type: string - - type: 'null' - title: Description - host: - anyOf: - - type: string - - type: 'null' - title: Host - login: - anyOf: - - type: string - - type: 'null' - title: Login - schema: - anyOf: - - type: string - - type: 'null' - title: Schema - port: - anyOf: - - type: integer - - type: 'null' - title: Port - password: - anyOf: - - type: string - - type: 'null' - title: Password - extra: - anyOf: - - type: string - - type: 'null' - title: Extra - type: object - required: - - connection_id - - conn_type - - description - - host - - login - - schema - - port - - password - - extra - title: ConnectionResponse - description: Connection serializer for responses. - ConnectionTestResponse: - properties: - status: - type: boolean - title: Status - message: - type: string - title: Message - type: object - required: - - status - - message - title: ConnectionTestResponse - description: Connection Test serializer for responses. - CreateAssetEventsBody: - properties: - asset_id: - type: integer - title: Asset Id - extra: - additionalProperties: true - type: object - title: Extra - additionalProperties: false - type: object - required: - - asset_id - title: CreateAssetEventsBody - description: Create asset events request. - DAGCollectionResponse: - properties: - dags: - items: - $ref: '#/components/schemas/DAGResponse' - type: array - title: Dags - total_entries: - type: integer - title: Total Entries - type: object - required: - - dags - - total_entries - title: DAGCollectionResponse - description: DAG Collection serializer for responses. - DAGDetailsResponse: - properties: - dag_id: - type: string - title: Dag Id - dag_display_name: - type: string - title: Dag Display Name - is_paused: - type: boolean - title: Is Paused - is_stale: - type: boolean - title: Is Stale - last_parsed_time: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Parsed Time - last_expired: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Expired - bundle_name: - anyOf: - - type: string - - type: 'null' - title: Bundle Name - relative_fileloc: - anyOf: - - type: string - - type: 'null' - title: Relative Fileloc - fileloc: - type: string - title: Fileloc - description: - anyOf: - - type: string - - type: 'null' - title: Description - timetable_summary: - anyOf: - - type: string - - type: 'null' - title: Timetable Summary - timetable_description: - anyOf: - - type: string - - type: 'null' - title: Timetable Description - tags: - items: - $ref: '#/components/schemas/DagTagResponse' - type: array - title: Tags - max_active_tasks: - type: integer - title: Max Active Tasks - max_active_runs: - anyOf: - - type: integer - - type: 'null' - title: Max Active Runs - max_consecutive_failed_dag_runs: - type: integer - title: Max Consecutive Failed Dag Runs - has_task_concurrency_limits: - type: boolean - title: Has Task Concurrency Limits - has_import_errors: - type: boolean - title: Has Import Errors - next_dagrun_logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Logical Date - next_dagrun_data_interval_start: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Data Interval Start - next_dagrun_data_interval_end: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Data Interval End - next_dagrun_run_after: 
- anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Run After - owners: - items: - type: string - type: array - title: Owners - catchup: - type: boolean - title: Catchup - dag_run_timeout: - anyOf: - - type: string - format: duration - - type: 'null' - title: Dag Run Timeout - asset_expression: - anyOf: - - additionalProperties: true - type: object - - type: 'null' - title: Asset Expression - doc_md: - anyOf: - - type: string - - type: 'null' - title: Doc Md - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - is_paused_upon_creation: - anyOf: - - type: boolean - - type: 'null' - title: Is Paused Upon Creation - params: - anyOf: - - additionalProperties: true - type: object - - type: 'null' - title: Params - render_template_as_native_obj: - type: boolean - title: Render Template As Native Obj - template_search_path: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Template Search Path - timezone: - anyOf: - - type: string - - type: 'null' - title: Timezone - last_parsed: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Parsed - file_token: - type: string - title: File Token - description: Return file token. - readOnly: true - concurrency: - type: integer - title: Concurrency - description: Return max_active_tasks as concurrency. - readOnly: true - latest_dag_version: - anyOf: - - $ref: '#/components/schemas/DagVersionResponse' - - type: 'null' - description: Return the latest DagVersion. - readOnly: true - type: object - required: - - dag_id - - dag_display_name - - is_paused - - is_stale - - last_parsed_time - - last_expired - - bundle_name - - relative_fileloc - - fileloc - - description - - timetable_summary - - timetable_description - - tags - - max_active_tasks - - max_active_runs - - max_consecutive_failed_dag_runs - - has_task_concurrency_limits - - has_import_errors - - next_dagrun_logical_date - - next_dagrun_data_interval_start - - next_dagrun_data_interval_end - - next_dagrun_run_after - - owners - - catchup - - dag_run_timeout - - asset_expression - - doc_md - - start_date - - end_date - - is_paused_upon_creation - - params - - render_template_as_native_obj - - template_search_path - - timezone - - last_parsed - - file_token - - concurrency - - latest_dag_version - title: DAGDetailsResponse - description: Specific serializer for DAG Details responses. - DAGPatchBody: - properties: - is_paused: - type: boolean - title: Is Paused - additionalProperties: false - type: object - required: - - is_paused - title: DAGPatchBody - description: Dag Serializer for updatable bodies. 
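DAGPatchBody above exposes exactly one updatable field, so pausing or unpausing a DAG is a one-line body. A sketch, with the PATCH /api/v2/dags/{dag_id} path assumed from earlier in the spec:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

requests.patch(f"{BASE}/dags/example_dag",            # path assumed
               json={"is_paused": True}, headers=HEADERS)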
- DAGResponse: - properties: - dag_id: - type: string - title: Dag Id - dag_display_name: - type: string - title: Dag Display Name - is_paused: - type: boolean - title: Is Paused - is_stale: - type: boolean - title: Is Stale - last_parsed_time: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Parsed Time - last_expired: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Expired - bundle_name: - anyOf: - - type: string - - type: 'null' - title: Bundle Name - relative_fileloc: - anyOf: - - type: string - - type: 'null' - title: Relative Fileloc - fileloc: - type: string - title: Fileloc - description: - anyOf: - - type: string - - type: 'null' - title: Description - timetable_summary: - anyOf: - - type: string - - type: 'null' - title: Timetable Summary - timetable_description: - anyOf: - - type: string - - type: 'null' - title: Timetable Description - tags: - items: - $ref: '#/components/schemas/DagTagResponse' - type: array - title: Tags - max_active_tasks: - type: integer - title: Max Active Tasks - max_active_runs: - anyOf: - - type: integer - - type: 'null' - title: Max Active Runs - max_consecutive_failed_dag_runs: - type: integer - title: Max Consecutive Failed Dag Runs - has_task_concurrency_limits: - type: boolean - title: Has Task Concurrency Limits - has_import_errors: - type: boolean - title: Has Import Errors - next_dagrun_logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Logical Date - next_dagrun_data_interval_start: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Data Interval Start - next_dagrun_data_interval_end: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Data Interval End - next_dagrun_run_after: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Next Dagrun Run After - owners: - items: - type: string - type: array - title: Owners - file_token: - type: string - title: File Token - description: Return file token. - readOnly: true - type: object - required: - - dag_id - - dag_display_name - - is_paused - - is_stale - - last_parsed_time - - last_expired - - bundle_name - - relative_fileloc - - fileloc - - description - - timetable_summary - - timetable_description - - tags - - max_active_tasks - - max_active_runs - - max_consecutive_failed_dag_runs - - has_task_concurrency_limits - - has_import_errors - - next_dagrun_logical_date - - next_dagrun_data_interval_start - - next_dagrun_data_interval_end - - next_dagrun_run_after - - owners - - file_token - title: DAGResponse - description: DAG serializer for responses. - DAGRunClearBody: - properties: - dry_run: - type: boolean - title: Dry Run - default: true - only_failed: - type: boolean - title: Only Failed - default: false - additionalProperties: false - type: object - title: DAGRunClearBody - description: DAG Run serializer for clear endpoint body. - DAGRunCollectionResponse: - properties: - dag_runs: - items: - $ref: '#/components/schemas/DAGRunResponse' - type: array - title: Dag Runs - total_entries: - type: integer - title: Total Entries - type: object - required: - - dag_runs - - total_entries - title: DAGRunCollectionResponse - description: DAG Run Collection serializer for responses. 
- DAGRunPatchBody: - properties: - state: - anyOf: - - $ref: '#/components/schemas/DAGRunPatchStates' - - type: 'null' - note: - anyOf: - - type: string - maxLength: 1000 - - type: 'null' - title: Note - additionalProperties: false - type: object - title: DAGRunPatchBody - description: DAG Run Serializer for PATCH requests. - DAGRunPatchStates: - type: string - enum: - - queued - - success - - failed - title: DAGRunPatchStates - description: Enum for DAG Run states when updating a DAG Run. - DAGRunResponse: - properties: - dag_run_id: - type: string - title: Dag Run Id - dag_id: - type: string - title: Dag Id - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - queued_at: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Queued At - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - data_interval_start: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval Start - data_interval_end: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval End - run_after: - type: string - format: date-time - title: Run After - last_scheduling_decision: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Last Scheduling Decision - run_type: - $ref: '#/components/schemas/DagRunType' - state: - $ref: '#/components/schemas/DagRunState' - triggered_by: - anyOf: - - $ref: '#/components/schemas/DagRunTriggeredByType' - - type: 'null' - conf: - additionalProperties: true - type: object - title: Conf - note: - anyOf: - - type: string - - type: 'null' - title: Note - dag_versions: - items: - $ref: '#/components/schemas/DagVersionResponse' - type: array - title: Dag Versions - type: object - required: - - dag_run_id - - dag_id - - logical_date - - queued_at - - start_date - - end_date - - data_interval_start - - data_interval_end - - run_after - - last_scheduling_decision - - run_type - - state - - triggered_by - - conf - - note - - dag_versions - title: DAGRunResponse - description: DAG Run serializer for responses. 
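DAGRunPatchStates above deliberately restricts writable run states to queued, success, and failed; running cannot be set directly. A sketch of marking a run failed with a note (capped at 1000 chars); the PATCH dagRuns path is assumed from earlier in the spec:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

body = {"state": "failed", "note": "marked failed after upstream data issue"}
requests.patch(f"{BASE}/dags/example_dag/dagRuns/manual__2025-01-01",  # path assumed
               json=body, headers=HEADERS)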
- DAGRunsBatchBody: - properties: - order_by: - anyOf: - - type: string - - type: 'null' - title: Order By - page_offset: - type: integer - minimum: 0.0 - title: Page Offset - default: 0 - page_limit: - type: integer - minimum: 0.0 - title: Page Limit - default: 100 - dag_ids: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Dag Ids - states: - anyOf: - - items: - anyOf: - - $ref: '#/components/schemas/DagRunState' - - type: 'null' - type: array - - type: 'null' - title: States - run_after_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Gte - run_after_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Lte - logical_date_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Gte - logical_date_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Lte - start_date_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date Gte - start_date_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date Lte - end_date_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date Gte - end_date_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date Lte - additionalProperties: false - type: object - title: DAGRunsBatchBody - description: List DAG Runs body for batch endpoint. - DAGSourceResponse: - properties: - content: - anyOf: - - type: string - - type: 'null' - title: Content - dag_id: - type: string - title: Dag Id - version_number: - anyOf: - - type: integer - - type: 'null' - title: Version Number - type: object - required: - - content - - dag_id - - version_number - title: DAGSourceResponse - description: DAG Source serializer for responses. - DAGTagCollectionResponse: - properties: - tags: - items: - type: string - type: array - title: Tags - total_entries: - type: integer - title: Total Entries - type: object - required: - - tags - - total_entries - title: DAGTagCollectionResponse - description: DAG Tags Collection serializer for responses. - DAGVersionCollectionResponse: - properties: - dag_versions: - items: - $ref: '#/components/schemas/DagVersionResponse' - type: array - title: Dag Versions - total_entries: - type: integer - title: Total Entries - type: object - required: - - dag_versions - - total_entries - title: DAGVersionCollectionResponse - description: DAG Version Collection serializer for responses. - DAGWarningCollectionResponse: - properties: - dag_warnings: - items: - $ref: '#/components/schemas/DAGWarningResponse' - type: array - title: Dag Warnings - total_entries: - type: integer - title: Total Entries - type: object - required: - - dag_warnings - - total_entries - title: DAGWarningCollectionResponse - description: DAG warning collection serializer for responses. - DAGWarningResponse: - properties: - dag_id: - type: string - title: Dag Id - warning_type: - $ref: '#/components/schemas/DagWarningType' - message: - type: string - title: Message - timestamp: - type: string - format: date-time - title: Timestamp - type: object - required: - - dag_id - - warning_type - - message - - timestamp - title: DAGWarningResponse - description: DAG Warning serializer for responses. 
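DAGRunsBatchBody above supports compound filtering (ids, states, and paired *_gte/*_lte date bounds) plus paging. A sketch of a batch query; both the POST /api/v2/dags/~/dagRuns/list path and the '-' prefix for descending order_by are assumptions taken from conventions elsewhere in the API, not from this excerpt:

import requests

BASE = "http://localhost:8080/api/v2"                 # assumed
HEADERS = {"Authorization": "Bearer <access-token>"}

body = {
    "dag_ids": ["example_dag", "other_dag"],
    "states": ["failed"],
    "start_date_gte": "2025-01-01T00:00:00Z",
    "order_by": "-start_date",  # '-' prefix for descending is assumed
    "page_limit": 50,
}
resp = requests.post(f"{BASE}/dags/~/dagRuns/list",   # batch path assumed
                     json=body, headers=HEADERS)
print(resp.json()["total_entries"])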
- DagProcessorInfoResponse: - properties: - status: - anyOf: - - type: string - - type: 'null' - title: Status - latest_dag_processor_heartbeat: - anyOf: - - type: string - - type: 'null' - title: Latest Dag Processor Heartbeat - type: object - required: - - status - - latest_dag_processor_heartbeat - title: DagProcessorInfoResponse - description: DagProcessor info serializer for responses. - DagRunAssetReference: - properties: - run_id: - type: string - title: Run Id - dag_id: - type: string - title: Dag Id - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - start_date: - type: string - format: date-time - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - state: - type: string - title: State - data_interval_start: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval Start - data_interval_end: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval End - additionalProperties: false - type: object - required: - - run_id - - dag_id - - logical_date - - start_date - - end_date - - state - - data_interval_start - - data_interval_end - title: DagRunAssetReference - description: DAGRun serializer for asset responses. - DagRunState: - type: string - enum: - - queued - - running - - success - - failed - title: DagRunState - description: 'All possible states that a DagRun can be in. - - - These are "shared" with TaskInstanceState in some parts of the code, - - so please ensure that their values always match the ones with the - - same name in TaskInstanceState.' - DagRunTriggeredByType: - type: string - enum: - - cli - - operator - - rest_api - - ui - - test - - timetable - - asset - - backfill - title: DagRunTriggeredByType - description: Class with TriggeredBy types for DagRun. - DagRunType: - type: string - enum: - - backfill - - scheduled - - manual - - asset_triggered - title: DagRunType - description: Class with DagRun types. - DagScheduleAssetReference: - properties: - dag_id: - type: string - title: Dag Id - created_at: - type: string - format: date-time - title: Created At - updated_at: - type: string - format: date-time - title: Updated At - additionalProperties: false - type: object - required: - - dag_id - - created_at - - updated_at - title: DagScheduleAssetReference - description: DAG schedule reference serializer for assets. - DagStatsCollectionResponse: - properties: - dags: - items: - $ref: '#/components/schemas/DagStatsResponse' - type: array - title: Dags - total_entries: - type: integer - title: Total Entries - type: object - required: - - dags - - total_entries - title: DagStatsCollectionResponse - description: DAG Stats Collection serializer for responses. - DagStatsResponse: - properties: - dag_id: - type: string - title: Dag Id - stats: - items: - $ref: '#/components/schemas/DagStatsStateResponse' - type: array - title: Stats - type: object - required: - - dag_id - - stats - title: DagStatsResponse - description: DAG Stats serializer for responses. - DagStatsStateResponse: - properties: - state: - $ref: '#/components/schemas/DagRunState' - count: - type: integer - title: Count - type: object - required: - - state - - count - title: DagStatsStateResponse - description: DagStatsState serializer for responses. 
- DagTagResponse: - properties: - name: - type: string - title: Name - dag_id: - type: string - title: Dag Id - type: object - required: - - name - - dag_id - title: DagTagResponse - description: DAG Tag serializer for responses. - DagVersionResponse: - properties: - id: - type: string - format: uuid - title: Id - version_number: - type: integer - title: Version Number - dag_id: - type: string - title: Dag Id - bundle_name: - anyOf: - - type: string - - type: 'null' - title: Bundle Name - bundle_version: - anyOf: - - type: string - - type: 'null' - title: Bundle Version - created_at: - type: string - format: date-time - title: Created At - bundle_url: - anyOf: - - type: string - - type: 'null' - title: Bundle Url - readOnly: true - type: object - required: - - id - - version_number - - dag_id - - bundle_name - - bundle_version - - created_at - - bundle_url - title: DagVersionResponse - description: Dag Version serializer for responses. - DagWarningType: - type: string - enum: - - asset conflict - - non-existent pool - title: DagWarningType - description: 'Enum for DAG warning types. - - - This is the set of allowable values for the ``warning_type`` field - - in the DagWarning model.' - DryRunBackfillCollectionResponse: - properties: - backfills: - items: - $ref: '#/components/schemas/DryRunBackfillResponse' - type: array - title: Backfills - total_entries: - type: integer - title: Total Entries - type: object - required: - - backfills - - total_entries - title: DryRunBackfillCollectionResponse - description: Backfill collection serializer for responses in dry-run mode. - DryRunBackfillResponse: - properties: - logical_date: - type: string - format: date-time - title: Logical Date - type: object - required: - - logical_date - title: DryRunBackfillResponse - description: Backfill serializer for responses in dry-run mode. - EventLogCollectionResponse: - properties: - event_logs: - items: - $ref: '#/components/schemas/EventLogResponse' - type: array - title: Event Logs - total_entries: - type: integer - title: Total Entries - type: object - required: - - event_logs - - total_entries - title: EventLogCollectionResponse - description: Event Log Collection Response. - EventLogResponse: - properties: - event_log_id: - type: integer - title: Event Log Id - when: - type: string - format: date-time - title: When - dag_id: - anyOf: - - type: string - - type: 'null' - title: Dag Id - task_id: - anyOf: - - type: string - - type: 'null' - title: Task Id - run_id: - anyOf: - - type: string - - type: 'null' - title: Run Id - map_index: - anyOf: - - type: integer - - type: 'null' - title: Map Index - try_number: - anyOf: - - type: integer - - type: 'null' - title: Try Number - event: - type: string - title: Event - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - owner: - anyOf: - - type: string - - type: 'null' - title: Owner - extra: - anyOf: - - type: string - - type: 'null' - title: Extra - type: object - required: - - event_log_id - - when - - dag_id - - task_id - - run_id - - map_index - - try_number - - event - - logical_date - - owner - - extra - title: EventLogResponse - description: Event Log Response. 
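EventLogCollectionResponse pairs an `event_logs` page with `total_entries`, which is enough to drive offset pagination. A sketch of that loop, assuming the collection is served from a /api/v2/eventLogs endpoint with `limit`/`offset` query parameters (the path is not shown in this hunk):

```python
# Sketch: page through event log entries; path and query parameters assumed.
import requests

BASE = "http://localhost:8080"
HEADERS = {"Authorization": "Bearer <jwt>"}

def iter_event_logs(page_size: int = 100):
    offset = 0
    while True:
        resp = requests.get(
            f"{BASE}/api/v2/eventLogs",
            params={"limit": page_size, "offset": offset},
            headers=HEADERS,
        )
        resp.raise_for_status()
        payload = resp.json()              # EventLogCollectionResponse
        yield from payload["event_logs"]
        offset += page_size
        if offset >= payload["total_entries"]:
            break

for entry in iter_event_logs():
    print(entry["when"], entry["event"], entry["dag_id"])
```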
- ExtraLinkCollectionResponse: - properties: - extra_links: - additionalProperties: - anyOf: - - type: string - - type: 'null' - type: object - title: Extra Links - total_entries: - type: integer - title: Total Entries - type: object - required: - - extra_links - - total_entries - title: ExtraLinkCollectionResponse - description: Extra Links Response. - FastAPIAppResponse: - properties: - app: - type: string - title: App - url_prefix: - type: string - title: Url Prefix - name: - type: string - title: Name - additionalProperties: true - type: object - required: - - app - - url_prefix - - name - title: FastAPIAppResponse - description: Serializer for Plugin FastAPI App responses. - FastAPIRootMiddlewareResponse: - properties: - middleware: - type: string - title: Middleware - name: - type: string - title: Name - additionalProperties: true - type: object - required: - - middleware - - name - title: FastAPIRootMiddlewareResponse - description: Serializer for Plugin FastAPI root middleware responses. - HTTPExceptionResponse: - properties: - detail: - anyOf: - - type: string - - additionalProperties: true - type: object - title: Detail - type: object - required: - - detail - title: HTTPExceptionResponse - description: HTTPException Model used for error response. - HTTPValidationError: - properties: - detail: - items: - $ref: '#/components/schemas/ValidationError' - type: array - title: Detail - type: object - title: HTTPValidationError - HealthInfoResponse: - properties: - metadatabase: - $ref: '#/components/schemas/BaseInfoResponse' - scheduler: - $ref: '#/components/schemas/SchedulerInfoResponse' - triggerer: - $ref: '#/components/schemas/TriggererInfoResponse' - dag_processor: - anyOf: - - $ref: '#/components/schemas/DagProcessorInfoResponse' - - type: 'null' - type: object - required: - - metadatabase - - scheduler - - triggerer - title: HealthInfoResponse - description: Health serializer for responses. - ImportErrorCollectionResponse: - properties: - import_errors: - items: - $ref: '#/components/schemas/ImportErrorResponse' - type: array - title: Import Errors - total_entries: - type: integer - title: Total Entries - type: object - required: - - import_errors - - total_entries - title: ImportErrorCollectionResponse - description: Import Error Collection Response. - ImportErrorResponse: - properties: - import_error_id: - type: integer - title: Import Error Id - timestamp: - type: string - format: date-time - title: Timestamp - filename: - type: string - title: Filename - bundle_name: - anyOf: - - type: string - - type: 'null' - title: Bundle Name - stack_trace: - type: string - title: Stack Trace - type: object - required: - - import_error_id - - timestamp - - filename - - bundle_name - - stack_trace - title: ImportErrorResponse - description: Import Error Response. - JobCollectionResponse: - properties: - jobs: - items: - $ref: '#/components/schemas/JobResponse' - type: array - title: Jobs - total_entries: - type: integer - title: Total Entries - type: object - required: - - jobs - - total_entries - title: JobCollectionResponse - description: Job Collection Response. 
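HealthInfoResponse makes `dag_processor` nullable while the other components are required, so a probe has to handle the missing case. A sketch, assuming the payload is served from /api/v2/monitor/health (the path is not shown here):

```python
# Sketch of a liveness probe built on HealthInfoResponse; only the schema
# comes from this file, the path is an assumption.
import requests

resp = requests.get(
    "http://localhost:8080/api/v2/monitor/health",
    headers={"Authorization": "Bearer <jwt>"},
)
resp.raise_for_status()
health = resp.json()

for component in ("metadatabase", "scheduler", "triggerer", "dag_processor"):
    info = health.get(component)
    if info is None:                 # dag_processor is nullable in the schema
        print(f"{component}: not reported")
        continue
    print(f"{component}: {info.get('status')}")
```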
- JobResponse: - properties: - id: - type: integer - title: Id - dag_id: - anyOf: - - type: string - - type: 'null' - title: Dag Id - state: - anyOf: - - type: string - - type: 'null' - title: State - job_type: - anyOf: - - type: string - - type: 'null' - title: Job Type - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - latest_heartbeat: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Latest Heartbeat - executor_class: - anyOf: - - type: string - - type: 'null' - title: Executor Class - hostname: - anyOf: - - type: string - - type: 'null' - title: Hostname - unixname: - anyOf: - - type: string - - type: 'null' - title: Unixname - type: object - required: - - id - - dag_id - - state - - job_type - - start_date - - end_date - - latest_heartbeat - - executor_class - - hostname - - unixname - title: JobResponse - description: Job serializer for responses. - PatchTaskInstanceBody: - properties: - new_state: - anyOf: - - $ref: '#/components/schemas/TaskInstanceState' - - type: 'null' - note: - anyOf: - - type: string - maxLength: 1000 - - type: 'null' - title: Note - include_upstream: - type: boolean - title: Include Upstream - default: false - include_downstream: - type: boolean - title: Include Downstream - default: false - include_future: - type: boolean - title: Include Future - default: false - include_past: - type: boolean - title: Include Past - default: false - additionalProperties: false - type: object - title: PatchTaskInstanceBody - description: Request body for the Patch Task Instance endpoint. - PluginCollectionResponse: - properties: - plugins: - items: - $ref: '#/components/schemas/PluginResponse' - type: array - title: Plugins - total_entries: - type: integer - title: Total Entries - type: object - required: - - plugins - - total_entries - title: PluginCollectionResponse - description: Plugin Collection serializer. - PluginResponse: - properties: - name: - type: string - title: Name - macros: - items: - type: string - type: array - title: Macros - flask_blueprints: - items: - type: string - type: array - title: Flask Blueprints - fastapi_apps: - items: - $ref: '#/components/schemas/FastAPIAppResponse' - type: array - title: Fastapi Apps - fastapi_root_middlewares: - items: - $ref: '#/components/schemas/FastAPIRootMiddlewareResponse' - type: array - title: Fastapi Root Middlewares - appbuilder_views: - items: - $ref: '#/components/schemas/AppBuilderViewResponse' - type: array - title: Appbuilder Views - appbuilder_menu_items: - items: - $ref: '#/components/schemas/AppBuilderMenuItemResponse' - type: array - title: Appbuilder Menu Items - global_operator_extra_links: - items: - type: string - type: array - title: Global Operator Extra Links - operator_extra_links: - items: - type: string - type: array - title: Operator Extra Links - source: - type: string - title: Source - listeners: - items: - type: string - type: array - title: Listeners - timetables: - items: - type: string - type: array - title: Timetables - type: object - required: - - name - - macros - - flask_blueprints - - fastapi_apps - - fastapi_root_middlewares - - appbuilder_views - - appbuilder_menu_items - - global_operator_extra_links - - operator_extra_links - - source - - listeners - - timetables - title: PluginResponse - description: Plugin serializer.
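PatchTaskInstanceBody combines an optional `new_state`/`note` with the four `include_*` toggles. A hypothetical sketch of using it; the PATCH path is an assumption modelled on the other task-instance routes in this spec:

```python
# Sketch: mark a task instance as failed and attach a note using
# PatchTaskInstanceBody. The PATCH path is an assumption; the body fields
# come from the schema above.
import requests

body = {
    "new_state": "failed",        # a TaskInstanceState value; omit to keep
    "note": "Manually failed while investigating an upstream data issue.",
    "include_downstream": True,   # also apply to downstream tasks
}
resp = requests.patch(
    "http://localhost:8080/api/v2/dags/example_dag/dagRuns/manual__2025-01-01"
    "/taskInstances/extract",
    json=body,
    headers={"Authorization": "Bearer <jwt>"},
)
resp.raise_for_status()
```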
- PoolBody: - properties: - name: - type: string - maxLength: 256 - title: Name - slots: - type: integer - title: Slots - description: - anyOf: - - type: string - - type: 'null' - title: Description - include_deferred: - type: boolean - title: Include Deferred - default: false - additionalProperties: false - type: object - required: - - name - - slots - title: PoolBody - description: Pool serializer for post bodies. - PoolCollectionResponse: - properties: - pools: - items: - $ref: '#/components/schemas/PoolResponse' - type: array - title: Pools - total_entries: - type: integer - title: Total Entries - type: object - required: - - pools - - total_entries - title: PoolCollectionResponse - description: Pool Collection serializer for responses. - PoolPatchBody: - properties: - pool: - anyOf: - - type: string - - type: 'null' - title: Pool - slots: - anyOf: - - type: integer - - type: 'null' - title: Slots - description: - anyOf: - - type: string - - type: 'null' - title: Description - include_deferred: - anyOf: - - type: boolean - - type: 'null' - title: Include Deferred - additionalProperties: false - type: object - title: PoolPatchBody - description: Pool serializer for patch bodies. - PoolResponse: - properties: - name: - type: string - title: Name - slots: - type: integer - title: Slots - description: - anyOf: - - type: string - - type: 'null' - title: Description - include_deferred: - type: boolean - title: Include Deferred - occupied_slots: - type: integer - title: Occupied Slots - running_slots: - type: integer - title: Running Slots - queued_slots: - type: integer - title: Queued Slots - scheduled_slots: - type: integer - title: Scheduled Slots - open_slots: - type: integer - title: Open Slots - deferred_slots: - type: integer - title: Deferred Slots - type: object - required: - - name - - slots - - description - - include_deferred - - occupied_slots - - running_slots - - queued_slots - - scheduled_slots - - open_slots - - deferred_slots - title: PoolResponse - description: Pool serializer for responses. - ProviderCollectionResponse: - properties: - providers: - items: - $ref: '#/components/schemas/ProviderResponse' - type: array - title: Providers - total_entries: - type: integer - title: Total Entries - type: object - required: - - providers - - total_entries - title: ProviderCollectionResponse - description: Provider Collection serializer for responses. - ProviderResponse: - properties: - package_name: - type: string - title: Package Name - description: - type: string - title: Description - version: - type: string - title: Version - type: object - required: - - package_name - - description - - version - title: ProviderResponse - description: Provider serializer for responses. - QueuedEventCollectionResponse: - properties: - queued_events: - items: - $ref: '#/components/schemas/QueuedEventResponse' - type: array - title: Queued Events - total_entries: - type: integer - title: Total Entries - type: object - required: - - queued_events - - total_entries - title: QueuedEventCollectionResponse - description: Queued Event Collection serializer for responses. - QueuedEventResponse: - properties: - dag_id: - type: string - title: Dag Id - asset_id: - type: integer - title: Asset Id - created_at: - type: string - format: date-time - title: Created At - type: object - required: - - dag_id - - asset_id - - created_at - title: QueuedEventResponse - description: Queued Event serializer for responses.
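PoolBody requires `name` and `slots`, while every PoolPatchBody field is nullable, so partial updates send only the changed keys. A sketch, assuming the usual /api/v2/pools routes (not shown in this hunk):

```python
# Sketch: create a pool with PoolBody, then resize it with PoolPatchBody.
# The /api/v2/pools paths are assumptions; field names come from the schemas.
import requests

BASE = "http://localhost:8080"
HEADERS = {"Authorization": "Bearer <jwt>"}

# POST body: name and slots are required, the rest have defaults.
requests.post(f"{BASE}/api/v2/pools", headers=HEADERS, json={
    "name": "etl_pool",
    "slots": 8,
    "description": "Slots for ETL-heavy tasks",
    "include_deferred": False,
}).raise_for_status()

# PATCH body: every field is nullable, so send only what should change.
requests.patch(f"{BASE}/api/v2/pools/etl_pool", headers=HEADERS,
               json={"slots": 16}).raise_for_status()
```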
- ReprocessBehavior: - type: string - enum: - - failed - - completed - - none - title: ReprocessBehavior - description: 'Internal enum for setting reprocess behavior in a backfill. - - - :meta private:' - SchedulerInfoResponse: - properties: - status: - anyOf: - - type: string - - type: 'null' - title: Status - latest_scheduler_heartbeat: - anyOf: - - type: string - - type: 'null' - title: Latest Scheduler Heartbeat - type: object - required: - - status - - latest_scheduler_heartbeat - title: SchedulerInfoResponse - description: Scheduler info serializer for responses. - StructuredLogMessage: - properties: - timestamp: - type: string - format: date-time - title: Timestamp - event: - type: string - title: Event - additionalProperties: true - type: object - required: - - event - title: StructuredLogMessage - description: An individual log message. - TaskCollectionResponse: - properties: - tasks: - items: - $ref: '#/components/schemas/TaskResponse' - type: array - title: Tasks - total_entries: - type: integer - title: Total Entries - type: object - required: - - tasks - - total_entries - title: TaskCollectionResponse - description: Task collection serializer for responses. - TaskDependencyCollectionResponse: - properties: - dependencies: - items: - $ref: '#/components/schemas/TaskDependencyResponse' - type: array - title: Dependencies - type: object - required: - - dependencies - title: TaskDependencyCollectionResponse - description: Task scheduling dependencies collection serializer for responses. - TaskDependencyResponse: - properties: - name: - type: string - title: Name - reason: - type: string - title: Reason - type: object - required: - - name - - reason - title: TaskDependencyResponse - description: Task Dependency serializer for responses. - TaskInstanceCollectionResponse: - properties: - task_instances: - items: - $ref: '#/components/schemas/TaskInstanceResponse' - type: array - title: Task Instances - total_entries: - type: integer - title: Total Entries - type: object - required: - - task_instances - - total_entries - title: TaskInstanceCollectionResponse - description: Task Instance Collection serializer for responses. - TaskInstanceHistoryCollectionResponse: - properties: - task_instances: - items: - $ref: '#/components/schemas/TaskInstanceHistoryResponse' - type: array - title: Task Instances - total_entries: - type: integer - title: Total Entries - type: object - required: - - task_instances - - total_entries - title: TaskInstanceHistoryCollectionResponse - description: TaskInstanceHistory Collection serializer for responses. 
- TaskInstanceHistoryResponse: - properties: - task_id: - type: string - title: Task Id - dag_id: - type: string - title: Dag Id - dag_run_id: - type: string - title: Dag Run Id - map_index: - type: integer - title: Map Index - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - duration: - anyOf: - - type: number - - type: 'null' - title: Duration - state: - anyOf: - - $ref: '#/components/schemas/TaskInstanceState' - - type: 'null' - try_number: - type: integer - title: Try Number - max_tries: - type: integer - title: Max Tries - task_display_name: - type: string - title: Task Display Name - hostname: - anyOf: - - type: string - - type: 'null' - title: Hostname - unixname: - anyOf: - - type: string - - type: 'null' - title: Unixname - pool: - type: string - title: Pool - pool_slots: - type: integer - title: Pool Slots - queue: - anyOf: - - type: string - - type: 'null' - title: Queue - priority_weight: - anyOf: - - type: integer - - type: 'null' - title: Priority Weight - operator: - anyOf: - - type: string - - type: 'null' - title: Operator - queued_when: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Queued When - scheduled_when: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Scheduled When - pid: - anyOf: - - type: integer - - type: 'null' - title: Pid - executor: - anyOf: - - type: string - - type: 'null' - title: Executor - executor_config: - type: string - title: Executor Config - dag_version: - anyOf: - - $ref: '#/components/schemas/DagVersionResponse' - - type: 'null' - type: object - required: - - task_id - - dag_id - - dag_run_id - - map_index - - start_date - - end_date - - duration - - state - - try_number - - max_tries - - task_display_name - - hostname - - unixname - - pool - - pool_slots - - queue - - priority_weight - - operator - - queued_when - - scheduled_when - - pid - - executor - - executor_config - - dag_version - title: TaskInstanceHistoryResponse - description: TaskInstanceHistory serializer for responses. 
- TaskInstanceResponse: - properties: - id: - type: string - title: Id - task_id: - type: string - title: Task Id - dag_id: - type: string - title: Dag Id - dag_run_id: - type: string - title: Dag Run Id - map_index: - type: integer - title: Map Index - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - run_after: - type: string - format: date-time - title: Run After - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - duration: - anyOf: - - type: number - - type: 'null' - title: Duration - state: - anyOf: - - $ref: '#/components/schemas/TaskInstanceState' - - type: 'null' - try_number: - type: integer - title: Try Number - max_tries: - type: integer - title: Max Tries - task_display_name: - type: string - title: Task Display Name - hostname: - anyOf: - - type: string - - type: 'null' - title: Hostname - unixname: - anyOf: - - type: string - - type: 'null' - title: Unixname - pool: - type: string - title: Pool - pool_slots: - type: integer - title: Pool Slots - queue: - anyOf: - - type: string - - type: 'null' - title: Queue - priority_weight: - anyOf: - - type: integer - - type: 'null' - title: Priority Weight - operator: - anyOf: - - type: string - - type: 'null' - title: Operator - queued_when: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Queued When - scheduled_when: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Scheduled When - pid: - anyOf: - - type: integer - - type: 'null' - title: Pid - executor: - anyOf: - - type: string - - type: 'null' - title: Executor - executor_config: - type: string - title: Executor Config - note: - anyOf: - - type: string - - type: 'null' - title: Note - rendered_map_index: - anyOf: - - type: string - - type: 'null' - title: Rendered Map Index - rendered_fields: - additionalProperties: true - type: object - title: Rendered Fields - trigger: - anyOf: - - $ref: '#/components/schemas/TriggerResponse' - - type: 'null' - triggerer_job: - anyOf: - - $ref: '#/components/schemas/JobResponse' - - type: 'null' - dag_version: - anyOf: - - $ref: '#/components/schemas/DagVersionResponse' - - type: 'null' - type: object - required: - - id - - task_id - - dag_id - - dag_run_id - - map_index - - logical_date - - run_after - - start_date - - end_date - - duration - - state - - try_number - - max_tries - - task_display_name - - hostname - - unixname - - pool - - pool_slots - - queue - - priority_weight - - operator - - queued_when - - scheduled_when - - pid - - executor - - executor_config - - note - - rendered_map_index - - trigger - - triggerer_job - - dag_version - title: TaskInstanceResponse - description: TaskInstance serializer for responses. - TaskInstanceState: - type: string - enum: - - removed - - scheduled - - queued - - running - - success - - restarting - - failed - - up_for_retry - - up_for_reschedule - - upstream_failed - - skipped - - deferred - title: TaskInstanceState - description: 'All possible states that a Task Instance can be in. - - - Note that None is also allowed, so always use this in a type hint with Optional.' 
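Because TaskInstanceResponse allows `state` to be null (see the note on TaskInstanceState above), client code should branch on the missing case before comparing enum values. A client-side sketch; the terminal-state set is an illustrative choice, not defined by the API:

```python
# Client-side sketch: the schema allows state to be null ("Note that None is
# also allowed"), so treat it explicitly. State strings are copied from the
# TaskInstanceState enum above; which ones count as "terminal" is our own
# assumption, not part of the spec.
TERMINAL_STATES = {"success", "failed", "removed", "skipped", "upstream_failed"}

def describe(ti: dict) -> str:
    """Summarize a TaskInstanceResponse payload."""
    state = ti.get("state")            # may be None for a not-yet-run TI
    if state is None:
        return f"{ti['task_id']}: no state yet"
    suffix = " (terminal)" if state in TERMINAL_STATES else ""
    return f"{ti['task_id']}: {state}{suffix}"

print(describe({"task_id": "extract", "state": None}))
print(describe({"task_id": "load", "state": "success"}))
```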
- TaskInstancesBatchBody: - properties: - dag_ids: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Dag Ids - dag_run_ids: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Dag Run Ids - task_ids: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Task Ids - state: - anyOf: - - items: - anyOf: - - $ref: '#/components/schemas/TaskInstanceState' - - type: 'null' - type: array - - type: 'null' - title: State - run_after_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Gte - run_after_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Lte - logical_date_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Gte - logical_date_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Lte - start_date_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date Gte - start_date_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date Lte - end_date_gte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date Gte - end_date_lte: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date Lte - duration_gte: - anyOf: - - type: number - - type: 'null' - title: Duration Gte - duration_lte: - anyOf: - - type: number - - type: 'null' - title: Duration Lte - pool: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Pool - queue: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Queue - executor: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Executor - page_offset: - type: integer - minimum: 0.0 - title: Page Offset - default: 0 - page_limit: - type: integer - minimum: 0.0 - title: Page Limit - default: 100 - order_by: - anyOf: - - type: string - - type: 'null' - title: Order By - additionalProperties: false - type: object - title: TaskInstancesBatchBody - description: Task Instance body for get batch. - TaskInstancesLogResponse: - properties: - content: - anyOf: - - items: - $ref: '#/components/schemas/StructuredLogMessage' - type: array - - items: - type: string - type: array - title: Content - continuation_token: - anyOf: - - type: string - - type: 'null' - title: Continuation Token - type: object - required: - - content - - continuation_token - title: TaskInstancesLogResponse - description: Log serializer for responses. - TaskOutletAssetReference: - properties: - dag_id: - type: string - title: Dag Id - task_id: - type: string - title: Task Id - created_at: - type: string - format: date-time - title: Created At - updated_at: - type: string - format: date-time - title: Updated At - additionalProperties: false - type: object - required: - - dag_id - - task_id - - created_at - - updated_at - title: TaskOutletAssetReference - description: Task outlet reference serializer for assets. 
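TaskInstancesBatchBody mirrors DAGRunsBatchBody but filters on task-level fields (note the singular `state` key). A sketch, assuming a list endpoint analogous to the DAG-run batch route, with `~` wildcards for dag_id and run_id (the path is a guess, not taken from this hunk):

```python
# Sketch: bulk task-instance query with TaskInstancesBatchBody. Path and
# wildcards are assumptions; the filter fields come from the schema above.
import requests

body = {
    "dag_ids": ["example_dag"],
    "state": ["failed", "up_for_retry"],   # singular key, per the schema
    "duration_gte": 60.0,                  # seconds
    "order_by": "-start_date",
    "page_limit": 50,
}
resp = requests.post(
    "http://localhost:8080/api/v2/dags/~/dagRuns/~/taskInstances/list",
    json=body,
    headers={"Authorization": "Bearer <jwt>"},
)
resp.raise_for_status()
for ti in resp.json()["task_instances"]:   # TaskInstanceCollectionResponse
    print(ti["dag_id"], ti["task_id"], ti["state"])
```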
- TaskResponse: - properties: - task_id: - anyOf: - - type: string - - type: 'null' - title: Task Id - task_display_name: - anyOf: - - type: string - - type: 'null' - title: Task Display Name - owner: - anyOf: - - type: string - - type: 'null' - title: Owner - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - trigger_rule: - anyOf: - - type: string - - type: 'null' - title: Trigger Rule - depends_on_past: - type: boolean - title: Depends On Past - wait_for_downstream: - type: boolean - title: Wait For Downstream - retries: - anyOf: - - type: number - - type: 'null' - title: Retries - queue: - anyOf: - - type: string - - type: 'null' - title: Queue - pool: - anyOf: - - type: string - - type: 'null' - title: Pool - pool_slots: - anyOf: - - type: number - - type: 'null' - title: Pool Slots - execution_timeout: - anyOf: - - $ref: '#/components/schemas/TimeDelta' - - type: 'null' - retry_delay: - anyOf: - - $ref: '#/components/schemas/TimeDelta' - - type: 'null' - retry_exponential_backoff: - type: boolean - title: Retry Exponential Backoff - priority_weight: - anyOf: - - type: number - - type: 'null' - title: Priority Weight - weight_rule: - anyOf: - - type: string - - type: 'null' - title: Weight Rule - ui_color: - anyOf: - - type: string - - type: 'null' - title: Ui Color - ui_fgcolor: - anyOf: - - type: string - - type: 'null' - title: Ui Fgcolor - template_fields: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Template Fields - downstream_task_ids: - anyOf: - - items: - type: string - type: array - - type: 'null' - title: Downstream Task Ids - doc_md: - anyOf: - - type: string - - type: 'null' - title: Doc Md - operator_name: - anyOf: - - type: string - - type: 'null' - title: Operator Name - params: - anyOf: - - additionalProperties: true - type: object - - type: 'null' - title: Params - class_ref: - anyOf: - - additionalProperties: true - type: object - - type: 'null' - title: Class Ref - is_mapped: - anyOf: - - type: boolean - - type: 'null' - title: Is Mapped - extra_links: - items: - type: string - type: array - title: Extra Links - description: Extract and return extra_links. - readOnly: true - type: object - required: - - task_id - - task_display_name - - owner - - start_date - - end_date - - trigger_rule - - depends_on_past - - wait_for_downstream - - retries - - queue - - pool - - pool_slots - - execution_timeout - - retry_delay - - retry_exponential_backoff - - priority_weight - - weight_rule - - ui_color - - ui_fgcolor - - template_fields - - downstream_task_ids - - doc_md - - operator_name - - params - - class_ref - - is_mapped - - extra_links - title: TaskResponse - description: Task serializer for responses. - TimeDelta: - properties: - __type: - type: string - title: Type - default: TimeDelta - days: - type: integer - title: Days - seconds: - type: integer - title: Seconds - microseconds: - type: integer - title: Microseconds - type: object - required: - - days - - seconds - - microseconds - title: TimeDelta - description: TimeDelta can be used to interact with datetime.timedelta objects. 
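TimeDelta is serialized as a tagged object rather than an ISO-8601 duration, so fields like `execution_timeout` and `retry_delay` in TaskResponse arrive as `{'__type': 'TimeDelta', days, seconds, microseconds}`. A small decoder sketch:

```python
# Sketch: convert the TimeDelta wire format above back into a native
# datetime.timedelta on the client.
from datetime import timedelta

def parse_timedelta(obj: dict) -> timedelta:
    """Decode a serialized TimeDelta ({'__type': 'TimeDelta', ...})."""
    assert obj.get("__type", "TimeDelta") == "TimeDelta"
    return timedelta(
        days=obj["days"],
        seconds=obj["seconds"],
        microseconds=obj["microseconds"],
    )

# e.g. an execution_timeout of 90 minutes from a TaskResponse payload:
print(parse_timedelta({"__type": "TimeDelta", "days": 0,
                       "seconds": 5400, "microseconds": 0}))
```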
- TriggerDAGRunPostBody: - properties: - dag_run_id: - anyOf: - - type: string - - type: 'null' - title: Dag Run Id - data_interval_start: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval Start - data_interval_end: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval End - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - run_after: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After - conf: - additionalProperties: true - type: object - title: Conf - note: - anyOf: - - type: string - - type: 'null' - title: Note - additionalProperties: false - type: object - required: - - logical_date - title: TriggerDAGRunPostBody - description: Trigger DAG Run Serializer for POST body. - TriggerResponse: - properties: - id: - type: integer - title: Id - classpath: - type: string - title: Classpath - kwargs: - type: string - title: Kwargs - created_date: - type: string - format: date-time - title: Created Date - triggerer_id: - anyOf: - - type: integer - - type: 'null' - title: Triggerer Id - type: object - required: - - id - - classpath - - kwargs - - created_date - - triggerer_id - title: TriggerResponse - description: Trigger serializer for responses. - TriggererInfoResponse: - properties: - status: - anyOf: - - type: string - - type: 'null' - title: Status - latest_triggerer_heartbeat: - anyOf: - - type: string - - type: 'null' - title: Latest Triggerer Heartbeat - type: object - required: - - status - - latest_triggerer_heartbeat - title: TriggererInfoResponse - description: Triggerer info serializer for responses. - ValidationError: - properties: - loc: - items: - anyOf: - - type: string - - type: integer - type: array - title: Location - msg: - type: string - title: Message - type: - type: string - title: Error Type - type: object - required: - - loc - - msg - - type - title: ValidationError - VariableBody: - properties: - key: - type: string - maxLength: 250 - title: Key - value: - type: string - title: Value - description: - anyOf: - - type: string - - type: 'null' - title: Description - additionalProperties: false - type: object - required: - - key - - value - title: VariableBody - description: Variable serializer for bodies. - VariableCollectionResponse: - properties: - variables: - items: - $ref: '#/components/schemas/VariableResponse' - type: array - title: Variables - total_entries: - type: integer - title: Total Entries - type: object - required: - - variables - - total_entries - title: VariableCollectionResponse - description: Variable Collection serializer for responses. - VariableResponse: - properties: - key: - type: string - title: Key - value: - type: string - title: Value - description: - anyOf: - - type: string - - type: 'null' - title: Description - is_encrypted: - type: boolean - title: Is Encrypted - type: object - required: - - key - - value - - description - - is_encrypted - title: VariableResponse - description: Variable serializer for responses. - VersionInfo: - properties: - version: - type: string - title: Version - git_version: - anyOf: - - type: string - - type: 'null' - title: Git Version - type: object - required: - - version - - git_version - title: VersionInfo - description: Version information serializer for responses. 
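TriggerDAGRunPostBody lists `logical_date` as required even though its value may be null, so a minimal trigger request still sends the key. A sketch that first obtains a JWT from the ``POST /auth/token`` endpoint named in the security scheme below (the credential payload and `access_token` field are assumptions):

```python
# Sketch: fetch a JWT, then trigger a run with TriggerDAGRunPostBody.
# The /api/v2/dags/{dag_id}/dagRuns path and the token payload shape are
# assumptions; the body fields come from the schema above.
import requests

BASE = "http://localhost:8080"

token = requests.post(f"{BASE}/auth/token", json={
    "username": "admin", "password": "admin",   # assumed credential shape
}).json()["access_token"]                       # assumed response field

resp = requests.post(
    f"{BASE}/api/v2/dags/example_dag/dagRuns",
    json={
        "logical_date": None,                   # required key, nullable value
        "conf": {"source": "api-example"},
        "note": "Triggered from the REST API",
    },
    headers={"Authorization": f"Bearer {token}"},
)
resp.raise_for_status()
print(resp.json()["dag_run_id"])
```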
- XComCollectionResponse: - properties: - xcom_entries: - items: - $ref: '#/components/schemas/XComResponse' - type: array - title: Xcom Entries - total_entries: - type: integer - title: Total Entries - type: object - required: - - xcom_entries - - total_entries - title: XComCollectionResponse - description: XCom Collection serializer for responses. - XComCreateBody: - properties: - key: - type: string - title: Key - value: - title: Value - map_index: - type: integer - title: Map Index - default: -1 - additionalProperties: false - type: object - required: - - key - - value - title: XComCreateBody - description: Payload serializer for creating an XCom entry. - XComResponse: - properties: - key: - type: string - title: Key - timestamp: - type: string - format: date-time - title: Timestamp - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - map_index: - type: integer - title: Map Index - task_id: - type: string - title: Task Id - dag_id: - type: string - title: Dag Id - run_id: - type: string - title: Run Id - type: object - required: - - key - - timestamp - - logical_date - - map_index - - task_id - - dag_id - - run_id - title: XComResponse - description: Serializer for an XCom item. - XComResponseNative: - properties: - key: - type: string - title: Key - timestamp: - type: string - format: date-time - title: Timestamp - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - map_index: - type: integer - title: Map Index - task_id: - type: string - title: Task Id - dag_id: - type: string - title: Dag Id - run_id: - type: string - title: Run Id - value: - title: Value - type: object - required: - - key - - timestamp - - logical_date - - map_index - - task_id - - dag_id - - run_id - - value - title: XComResponseNative - description: XCom response serializer with native return type. - XComResponseString: - properties: - key: - type: string - title: Key - timestamp: - type: string - format: date-time - title: Timestamp - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - map_index: - type: integer - title: Map Index - task_id: - type: string - title: Task Id - dag_id: - type: string - title: Dag Id - run_id: - type: string - title: Run Id - value: - anyOf: - - type: string - - type: 'null' - title: Value - type: object - required: - - key - - timestamp - - logical_date - - map_index - - task_id - - dag_id - - run_id - - value - title: XComResponseString - description: XCom response serializer with string return type. - XComUpdateBody: - properties: - value: - title: Value - map_index: - type: integer - title: Map Index - default: -1 - additionalProperties: false - type: object - required: - - value - title: XComUpdateBody - description: Payload serializer for updating an XCom entry. - securitySchemes: - OAuth2PasswordBearer: - type: oauth2 - description: To authenticate Airflow API requests, clients must include a JWT - (JSON Web Token) in the Authorization header of each request. This token is - used to verify the identity of the client and ensure that they have the appropriate - permissions to access the requested resources. You can use the endpoint ``POST - /auth/token`` in order to generate a JWT token. Upon successful authentication, - the server will issue a JWT token that contains the necessary information - (such as user identity and scope) to authenticate subsequent requests.
To - learn more about Airflow public API authentication, please read https://airflow.apache.org/docs/apache-airflow/stable/security/api.html. - flows: - password: - scopes: {} - tokenUrl: /auth/token diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml new file mode 100644 index 0000000000000..f42e1846bdfd3 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -0,0 +1,12170 @@ +openapi: 3.1.0 +info: + title: Airflow API + description: Airflow API. All endpoints located under ``/api/v2`` can be used safely, + are stable and backward compatible. Endpoints located under ``/ui`` are dedicated + to the UI and are subject to breaking change depending on the need of the frontend. + Users should not rely on those but use the public ones instead. + version: '2' +paths: + /api/v2/assets: + get: + tags: + - Asset + summary: Get Assets + description: Get assets. + operationId: get_assets + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: uri_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Uri Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: dag_ids + in: query + required: false + schema: + type: array + items: + type: string + title: Dag Ids + - name: only_active + in: query + required: false + schema: + type: boolean + default: true + title: Only Active + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AssetCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/assets/aliases: + get: + tags: + - Asset + summary: Get Asset Aliases + description: Get asset aliases. 
+ operationId: get_asset_aliases + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AssetAliasCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/assets/aliases/{asset_alias_id}: + get: + tags: + - Asset + summary: Get Asset Alias + description: Get an asset alias. + operationId: get_asset_alias + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: asset_alias_id + in: path + required: true + schema: + type: integer + title: Asset Alias Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/assets/events: + get: + tags: + - Asset + summary: Get Asset Events + description: Get asset events. 
+ operationId: get_asset_events + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - timestamp + title: Order By + - name: asset_id + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Asset Id + - name: source_dag_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Source Dag Id + - name: source_task_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Source Task Id + - name: source_run_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Source Run Id + - name: source_map_index + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Source Map Index + - name: timestamp_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Timestamp Gte + - name: timestamp_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Timestamp Lte + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AssetEventCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Asset + summary: Create Asset Event + description: Create asset events. + operationId: create_asset_event + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CreateAssetEventsBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AssetEventResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/assets/{asset_id}/materialize: + post: + tags: + - Asset + summary: Materialize Asset + description: Materialize an asset by triggering a DAG run that produces it. 
+ operationId: materialize_asset + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: asset_id + in: path + required: true + schema: + type: integer + title: Asset Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/assets/{asset_id}/queuedEvents: + get: + tags: + - Asset + summary: Get Asset Queued Events + description: Get queued asset events for an asset. + operationId: get_asset_queued_events + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: asset_id + in: path + required: true + schema: + type: integer + title: Asset Id + - name: before + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Before + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/QueuedEventCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - Asset + summary: Delete Asset Queued Events + description: Delete queued asset events for an asset. + operationId: delete_asset_queued_events + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: asset_id + in: path + required: true + schema: + type: integer + title: Asset Id + - name: before + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Before + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/assets/{asset_id}: + get: + tags: + - Asset + summary: Get Asset + description: Get an asset. 
+ operationId: get_asset + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: asset_id + in: path + required: true + schema: + type: integer + title: Asset Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AssetResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/assets/queuedEvents: + get: + tags: + - Asset + summary: Get Dag Asset Queued Events + description: Get queued asset events for a DAG. + operationId: get_dag_asset_queued_events + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: before + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Before + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/QueuedEventCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - Asset + summary: Delete Dag Asset Queued Events + operationId: delete_dag_asset_queued_events + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: before + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Before + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents: + get: + tags: + - Asset + summary: Get Dag Asset Queued Event + description: Get a queued asset event for a DAG. 
+ operationId: get_dag_asset_queued_event + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: asset_id + in: path + required: true + schema: + type: integer + title: Asset Id + - name: before + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Before + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/QueuedEventResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - Asset + summary: Delete Dag Asset Queued Event + description: Delete a queued asset event for a DAG. + operationId: delete_dag_asset_queued_event + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: asset_id + in: path + required: true + schema: + type: integer + title: Asset Id + - name: before + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Before + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/backfills: + get: + tags: + - Backfill + summary: List Backfills + operationId: list_backfills + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: query + required: true + schema: + type: string + title: Dag Id + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Backfill + summary: Create Backfill + 
operationId: create_backfill + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillPostBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/backfills/{backfill_id}: + get: + tags: + - Backfill + summary: Get Backfill + operationId: get_backfill + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: backfill_id + in: path + required: true + schema: + type: integer + minimum: 0 + title: Backfill Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/backfills/{backfill_id}/pause: + put: + tags: + - Backfill + summary: Pause Backfill + operationId: pause_backfill + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: backfill_id + in: path + required: true + schema: + type: integer + minimum: 0 + title: Backfill Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/backfills/{backfill_id}/unpause: + put: + tags: + - Backfill + summary: Unpause Backfill + operationId: unpause_backfill + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: backfill_id + in: path + required: true + schema: + type: integer + minimum: 0 + title: Backfill Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillResponse' + '401': + content: + 
application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/backfills/{backfill_id}/cancel: + put: + tags: + - Backfill + summary: Cancel Backfill + operationId: cancel_backfill + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: backfill_id + in: path + required: true + schema: + type: integer + minimum: 0 + title: Backfill Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/backfills/dry_run: + post: + tags: + - Backfill + summary: Create Backfill Dry Run + operationId: create_backfill_dry_run + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillPostBody' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DryRunBackfillCollectionResponse' + '401': + description: Unauthorized + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '404': + description: Not Found + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '409': + description: Conflict + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + /api/v2/connections/{connection_id}: + delete: + tags: + - Connection + summary: Delete Connection + description: Delete a connection entry. 
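+  # Illustrative usage note (comment only, not generated): a minimal sketch of
+  # calling `create_backfill_dry_run` above with Python `requests`. The base
+  # URL, token, and BackfillPostBody fields shown are assumptions.
+  #
+  #   import requests
+  #   resp = requests.post(
+  #       "http://localhost:8080/api/v2/backfills/dry_run",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       json={"dag_id": "example_dag",                 # assumed body fields
+  #             "from_date": "2025-01-01T00:00:00Z",
+  #             "to_date": "2025-01-07T00:00:00Z"},
+  #   )
+  #   resp.raise_for_status()   # 409 Conflict if the backfill overlaps
+  #   print(resp.json())        # DryRunBackfillCollectionResponse payload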
+ operationId: delete_connection + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: connection_id + in: path + required: true + schema: + type: string + title: Connection Id + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Connection + summary: Get Connection + description: Get a connection entry. + operationId: get_connection + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: connection_id + in: path + required: true + schema: + type: string + title: Connection Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Connection + summary: Patch Connection + description: Update a connection entry. + operationId: patch_connection + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: connection_id + in: path + required: true + schema: + type: string + title: Connection Id + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/connections: + get: + tags: + - Connection + summary: Get Connections + description: Get all connection entries. 
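+  # Illustrative usage note (comment only): partially updating a connection via
+  # `patch_connection` above. Base URL, token, and the ConnectionBody fields
+  # used in the body are assumptions for this sketch.
+  #
+  #   import requests
+  #   resp = requests.patch(
+  #       "http://localhost:8080/api/v2/connections/my_http_conn",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       params={"update_mask": ["host", "port"]},   # only these fields change
+  #       json={"connection_id": "my_http_conn", "conn_type": "http",
+  #             "host": "example.com", "port": 443},
+  #   )
+  #   resp.raise_for_status()   # 404 if the connection does not exist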
+ operationId: get_connections + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + - name: connection_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Connection Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Connection + summary: Post Connection + description: Create connection entry. + operationId: post_connection + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Connection + summary: Bulk Connections + description: Bulk create, update, and delete connections. + operationId: bulk_connections + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BulkBody_ConnectionBody_' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BulkResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/connections/test: + post: + tags: + - Connection + summary: Test Connection + description: 'Test an API connection. 
+
+
+        This method first creates an in-memory transient conn_id and exports it
+        as an environment variable,
+
+        because some hook classes try to look up the `conn` from their __init__
+        method and error out if it is not found.
+
+        The temporary environment-variable connection is deleted again after
+        the test.'
+      operationId: test_connection
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/ConnectionBody'
+        required: true
+      responses:
+        '200':
+          description: Successful Response
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ConnectionTestResponse'
+        '401':
+          description: Unauthorized
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPExceptionResponse'
+        '403':
+          description: Forbidden
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPExceptionResponse'
+        '422':
+          description: Validation Error
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPValidationError'
+      security:
+      - OAuth2PasswordBearer: []
+      - HTTPBearer: []
+  /api/v2/connections/defaults:
+    post:
+      tags:
+      - Connection
+      summary: Create Default Connections
+      description: Create default connections.
+      operationId: create_default_connections
+      responses:
+        '204':
+          description: Successful Response
+        '401':
+          description: Unauthorized
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPExceptionResponse'
+        '403':
+          description: Forbidden
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPExceptionResponse'
+      security:
+      - OAuth2PasswordBearer: []
+      - HTTPBearer: []
+  /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}:
+    get:
+      tags:
+      - DagRun
+      summary: Get Dag Run
+      operationId: get_dag_run
+      security:
+      - OAuth2PasswordBearer: []
+      - HTTPBearer: []
+      parameters:
+      - name: dag_id
+        in: path
+        required: true
+        schema:
+          type: string
+          title: Dag Id
+      - name: dag_run_id
+        in: path
+        required: true
+        schema:
+          type: string
+          title: Dag Run Id
+      responses:
+        '200':
+          description: Successful Response
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/DAGRunResponse'
+        '401':
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPExceptionResponse'
+          description: Unauthorized
+        '403':
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPExceptionResponse'
+          description: Forbidden
+        '404':
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPExceptionResponse'
+          description: Not Found
+        '422':
+          description: Validation Error
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/HTTPValidationError'
+    delete:
+      tags:
+      - DagRun
+      summary: Delete Dag Run
+      description: Delete a DAG Run entry.
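+  # Illustrative usage note (comment only): exercising `test_connection` above.
+  # The endpoint builds a transient env-var connection, so the server may
+  # reject the call if connection testing is disabled in its configuration
+  # (assumption). Base URL, token, and body fields are illustrative.
+  #
+  #   import requests
+  #   resp = requests.post(
+  #       "http://localhost:8080/api/v2/connections/test",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       json={"connection_id": "tmp_conn", "conn_type": "http",
+  #             "host": "example.com"},
+  #   )
+  #   print(resp.json())   # ConnectionTestResponse payload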
+ operationId: delete_dag_run + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - DagRun + summary: Patch Dag Run + description: Modify a DAG Run. + operationId: patch_dag_run + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamAssetEvents: + get: + tags: + - DagRun + summary: Get Upstream Asset Events + description: If dag run is asset-triggered, return the asset events that triggered + it. 
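+  # Illustrative usage note (comment only): marking a dag run failed through
+  # `patch_dag_run` above. The DAGRunPatchBody field name is assumed; run id
+  # and base URL are placeholders.
+  #
+  #   import requests
+  #   resp = requests.patch(
+  #       "http://localhost:8080/api/v2/dags/example_dag/dagRuns/manual__2025-01-01",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       params={"update_mask": ["state"]},
+  #       json={"state": "failed"},   # assumed DAGRunPatchBody field
+  #   )
+  #   resp.raise_for_status()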
+ operationId: get_upstream_asset_events + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AssetEventCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/clear: + post: + tags: + - DagRun + summary: Clear Dag Run + operationId: clear_dag_run + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunClearBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + anyOf: + - $ref: '#/components/schemas/TaskInstanceCollectionResponse' + - $ref: '#/components/schemas/DAGRunResponse' + title: Response Clear Dag Run + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns: + get: + tags: + - DagRun + summary: Get Dag Runs + description: 'Get all DAG Runs. + + + This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for + all DAGs.' 
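+  # Illustrative usage note (comment only): a dry-run clear of a dag run via
+  # `clear_dag_run` above; per the anyOf response above, a dry run yields the
+  # affected task instances rather than the run itself. Body fields assumed.
+  #
+  #   import requests
+  #   resp = requests.post(
+  #       "http://localhost:8080/api/v2/dags/example_dag/dagRuns/<run_id>/clear",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       json={"dry_run": True, "only_failed": True},   # assumed DAGRunClearBody fields
+  #   )
+  #   print(resp.json())   # TaskInstanceCollectionResponse when dry_run is true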
+ operationId: get_dag_runs + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: run_after_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Gte + - name: run_after_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Lte + - name: logical_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Gte + - name: logical_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Lte + - name: start_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + - name: start_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + - name: end_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + - name: end_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte + - name: updated_at_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Updated At Gte + - name: updated_at_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Updated At Lte + - name: run_type + in: query + required: false + schema: + type: array + items: + type: string + title: Run Type + - name: state + in: query + required: false + schema: + type: array + items: + type: string + title: State + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + - name: run_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Run Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: triggering_user_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Triggering User Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." 
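+  # Illustrative usage note (comment only): filtering dag runs with the query
+  # parameters above; list-valued params repeat in the query string. The
+  # `-run_after` ordering key is an assumption.
+  #
+  #   import requests
+  #   resp = requests.get(
+  #       "http://localhost:8080/api/v2/dags/example_dag/dagRuns",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       params={"state": ["failed", "success"],           # repeated params
+  #               "logical_date_gte": "2025-01-01T00:00:00Z",
+  #               "order_by": ["-run_after"]},
+  #   )
+  #   print(resp.json()["total_entries"])   # collection field name assumed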
+ responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - DagRun + summary: Trigger Dag Run + description: Trigger a DAG. + operationId: trigger_dag_run + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + title: Dag Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TriggerDAGRunPostBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait: + get: + tags: + - DagRun + - experimental + summary: 'Experimental: Wait for a dag run to complete, and return task results + if requested.' + description: "\U0001F6A7 This is an experimental endpoint and may change or\ + \ be removed without notice." + operationId: wait_dag_run_until_finished + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: interval + in: query + required: true + schema: + type: number + exclusiveMinimum: 0.0 + description: Seconds to wait between dag run state checks + title: Interval + description: Seconds to wait between dag run state checks + - name: result + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + description: Collect result XCom from task. Can be set multiple times. + title: Result + description: Collect result XCom from task. Can be set multiple times. 
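+  # Illustrative usage note (comment only): triggering a run through
+  # `trigger_dag_run` above. The TriggerDAGRunPostBody fields shown are
+  # assumptions; `logical_date` is sent explicitly as null here.
+  #
+  #   import requests
+  #   resp = requests.post(
+  #       "http://localhost:8080/api/v2/dags/example_dag/dagRuns",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       json={"logical_date": None, "conf": {"param": "value"}},
+  #   )
+  #   resp.raise_for_status()   # 409 Conflict if the run id already exists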
+ responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + application/x-ndjson: + schema: + type: string + example: '{"state": "running"} + + {"state": "success", "results": {"op": 42}} + + ' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/list: + post: + tags: + - DagRun + summary: Get List Dag Runs Batch + description: Get a list of DAG Runs. + operationId: get_list_dag_runs_batch + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + const: '~' + type: string + title: Dag Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunsBatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dagSources/{dag_id}: + get: + tags: + - DagSource + summary: Get Dag Source + description: Get source code using file token. 
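+  # Illustrative usage note (comment only): consuming the experimental `wait`
+  # endpoint above as an NDJSON stream; `requests` usage is standard, the URL
+  # and XCom key are placeholders.
+  #
+  #   import requests
+  #   resp = requests.get(
+  #       "http://localhost:8080/api/v2/dags/example_dag/dagRuns/<run_id>/wait",
+  #       headers={"Authorization": "Bearer <token>",
+  #                "Accept": "application/x-ndjson"},
+  #       params={"interval": 5.0, "result": ["op"]},   # `result` may repeat
+  #       stream=True,
+  #   )
+  #   for line in resp.iter_lines():
+  #       if line:
+  #           print(line.decode())   # e.g. {"state": "running"}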
+ operationId: get_dag_source + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: version_number + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Version Number + - name: accept + in: header + required: false + schema: + type: string + enum: + - application/json + - text/plain + - '*/*' + default: '*/*' + title: Accept + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGSourceResponse' + text/plain: + schema: + type: string + example: dag code + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '406': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Acceptable + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dagStats: + get: + tags: + - DagStats + summary: Get Dag Stats + description: Get Dag statistics. + operationId: get_dag_stats + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_ids + in: query + required: false + schema: + type: array + items: + type: string + title: Dag Ids + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DagStatsCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dagReports: + get: + tags: + - DagReport + summary: Get Dag Reports + description: Get DAG report. 
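+  # Illustrative usage note (comment only): fetching raw DAG source text via
+  # `get_dag_source` above using content negotiation; URL and version number
+  # are placeholders.
+  #
+  #   import requests
+  #   resp = requests.get(
+  #       "http://localhost:8080/api/v2/dagSources/example_dag",
+  #       headers={"Authorization": "Bearer <token>", "Accept": "text/plain"},
+  #       params={"version_number": 1},   # omit to get the latest version
+  #   )
+  #   print(resp.text)   # plain-text DAG file contents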
+ operationId: get_dag_reports + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: subdir + in: query + required: true + schema: + type: string + title: Subdir + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/config: + get: + tags: + - Config + summary: Get Config + operationId: get_config + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: section + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Section + - name: accept + in: header + required: false + schema: + type: string + enum: + - application/json + - text/plain + - '*/*' + default: '*/*' + title: Accept + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Config' + text/plain: + schema: + type: string + example: '[core] + + dags_folder = /opt/airflow/dags + + base_log_folder = /opt/airflow/logs + + + [smtp] + + smtp_host = localhost + + smtp_mail_from = airflow@example.com + + ' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '406': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Acceptable + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/config/section/{section}/option/{option}: + get: + tags: + - Config + summary: Get Config Value + operationId: get_config_value + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: section + in: path + required: true + schema: + type: string + title: Section + - name: option + in: path + required: true + schema: + type: string + title: Option + - name: accept + in: header + required: false + schema: + type: string + enum: + - application/json + - text/plain + - '*/*' + default: '*/*' + title: Accept + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Config' + text/plain: + schema: + type: string + example: '[core] + + dags_folder = /opt/airflow/dags + + base_log_folder = /opt/airflow/logs + + ' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '406': + content: + application/json: + schema: + $ref: 
'#/components/schemas/HTTPExceptionResponse' + description: Not Acceptable + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dagWarnings: + get: + tags: + - DagWarning + summary: List Dag Warnings + description: Get a list of DAG warnings. + operationId: list_dag_warnings + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id + - name: warning_type + in: query + required: false + schema: + anyOf: + - $ref: '#/components/schemas/DagWarningType' + - type: 'null' + title: Warning Type + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - dag_id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGWarningCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags: + get: + tags: + - DAG + summary: Get Dags + description: Get all DAGs. + operationId: get_dags + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: tags + in: query + required: false + schema: + type: array + items: + type: string + title: Tags + - name: tags_match_mode + in: query + required: false + schema: + anyOf: + - enum: + - any + - all + type: string + - type: 'null' + title: Tags Match Mode + - name: owners + in: query + required: false + schema: + type: array + items: + type: string + title: Owners + - name: dag_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Dag Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: dag_display_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Dag Display Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." 
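+  # Illustrative usage note (comment only): listing warnings for one DAG via
+  # `list_dag_warnings` above; the collection field name in the response is an
+  # assumption.
+  #
+  #   import requests
+  #   resp = requests.get(
+  #       "http://localhost:8080/api/v2/dagWarnings",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       params={"dag_id": "example_dag", "limit": 10},
+  #   )
+  #   for warning in resp.json().get("dag_warnings", []):   # field name assumed
+  #       print(warning)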
+ - name: exclude_stale + in: query + required: false + schema: + type: boolean + default: true + title: Exclude Stale + - name: paused + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Paused + - name: last_dag_run_state + in: query + required: false + schema: + anyOf: + - $ref: '#/components/schemas/DagRunState' + - type: 'null' + title: Last Dag Run State + - name: bundle_name + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Bundle Name + - name: bundle_version + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + - name: dag_run_start_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Dag Run Start Date Gte + - name: dag_run_start_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Dag Run Start Date Lte + - name: dag_run_end_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Dag Run End Date Gte + - name: dag_run_end_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Dag Run End Date Lte + - name: dag_run_state + in: query + required: false + schema: + type: array + items: + type: string + title: Dag Run State + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - dag_id + title: Order By + - name: is_favorite + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Is Favorite + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - DAG + summary: Patch Dags + description: Patch multiple DAGs. + operationId: patch_dags + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: tags + in: query + required: false + schema: + type: array + items: + type: string + title: Tags + - name: tags_match_mode + in: query + required: false + schema: + anyOf: + - enum: + - any + - all + type: string + - type: 'null' + title: Tags Match Mode + - name: owners + in: query + required: false + schema: + type: array + items: + type: string + title: Owners + - name: dag_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Dag Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. 
`%customer_%`).\ + \ Regular expressions are **not** supported." + - name: exclude_stale + in: query + required: false + schema: + type: boolean + default: true + title: Exclude Stale + - name: paused + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Paused + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}: + get: + tags: + - DAG + summary: Get Dag + description: Get basic information about a DAG. + operationId: get_dag + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity + patch: + tags: + - DAG + summary: Patch Dag + description: Patch the specific DAG. 
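+  # Illustrative usage note (comment only): pausing a single DAG with the
+  # `PATCH /api/v2/dags/{dag_id}` operation defined here, restricted to one
+  # field via `update_mask`; the `is_paused` DAGPatchBody field is assumed.
+  #
+  #   import requests
+  #   resp = requests.patch(
+  #       "http://localhost:8080/api/v2/dags/example_dag",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       params={"update_mask": ["is_paused"]},
+  #       json={"is_paused": True},
+  #   )
+  #   resp.raise_for_status()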
+ operationId: patch_dag + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - DAG + summary: Delete Dag + description: Delete the specific DAG. + operationId: delete_dag + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity + /api/v2/dags/{dag_id}/details: + get: + tags: + - DAG + summary: Get Dag Details + description: Get details of DAG. + operationId: get_dag_details + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGDetailsResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/favorite: + post: + tags: + - DAG + summary: Favorite Dag + description: Mark the DAG as favorite. 
+ operationId: favorite_dag + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/unfavorite: + post: + tags: + - DAG + summary: Unfavorite Dag + description: Unmark the DAG as favorite. + operationId: unfavorite_dag + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/eventLogs/{event_log_id}: + get: + tags: + - Event Log + summary: Get Event Log + operationId: get_event_log + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: event_log_id + in: path + required: true + schema: + type: integer + title: Event Log Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/EventLogResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/eventLogs: + get: + tags: + - Event Log + summary: Get Event Logs + description: Get all Event Logs. 
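+  # Illustrative usage note (comment only): marking a DAG as favorite with
+  # `favorite_dag` above; success carries no body (204).
+  #
+  #   import requests
+  #   resp = requests.post(
+  #       "http://localhost:8080/api/v2/dags/example_dag/favorite",
+  #       headers={"Authorization": "Bearer <token>"},
+  #   )
+  #   assert resp.status_code == 204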
+ operationId: get_event_logs + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + - name: dag_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id + - name: task_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Task Id + - name: run_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Run Id + - name: map_index + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Map Index + - name: try_number + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Try Number + - name: owner + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Owner + - name: event + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Event + - name: excluded_events + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Excluded Events + - name: included_events + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Included Events + - name: before + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Before + - name: after + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: After + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/EventLogCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links: + get: + tags: + - Extra Links + - Task Instance + summary: Get Extra Links + description: Get extra links for task instance. 
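+  # Illustrative usage note (comment only): querying the audit log through
+  # `get_event_logs` above; the excluded event name is a placeholder.
+  #
+  #   import requests
+  #   resp = requests.get(
+  #       "http://localhost:8080/api/v2/eventLogs",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       params={"dag_id": "example_dag",
+  #               "after": "2025-01-01T00:00:00Z",
+  #               "excluded_events": ["cli_task_run"]},   # placeholder event name
+  #   )
+  #   print(resp.json())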
+ operationId: get_extra_links + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ExtraLinkCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/importErrors/{import_error_id}: + get: + tags: + - Import Error + summary: Get Import Error + description: Get an import error. + operationId: get_import_error + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: import_error_id + in: path + required: true + schema: + type: integer + title: Import Error Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ImportErrorResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/importErrors: + get: + tags: + - Import Error + summary: Get Import Errors + description: Get all import errors. + operationId: get_import_errors + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ImportErrorCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/jobs: + get: + tags: + - Job + summary: Get Jobs + description: Get all jobs. 
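+  # Illustrative usage note (comment only): checking for live scheduler jobs
+  # via the Jobs listing defined here; the `job_type` value is an assumption.
+  #
+  #   import requests
+  #   resp = requests.get(
+  #       "http://localhost:8080/api/v2/jobs",
+  #       headers={"Authorization": "Bearer <token>"},
+  #       params={"is_alive": True, "job_type": "SchedulerJob"},   # value assumed
+  #   )
+  #   print(resp.json())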
+ operationId: get_jobs + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: is_alive + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Is Alive + - name: start_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + - name: start_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + - name: end_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + - name: end_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + - name: job_state + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Job State + - name: job_type + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Job Type + - name: hostname + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Hostname + - name: executor_class + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Executor Class + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/JobCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/plugins: + get: + tags: + - Plugin + summary: Get Plugins + operationId: get_plugins + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PluginCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/plugins/importErrors: + get: + tags: + - Plugin + summary: Import Errors + operationId: import_errors + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: 
'#/components/schemas/PluginImportErrorCollectionResponse' + '401': + description: Unauthorized + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + /api/v2/pools/{pool_name}: + delete: + tags: + - Pool + summary: Delete Pool + description: Delete a pool entry. + operationId: delete_pool + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: pool_name + in: path + required: true + schema: + type: string + title: Pool Name + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Pool + summary: Get Pool + description: Get a pool. + operationId: get_pool + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: pool_name + in: path + required: true + schema: + type: string + title: Pool Name + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Pool + summary: Patch Pool + description: Update a Pool. 
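+      # Sketch of a partial pool update via `update_mask` (illustrative; field names
+      # follow the PoolPatchBody schema referenced above -- same base-URL/token
+      # assumptions as the earlier sketch):
+      #   import requests
+      #   resp = requests.patch(
+      #       "http://localhost:8080/api/v2/pools/my_pool",
+      #       params={"update_mask": "slots"},
+      #       json={"slots": 8},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )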
+ operationId: patch_pool + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: pool_name + in: path + required: true + schema: + type: string + title: Pool Name + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PoolPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/pools: + get: + tags: + - Pool + summary: Get Pools + description: Get all pool entries. + operationId: get_pools + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + - name: pool_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Pool Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Pool + summary: Post Pool + description: Create a Pool.
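+      # Sketch of pool creation; a 409 response indicates the pool already exists
+      # (illustrative; the PoolBody field names below are assumptions drawn from the
+      # schema referenced after this block and may differ -- same base-URL/token
+      # assumptions as the earlier sketches):
+      #   import requests
+      #   resp = requests.post(
+      #       "http://localhost:8080/api/v2/pools",
+      #       json={"name": "ml_training", "slots": 4, "description": "GPU jobs"},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )
+      #   assert resp.status_code == 201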
+ operationId: post_pool + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PoolBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Pool + summary: Bulk Pools + description: Bulk create, update, and delete pools. + operationId: bulk_pools + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BulkBody_PoolBody_' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BulkResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/providers: + get: + tags: + - Provider + summary: Get Providers + description: Get providers. + operationId: get_providers + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ProviderCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}: + get: + tags: + - XCom + summary: Get Xcom Entry + description: Get an XCom entry. 
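+      # Sketch: read one XCom value. `stringify=true` returns an XComResponseString,
+      # while `deserialize=true` asks the server to deserialize the stored value
+      # (illustrative; same base-URL/token assumptions; the DAG/run/task ids are
+      # placeholders):
+      #   import requests
+      #   resp = requests.get(
+      #       "http://localhost:8080/api/v2/dags/my_dag/dagRuns/run_1"
+      #       "/taskInstances/extract/xcomEntries/return_value",
+      #       params={"stringify": "true"},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )
+      #   print(resp.json()["value"])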
+ operationId: get_xcom_entry + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: xcom_key + in: path + required: true + schema: + type: string + title: Xcom Key + - name: map_index + in: query + required: false + schema: + type: integer + minimum: -1 + default: -1 + title: Map Index + - name: deserialize + in: query + required: false + schema: + type: boolean + default: false + title: Deserialize + - name: stringify + in: query + required: false + schema: + type: boolean + default: false + title: Stringify + responses: + '200': + description: Successful Response + content: + application/json: + schema: + anyOf: + - $ref: '#/components/schemas/XComResponseNative' + - $ref: '#/components/schemas/XComResponseString' + title: Response Get Xcom Entry + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - XCom + summary: Update Xcom Entry + description: Update an existing XCom entry. + operationId: update_xcom_entry + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: xcom_key + in: path + required: true + schema: + type: string + title: Xcom Key + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/XComUpdateBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/XComResponseNative' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries: + get: + tags: + - XCom + summary: Get Xcom Entries + description: 'Get all XCom entries. + + + This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to + retrieve XCom entries for all DAGs.' 
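+      # Sketch: `~` in the path acts as a wildcard, so this lists `return_value`
+      # XCom entries across every DAG, run, and task (illustrative; same base-URL/token
+      # assumptions as the earlier sketches):
+      #   import requests
+      #   resp = requests.get(
+      #       "http://localhost:8080/api/v2/dags/~/dagRuns/~/taskInstances/~/xcomEntries",
+      #       params={"xcom_key": "return_value", "limit": 50},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )
+      #   entries = resp.json()["xcom_entries"]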
+ operationId: get_xcom_entries + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: xcom_key + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Xcom Key + - name: map_index + in: query + required: false + schema: + anyOf: + - type: integer + minimum: -1 + - type: 'null' + title: Map Index + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/XComCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - XCom + summary: Create Xcom Entry + description: Create an XCom entry. + operationId: create_xcom_entry + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/XComCreateBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/XComResponseNative' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}: + get: + tags: + - Task Instance + summary: Get Task Instance + description: Get task instance. 
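+      # Sketch: fetch one task instance and inspect its state (illustrative; same
+      # base-URL/token assumptions; placeholder ids):
+      #   import requests
+      #   resp = requests.get(
+      #       "http://localhost:8080/api/v2/dags/my_dag/dagRuns/run_1/taskInstances/extract",
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )
+      #   print(resp.json()["state"])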
+ operationId: get_task_instance + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Task Instance + summary: Patch Task Instance + description: Update a task instance. + operationId: patch_task_instance + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Map Index + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchTaskInstanceBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - Task Instance + summary: Delete Task Instance + description: Delete a task instance. 
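+      # Sketch: delete a single mapped task instance by passing its `map_index`
+      # query parameter (illustrative; same assumptions as the earlier sketches):
+      #   import requests
+      #   requests.delete(
+      #       "http://localhost:8080/api/v2/dags/my_dag/dagRuns/run_1/taskInstances/extract",
+      #       params={"map_index": 0},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )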
+ operationId: delete_task_instance + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + type: 'null' + title: Response Delete Task Instance + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped: + get: + tags: + - Task Instance + summary: Get Mapped Task Instances + description: Get list of mapped task instances. + operationId: get_mapped_task_instances + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: run_after_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Gte + - name: run_after_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Lte + - name: logical_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Gte + - name: logical_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Lte + - name: start_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + - name: start_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + - name: end_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + - name: end_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte + - name: updated_at_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Updated At Gte + - name: updated_at_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Updated At Lte + - name: duration_gte + in: query + required: false + schema: + anyOf: + - type: number + - type: 'null' + title: Duration Gte + - name: duration_lte + in: query + required: false + schema: + anyOf: + - type: number + - type: 'null' + title: Duration Lte + - name: state + in: query + required: 
false + schema: + type: array + items: + type: string + title: State + - name: pool + in: query + required: false + schema: + type: array + items: + type: string + title: Pool + - name: queue + in: query + required: false + schema: + type: array + items: + type: string + title: Queue + - name: executor + in: query + required: false + schema: + type: array + items: + type: string + title: Executor + - name: version_number + in: query + required: false + schema: + type: array + items: + type: integer + title: Version Number + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - map_index + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies: + get: + tags: + - Task Instance + summary: Get Task Instance Dependencies + description: Get dependencies blocking task from getting scheduled. + operationId: get_task_instance_dependencies_by_map_index + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskDependencyCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies: + get: + tags: + - Task Instance + summary: Get Task Instance Dependencies + description: Get dependencies blocking task from getting scheduled. 
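+      # Sketch: inspect why a task instance has not been scheduled yet
+      # (illustrative; same base-URL/token assumptions; the `name`/`reason` fields
+      # are assumed from the TaskDependencyCollectionResponse schema):
+      #   import requests
+      #   resp = requests.get(
+      #       "http://localhost:8080/api/v2/dags/my_dag/dagRuns/run_1"
+      #       "/taskInstances/extract/dependencies",
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )
+      #   for dep in resp.json()["dependencies"]:
+      #       print(dep["name"], "-", dep["reason"])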
+ operationId: get_task_instance_dependencies + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskDependencyCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries: + get: + tags: + - Task Instance + summary: Get Task Instance Tries + description: Get the list of task instance history records. + operationId: get_task_instance_tries + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceHistoryCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries: + get: + tags: + - Task Instance + summary: Get Mapped Task Instance Tries + operationId: get_mapped_task_instance_tries + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceHistoryCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref:
'#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}: + get: + tags: + - Task Instance + summary: Get Mapped Task Instance + description: Get task instance. + operationId: get_mapped_task_instance + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Task Instance + summary: Patch Task Instance + description: Update a task instance. + operationId: patch_task_instance_by_map_index + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + anyOf: + - type: integer + - type: 'null' + title: Map Index + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchTaskInstanceBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances: + 
get: + tags: + - Task Instance + summary: Get Task Instances + description: 'Get list of task instances. + + + This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve + Task Instances for all DAGs + + and DAG runs.' + operationId: get_task_instances + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Task Id + - name: run_after_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Gte + - name: run_after_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Lte + - name: logical_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Gte + - name: logical_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Lte + - name: start_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + - name: start_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + - name: end_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + - name: end_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte + - name: updated_at_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Updated At Gte + - name: updated_at_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Updated At Lte + - name: duration_gte + in: query + required: false + schema: + anyOf: + - type: number + - type: 'null' + title: Duration Gte + - name: duration_lte + in: query + required: false + schema: + anyOf: + - type: number + - type: 'null' + title: Duration Lte + - name: task_display_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Task Display Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." 
+ - name: state + in: query + required: false + schema: + type: array + items: + type: string + title: State + - name: pool + in: query + required: false + schema: + type: array + items: + type: string + title: Pool + - name: queue + in: query + required: false + schema: + type: array + items: + type: string + title: Queue + - name: executor + in: query + required: false + schema: + type: array + items: + type: string + title: Executor + - name: version_number + in: query + required: false + schema: + type: array + items: + type: integer + title: Version Number + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - map_index + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Task Instance + summary: Bulk Task Instances + description: Bulk update and delete task instances. + operationId: bulk_task_instances + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BulkBody_BulkTaskInstanceBody_' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BulkResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list: + post: + tags: + - Task Instance + summary: Get Task Instances Batch + description: Get list of task instances.
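+      # Sketch: the batch endpoint requires literal `~` path segments and takes its
+      # filters in the request body (illustrative; the TaskInstancesBatchBody field
+      # names below are assumptions -- check the schema referenced after this block;
+      # same base-URL/token assumptions as the earlier sketches):
+      #   import requests
+      #   resp = requests.post(
+      #       "http://localhost:8080/api/v2/dags/~/dagRuns/~/taskInstances/list",
+      #       json={"dag_ids": ["my_dag"], "state": ["failed"], "page_limit": 50},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )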
+ operationId: get_task_instances_batch + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + const: '~' + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + const: '~' + type: string + title: Dag Run Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstancesBatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}: + get: + tags: + - Task Instance + summary: Get Task Instance Try Details + description: Get task instance details by try number. + operationId: get_task_instance_try_details + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: task_try_number + in: path + required: true + schema: + type: integer + title: Task Try Number + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceHistoryResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}: + get: + tags: + - Task Instance + summary: Get Mapped Task Instance Try Details + operationId: get_mapped_task_instance_try_details + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: task_try_number + in: path + required: true + schema: + type: integer + title: Task Try Number + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: 
'#/components/schemas/TaskInstanceHistoryResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/clearTaskInstances: + post: + tags: + - Task Instance + summary: Post Clear Task Instances + description: Clear task instances. + operationId: post_clear_task_instances + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ClearTaskInstancesBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dry_run: + patch: + tags: + - Task Instance + summary: Patch Task Instance Dry Run + description: Update a task instance in dry_run mode.
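+      # Sketch: a dry_run patch previews which task instances would change state
+      # without persisting anything (illustrative; body fields per the
+      # PatchTaskInstanceBody schema referenced below; same base-URL/token
+      # assumptions; placeholder ids and map index):
+      #   import requests
+      #   resp = requests.patch(
+      #       "http://localhost:8080/api/v2/dags/my_dag/dagRuns/run_1"
+      #       "/taskInstances/extract/0/dry_run",
+      #       json={"new_state": "success"},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )
+      #   print(resp.json()["total_entries"], "instances would be updated")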
+ operationId: patch_task_instance_dry_run_by_map_index + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + anyOf: + - type: integer + - type: 'null' + title: Map Index + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchTaskInstanceBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dry_run: + patch: + tags: + - Task Instance + summary: Patch Task Instance Dry Run + description: Update a task instance in dry_run mode. + operationId: patch_task_instance_dry_run + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: query + required: false + schema: + anyOf: + - type: integer + - type: 'null' + title: Map Index + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PatchTaskInstanceBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/tasks: + get: + tags: + - Task + summary: Get Tasks + description: Get tasks for DAG.
+ operationId: get_tasks + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: order_by + in: query + required: false + schema: + type: string + default: task_id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/tasks/{task_id}: + get: + tags: + - Task + summary: Get Task + description: Get simplified representation of a task. + operationId: get_task + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: task_id + in: path + required: true + schema: + title: Task Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/variables/{variable_key}: + delete: + tags: + - Variable + summary: Delete Variable + description: Delete a variable entry. + operationId: delete_variable + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Variable + summary: Get Variable + description: Get a variable entry. 
+ operationId: get_variable + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Variable + summary: Patch Variable + description: Update a variable by key. + operationId: patch_variable + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/VariableBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/variables: + get: + tags: + - Variable + summary: Get Variables + description: Get all variable entries. + operationId: get_variables + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + - name: variable_key_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Variable Key Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported."
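+      # Sketch: filter variables with the SQL LIKE semantics described above, e.g.
+      # every key beginning with `aws` (illustrative; same base-URL/token assumptions
+      # as the earlier sketches):
+      #   import requests
+      #   resp = requests.get(
+      #       "http://localhost:8080/api/v2/variables",
+      #       params={"variable_key_pattern": "aws%"},
+      #       headers={"Authorization": f"Bearer {token}"},
+      #   )
+      #   keys = [v["key"] for v in resp.json()["variables"]]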
+ responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Variable + summary: Post Variable + description: Create a variable. + operationId: post_variable + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/VariableBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Variable + summary: Bulk Variables + description: Bulk create, update, and delete variables. + operationId: bulk_variables + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BulkBody_VariableBody_' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/BulkResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}: + get: + tags: + - Task Instance + summary: Get Log + description: Get logs for a specific task instance. 
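+      # Sketch: stream a try's logs as newline-delimited JSON by sending the
+      # `application/x-ndjson` Accept header (illustrative; same base-URL/token
+      # assumptions; placeholder ids and try number):
+      #   import requests
+      #   resp = requests.get(
+      #       "http://localhost:8080/api/v2/dags/my_dag/dagRuns/run_1"
+      #       "/taskInstances/extract/logs/1",
+      #       headers={"Authorization": f"Bearer {token}",
+      #                "Accept": "application/x-ndjson"},
+      #       stream=True,
+      #   )
+      #   for line in resp.iter_lines():
+      #       print(line.decode())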
+ operationId: get_log + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: try_number + in: path + required: true + schema: + type: integer + minimum: 0 + title: Try Number + - name: full_content + in: query + required: false + schema: + type: boolean + default: false + title: Full Content + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + - name: token + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Token + - name: accept + in: header + required: false + schema: + type: string + enum: + - application/json + - application/x-ndjson + - '*/*' + default: '*/*' + title: Accept + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstancesLogResponse' + application/x-ndjson: + schema: + type: string + example: '{"content": "content"} + + {"content": "content"} + + ' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/externalLogUrl/{try_number}: + get: + tags: + - Task Instance + summary: Get External Log Url + description: Get external log URL for a specific task instance. + operationId: get_external_log_url + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: try_number + in: path + required: true + schema: + type: integer + exclusiveMinimum: 0 + title: Try Number + - name: map_index + in: query + required: false + schema: + type: integer + default: -1 + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ExternalLogUrlResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/parseDagFile/{file_token}: + put: + tags: + - DAG Parsing + summary: Reparse Dag File + description: Request re-parsing a DAG file. 
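+      # Sketch: the `file_token` is an opaque value returned in DAG responses
+      # (assumed here to be their `file_token` field); PUTting it queues the file
+      # for re-parsing (illustrative; same base-URL/token assumptions):
+      #   import requests
+      #   headers = {"Authorization": f"Bearer {token}"}
+      #   dag = requests.get("http://localhost:8080/api/v2/dags/my_dag", headers=headers).json()
+      #   requests.put(
+      #       f"http://localhost:8080/api/v2/parseDagFile/{dag['file_token']}",
+      #       headers=headers,
+      #   )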
+ operationId: reparse_dag_file + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: file_token + in: path + required: true + schema: + type: string + title: File Token + responses: + '201': + description: Successful Response + content: + application/json: + schema: + type: 'null' + title: Response Reparse Dag File + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dagTags: + get: + tags: + - DAG + summary: Get Dag Tags + description: Get all DAG tags. + operationId: get_dag_tags + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - name + title: Order By + - name: tag_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Tag Name Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGTagCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagVersions/{version_number}: + get: + tags: + - DagVersion + summary: Get Dag Version + description: Get one Dag Version. 
+ operationId: get_dag_version + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: version_number + in: path + required: true + schema: + type: integer + title: Version Number + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DagVersionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagVersions: + get: + tags: + - DagVersion + summary: Get Dag Versions + description: 'Get all DAG Versions. + + + This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions + for all DAGs.' + operationId: get_dag_versions + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: version_number + in: query + required: false + schema: + type: integer + title: Version Number + - name: bundle_name + in: query + required: false + schema: + type: string + title: Bundle Name + - name: bundle_version + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGVersionCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}: + patch: + tags: + - HumanInTheLoop + summary: Update Hitl Detail + description: Update a Human-in-the-loop detail. 
+ operationId: update_hitl_detail + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateHITLDetailPayload' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetailResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - HumanInTheLoop + summary: Get Hitl Detail + description: Get a Human-in-the-loop detail of a specific task instance. + operationId: get_hitl_detail + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetail' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}/{map_index}: + patch: + tags: + - HumanInTheLoop + summary: Update Mapped Ti Hitl Detail + description: Update a Human-in-the-loop detail. 
+ operationId: update_mapped_ti_hitl_detail + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateHITLDetailPayload' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetailResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - HumanInTheLoop + summary: Get Mapped Ti Hitl Detail + description: Get a Human-in-the-loop detail of a specific task instance. + operationId: get_mapped_ti_hitl_detail + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetail' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitlDetails/: + get: + tags: + - HumanInTheLoop + summary: Get Hitl Details + description: Get Human-in-the-loop details. 
+ operationId: get_hitl_details + security: + - OAuth2PasswordBearer: [] + - HTTPBearer: [] + parameters: + - name: limit + in: query + required: false + schema: + type: integer + minimum: 0 + default: 50 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: array + items: + type: string + default: + - ti_id + title: Order By + - name: dag_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id + - name: dag_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Dag Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: dag_run_id + in: query + required: false + schema: + type: string + title: Dag Run Id + - name: task_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Task Id + - name: task_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Task Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: state + in: query + required: false + schema: + type: array + items: + type: string + title: State + - name: response_received + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Response Received + - name: user_id + in: query + required: false + schema: + type: array + items: + type: string + title: User Id + - name: subject_search + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Subject Search + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + - name: body_search + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Body Search + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." 
+ responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetailCollection' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/monitor/health: + get: + tags: + - Monitor + summary: Get Health + operationId: get_health + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HealthInfoResponse' + /api/v2/version: + get: + tags: + - Version + summary: Get Version + description: Get version information. + operationId: get_version + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VersionInfo' + /api/v2/auth/login: + get: + tags: + - Login + summary: Login + description: Redirect to the login URL depending on the AuthManager configured. + operationId: login + parameters: + - name: next + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Next + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '307': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Temporary Redirect + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/auth/logout: + get: + tags: + - Login + summary: Logout + description: Logout the user. + operationId: logout + parameters: + - name: next + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Next + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '307': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Temporary Redirect + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/auth/refresh: + get: + tags: + - Login + summary: Refresh + description: Refresh the authentication token. + operationId: refresh + parameters: + - name: next + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Next + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '307': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Temporary Redirect + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' +components: + schemas: + AppBuilderMenuItemResponse: + properties: + name: + type: string + title: Name + href: + type: string + title: Href + category: + anyOf: + - type: string + - type: 'null' + title: Category + additionalProperties: true + type: object + required: + - name + - href + title: AppBuilderMenuItemResponse + description: Serializer for AppBuilder Menu Item responses. 
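For orientation, a minimal Python sketch exercising the endpoints defined above. The base URL and bearer token are assumptions, and token acquisition depends on the configured auth manager; per the spec, `get_health` and `get_version` declare no security requirement, while `get_hitl_details` does.

```python
# Sketch only, assuming a local deployment and an already-issued bearer token.
import requests

BASE_URL = "http://localhost:8080"  # hypothetical deployment URL
TOKEN = "..."  # obtain via the configured auth manager (HTTPBearer per the spec)

# Health and version are unauthenticated in this spec.
print(requests.get(f"{BASE_URL}/api/v2/monitor/health").json())
print(requests.get(f"{BASE_URL}/api/v2/version").json())

# get_hitl_details accepts SQL LIKE patterns (`%` / `_`), not regular expressions.
resp = requests.get(
    f"{BASE_URL}/api/v2/hitlDetails/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    params={"dag_id_pattern": "%customer_%", "response_received": "false", "limit": 50},
)
resp.raise_for_status()
print(resp.json()["total_entries"])
```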
+ AppBuilderViewResponse: + properties: + name: + anyOf: + - type: string + - type: 'null' + title: Name + category: + anyOf: + - type: string + - type: 'null' + title: Category + view: + anyOf: + - type: string + - type: 'null' + title: View + label: + anyOf: + - type: string + - type: 'null' + title: Label + additionalProperties: true + type: object + title: AppBuilderViewResponse + description: Serializer for AppBuilder View responses. + AssetAliasCollectionResponse: + properties: + asset_aliases: + items: + $ref: '#/components/schemas/AssetAliasResponse' + type: array + title: Asset Aliases + total_entries: + type: integer + title: Total Entries + type: object + required: + - asset_aliases + - total_entries + title: AssetAliasCollectionResponse + description: Asset alias collection response. + AssetAliasResponse: + properties: + id: + type: integer + title: Id + name: + type: string + title: Name + group: + type: string + title: Group + type: object + required: + - id + - name + - group + title: AssetAliasResponse + description: Asset alias serializer for responses. + AssetCollectionResponse: + properties: + assets: + items: + $ref: '#/components/schemas/AssetResponse' + type: array + title: Assets + total_entries: + type: integer + title: Total Entries + type: object + required: + - assets + - total_entries + title: AssetCollectionResponse + description: Asset collection response. + AssetEventCollectionResponse: + properties: + asset_events: + items: + $ref: '#/components/schemas/AssetEventResponse' + type: array + title: Asset Events + total_entries: + type: integer + title: Total Entries + type: object + required: + - asset_events + - total_entries + title: AssetEventCollectionResponse + description: Asset event collection response. + AssetEventResponse: + properties: + id: + type: integer + title: Id + asset_id: + type: integer + title: Asset Id + uri: + anyOf: + - type: string + - type: 'null' + title: Uri + name: + anyOf: + - type: string + - type: 'null' + title: Name + group: + anyOf: + - type: string + - type: 'null' + title: Group + extra: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Extra + source_task_id: + anyOf: + - type: string + - type: 'null' + title: Source Task Id + source_dag_id: + anyOf: + - type: string + - type: 'null' + title: Source Dag Id + source_run_id: + anyOf: + - type: string + - type: 'null' + title: Source Run Id + source_map_index: + type: integer + title: Source Map Index + created_dagruns: + items: + $ref: '#/components/schemas/DagRunAssetReference' + type: array + title: Created Dagruns + timestamp: + type: string + format: date-time + title: Timestamp + type: object + required: + - id + - asset_id + - source_map_index + - created_dagruns + - timestamp + title: AssetEventResponse + description: Asset event serializer for responses. 
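To make the asset event schema concrete, a hypothetical payload shaped like `AssetEventResponse` above. All values are invented; per the schema, only `id`, `asset_id`, `source_map_index`, `created_dagruns`, and `timestamp` are required, the rest are nullable.

```python
# Illustrative shape of an AssetEventResponse (values invented).
asset_event = {
    "id": 42,
    "asset_id": 7,
    "uri": "s3://bucket/key",    # anyOf string / null
    "name": "raw_orders",        # anyOf string / null
    "group": "asset",            # anyOf string / null
    "extra": {"rows": 1000},     # nullable object
    "source_task_id": "produce", # null when the event was created via the API
    "source_dag_id": "etl",
    "source_run_id": "manual__2025-01-01T00:00:00+00:00",
    "source_map_index": -1,
    "created_dagruns": [],       # list of DagRunAssetReference
    "timestamp": "2025-01-01T00:00:00Z",
}
```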
+ AssetResponse: + properties: + id: + type: integer + title: Id + name: + type: string + title: Name + uri: + type: string + title: Uri + group: + type: string + title: Group + extra: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Extra + created_at: + type: string + format: date-time + title: Created At + updated_at: + type: string + format: date-time + title: Updated At + scheduled_dags: + items: + $ref: '#/components/schemas/DagScheduleAssetReference' + type: array + title: Scheduled Dags + producing_tasks: + items: + $ref: '#/components/schemas/TaskOutletAssetReference' + type: array + title: Producing Tasks + consuming_tasks: + items: + $ref: '#/components/schemas/TaskInletAssetReference' + type: array + title: Consuming Tasks + aliases: + items: + $ref: '#/components/schemas/AssetAliasResponse' + type: array + title: Aliases + last_asset_event: + anyOf: + - $ref: '#/components/schemas/LastAssetEventResponse' + - type: 'null' + type: object + required: + - id + - name + - uri + - group + - created_at + - updated_at + - scheduled_dags + - producing_tasks + - consuming_tasks + - aliases + title: AssetResponse + description: Asset serializer for responses. + BackfillCollectionResponse: + properties: + backfills: + items: + $ref: '#/components/schemas/BackfillResponse' + type: array + title: Backfills + total_entries: + type: integer + title: Total Entries + type: object + required: + - backfills + - total_entries + title: BackfillCollectionResponse + description: Backfill Collection serializer for responses. + BackfillPostBody: + properties: + dag_id: + type: string + title: Dag Id + from_date: + type: string + format: date-time + title: From Date + to_date: + type: string + format: date-time + title: To Date + run_backwards: + type: boolean + title: Run Backwards + default: false + dag_run_conf: + additionalProperties: true + type: object + title: Dag Run Conf + default: {} + reprocess_behavior: + $ref: '#/components/schemas/ReprocessBehavior' + default: none + max_active_runs: + type: integer + title: Max Active Runs + default: 10 + additionalProperties: false + type: object + required: + - dag_id + - from_date + - to_date + title: BackfillPostBody + description: Object used for create backfill request. + BackfillResponse: + properties: + id: + type: integer + minimum: 0.0 + title: Id + dag_id: + type: string + title: Dag Id + from_date: + type: string + format: date-time + title: From Date + to_date: + type: string + format: date-time + title: To Date + dag_run_conf: + additionalProperties: true + type: object + title: Dag Run Conf + is_paused: + type: boolean + title: Is Paused + reprocess_behavior: + $ref: '#/components/schemas/ReprocessBehavior' + max_active_runs: + type: integer + title: Max Active Runs + created_at: + type: string + format: date-time + title: Created At + completed_at: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Completed At + updated_at: + type: string + format: date-time + title: Updated At + dag_display_name: + type: string + title: Dag Display Name + type: object + required: + - id + - dag_id + - from_date + - to_date + - dag_run_conf + - is_paused + - reprocess_behavior + - max_active_runs + - created_at + - completed_at + - updated_at + - dag_display_name + title: BackfillResponse + description: Base serializer for Backfill. 
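A sketch of a `BackfillPostBody` request. The create-backfill operation itself is defined earlier in this spec, so the path below is an assumption; the schema requires `dag_id`, `from_date`, and `to_date`, with `reprocess_behavior` defaulting to `none` and `max_active_runs` to 10.

```python
# Sketch only: path and auth details are assumptions.
import requests

payload = {
    "dag_id": "etl",                      # required
    "from_date": "2025-01-01T00:00:00Z",  # required, date-time
    "to_date": "2025-01-08T00:00:00Z",    # required, date-time
    "run_backwards": False,
    "dag_run_conf": {"source": "backfill"},
    "reprocess_behavior": "none",         # schema default
    "max_active_runs": 10,                # schema default
}
requests.post(
    "http://localhost:8080/api/v2/backfills",  # assumed path
    json=payload,
    headers={"Authorization": "Bearer ..."},
).raise_for_status()
```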
+    BaseInfoResponse:
+      properties:
+        status:
+          anyOf:
+          - type: string
+          - type: 'null'
+          title: Status
+      type: object
+      required:
+      - status
+      title: BaseInfoResponse
+      description: Base info serializer for responses.
+    BulkActionNotOnExistence:
+      type: string
+      enum:
+      - fail
+      - skip
+      title: BulkActionNotOnExistence
+      description: Bulk Action to be taken if the entity does not exist.
+    BulkActionOnExistence:
+      type: string
+      enum:
+      - fail
+      - skip
+      - overwrite
+      title: BulkActionOnExistence
+      description: Bulk Action to be taken if the entity already exists.
+    BulkActionResponse:
+      properties:
+        success:
+          items:
+            type: string
+          type: array
+          title: Success
+          description: A list of unique id/key representing successful operations.
+          default: []
+        errors:
+          items:
+            additionalProperties: true
+            type: object
+          type: array
+          title: Errors
+          description: A list of errors encountered during the operation, each containing
+            details about the issue.
+          default: []
+      type: object
+      title: BulkActionResponse
+      description: 'Serializer for individual bulk action responses.
+
+
+        Represents the outcome of a single bulk operation (create, update, or delete).
+
+        The response includes a list of successful keys and any errors encountered
+        during the operation.
+
+        This structure helps users understand which key actions succeeded and which
+        failed.'
+    BulkBody_BulkTaskInstanceBody_:
+      properties:
+        actions:
+          items:
+            oneOf:
+            - $ref: '#/components/schemas/BulkCreateAction_BulkTaskInstanceBody_'
+            - $ref: '#/components/schemas/BulkUpdateAction_BulkTaskInstanceBody_'
+            - $ref: '#/components/schemas/BulkDeleteAction_BulkTaskInstanceBody_'
+          type: array
+          title: Actions
+      additionalProperties: false
+      type: object
+      required:
+      - actions
+      title: BulkBody[BulkTaskInstanceBody]
+    BulkBody_ConnectionBody_:
+      properties:
+        actions:
+          items:
+            oneOf:
+            - $ref: '#/components/schemas/BulkCreateAction_ConnectionBody_'
+            - $ref: '#/components/schemas/BulkUpdateAction_ConnectionBody_'
+            - $ref: '#/components/schemas/BulkDeleteAction_ConnectionBody_'
+          type: array
+          title: Actions
+      additionalProperties: false
+      type: object
+      required:
+      - actions
+      title: BulkBody[ConnectionBody]
+    BulkBody_PoolBody_:
+      properties:
+        actions:
+          items:
+            oneOf:
+            - $ref: '#/components/schemas/BulkCreateAction_PoolBody_'
+            - $ref: '#/components/schemas/BulkUpdateAction_PoolBody_'
+            - $ref: '#/components/schemas/BulkDeleteAction_PoolBody_'
+          type: array
+          title: Actions
+      additionalProperties: false
+      type: object
+      required:
+      - actions
+      title: BulkBody[PoolBody]
+    BulkBody_VariableBody_:
+      properties:
+        actions:
+          items:
+            oneOf:
+            - $ref: '#/components/schemas/BulkCreateAction_VariableBody_'
+            - $ref: '#/components/schemas/BulkUpdateAction_VariableBody_'
+            - $ref: '#/components/schemas/BulkDeleteAction_VariableBody_'
+          type: array
+          title: Actions
+      additionalProperties: false
+      type: object
+      required:
+      - actions
+      title: BulkBody[VariableBody]
+    BulkCreateAction_BulkTaskInstanceBody_:
+      properties:
+        action:
+          type: string
+          const: create
+          title: Action
+          description: The action to be performed on the entities.
+        entities:
+          items:
+            $ref: '#/components/schemas/BulkTaskInstanceBody'
+          type: array
+          title: Entities
+          description: A list of entities to be created.
+ action_on_existence: + $ref: '#/components/schemas/BulkActionOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkCreateAction[BulkTaskInstanceBody] + BulkCreateAction_ConnectionBody_: + properties: + action: + type: string + const: create + title: Action + description: The action to be performed on the entities. + entities: + items: + $ref: '#/components/schemas/ConnectionBody' + type: array + title: Entities + description: A list of entities to be created. + action_on_existence: + $ref: '#/components/schemas/BulkActionOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkCreateAction[ConnectionBody] + BulkCreateAction_PoolBody_: + properties: + action: + type: string + const: create + title: Action + description: The action to be performed on the entities. + entities: + items: + $ref: '#/components/schemas/PoolBody' + type: array + title: Entities + description: A list of entities to be created. + action_on_existence: + $ref: '#/components/schemas/BulkActionOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkCreateAction[PoolBody] + BulkCreateAction_VariableBody_: + properties: + action: + type: string + const: create + title: Action + description: The action to be performed on the entities. + entities: + items: + $ref: '#/components/schemas/VariableBody' + type: array + title: Entities + description: A list of entities to be created. + action_on_existence: + $ref: '#/components/schemas/BulkActionOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkCreateAction[VariableBody] + BulkDeleteAction_BulkTaskInstanceBody_: + properties: + action: + type: string + const: delete + title: Action + description: The action to be performed on the entities. + entities: + items: + type: string + type: array + title: Entities + description: A list of entity id/key to be deleted. + action_on_non_existence: + $ref: '#/components/schemas/BulkActionNotOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkDeleteAction[BulkTaskInstanceBody] + BulkDeleteAction_ConnectionBody_: + properties: + action: + type: string + const: delete + title: Action + description: The action to be performed on the entities. + entities: + items: + type: string + type: array + title: Entities + description: A list of entity id/key to be deleted. + action_on_non_existence: + $ref: '#/components/schemas/BulkActionNotOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkDeleteAction[ConnectionBody] + BulkDeleteAction_PoolBody_: + properties: + action: + type: string + const: delete + title: Action + description: The action to be performed on the entities. + entities: + items: + type: string + type: array + title: Entities + description: A list of entity id/key to be deleted. + action_on_non_existence: + $ref: '#/components/schemas/BulkActionNotOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkDeleteAction[PoolBody] + BulkDeleteAction_VariableBody_: + properties: + action: + type: string + const: delete + title: Action + description: The action to be performed on the entities. 
+        entities:
+          items:
+            type: string
+          type: array
+          title: Entities
+          description: A list of entity id/key to be deleted.
+        action_on_non_existence:
+          $ref: '#/components/schemas/BulkActionNotOnExistence'
+          default: fail
+      additionalProperties: false
+      type: object
+      required:
+      - action
+      - entities
+      title: BulkDeleteAction[VariableBody]
+    BulkResponse:
+      properties:
+        create:
+          anyOf:
+          - $ref: '#/components/schemas/BulkActionResponse'
+          - type: 'null'
+          description: Details of the bulk create operation, including successful
+            keys and errors.
+        update:
+          anyOf:
+          - $ref: '#/components/schemas/BulkActionResponse'
+          - type: 'null'
+          description: Details of the bulk update operation, including successful
+            keys and errors.
+        delete:
+          anyOf:
+          - $ref: '#/components/schemas/BulkActionResponse'
+          - type: 'null'
+          description: Details of the bulk delete operation, including successful
+            keys and errors.
+      type: object
+      title: BulkResponse
+      description: 'Serializer for responses to bulk entity operations.
+
+
+        This represents the results of create, update, and delete actions performed
+        on entities in bulk.
+
+        Each action (if requested) is represented as a field containing details about
+        successful keys and any encountered errors.
+
+        Fields are populated in the response only if the respective action was part
+        of the request; otherwise they are set to None.'
+    BulkTaskInstanceBody:
+      properties:
+        new_state:
+          anyOf:
+          - $ref: '#/components/schemas/TaskInstanceState'
+          - type: 'null'
+        note:
+          anyOf:
+          - type: string
+            maxLength: 1000
+          - type: 'null'
+          title: Note
+        include_upstream:
+          type: boolean
+          title: Include Upstream
+          default: false
+        include_downstream:
+          type: boolean
+          title: Include Downstream
+          default: false
+        include_future:
+          type: boolean
+          title: Include Future
+          default: false
+        include_past:
+          type: boolean
+          title: Include Past
+          default: false
+        task_id:
+          type: string
+          title: Task Id
+        map_index:
+          anyOf:
+          - type: integer
+          - type: 'null'
+          title: Map Index
+      additionalProperties: false
+      type: object
+      required:
+      - task_id
+      title: BulkTaskInstanceBody
+      description: Request body for bulk update and delete of task instances.
+    BulkUpdateAction_BulkTaskInstanceBody_:
+      properties:
+        action:
+          type: string
+          const: update
+          title: Action
+          description: The action to be performed on the entities.
+        entities:
+          items:
+            $ref: '#/components/schemas/BulkTaskInstanceBody'
+          type: array
+          title: Entities
+          description: A list of entities to be updated.
+        action_on_non_existence:
+          $ref: '#/components/schemas/BulkActionNotOnExistence'
+          default: fail
+      additionalProperties: false
+      type: object
+      required:
+      - action
+      - entities
+      title: BulkUpdateAction[BulkTaskInstanceBody]
+    BulkUpdateAction_ConnectionBody_:
+      properties:
+        action:
+          type: string
+          const: update
+          title: Action
+          description: The action to be performed on the entities.
+        entities:
+          items:
+            $ref: '#/components/schemas/ConnectionBody'
+          type: array
+          title: Entities
+          description: A list of entities to be updated.
+        action_on_non_existence:
+          $ref: '#/components/schemas/BulkActionNotOnExistence'
+          default: fail
+      additionalProperties: false
+      type: object
+      required:
+      - action
+      - entities
+      title: BulkUpdateAction[ConnectionBody]
+    BulkUpdateAction_PoolBody_:
+      properties:
+        action:
+          type: string
+          const: update
+          title: Action
+          description: The action to be performed on the entities.
+ entities: + items: + $ref: '#/components/schemas/PoolBody' + type: array + title: Entities + description: A list of entities to be updated. + action_on_non_existence: + $ref: '#/components/schemas/BulkActionNotOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkUpdateAction[PoolBody] + BulkUpdateAction_VariableBody_: + properties: + action: + type: string + const: update + title: Action + description: The action to be performed on the entities. + entities: + items: + $ref: '#/components/schemas/VariableBody' + type: array + title: Entities + description: A list of entities to be updated. + action_on_non_existence: + $ref: '#/components/schemas/BulkActionNotOnExistence' + default: fail + additionalProperties: false + type: object + required: + - action + - entities + title: BulkUpdateAction[VariableBody] + ClearTaskInstancesBody: + properties: + dry_run: + type: boolean + title: Dry Run + default: true + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + only_failed: + type: boolean + title: Only Failed + default: true + only_running: + type: boolean + title: Only Running + default: false + reset_dag_runs: + type: boolean + title: Reset Dag Runs + default: true + task_ids: + anyOf: + - items: + anyOf: + - type: string + - prefixItems: + - type: string + - type: integer + type: array + maxItems: 2 + minItems: 2 + type: array + - type: 'null' + title: Task Ids + dag_run_id: + anyOf: + - type: string + - type: 'null' + title: Dag Run Id + include_upstream: + type: boolean + title: Include Upstream + default: false + include_downstream: + type: boolean + title: Include Downstream + default: false + include_future: + type: boolean + title: Include Future + default: false + include_past: + type: boolean + title: Include Past + default: false + run_on_latest_version: + type: boolean + title: Run On Latest Version + description: (Experimental) Run on the latest bundle version of the dag + after clearing the task instances. + default: false + additionalProperties: false + type: object + title: ClearTaskInstancesBody + description: Request body for Clear Task Instances endpoint. + Config: + properties: + sections: + items: + $ref: '#/components/schemas/ConfigSection' + type: array + title: Sections + additionalProperties: false + type: object + required: + - sections + title: Config + description: List of config sections with their options. + ConfigOption: + properties: + key: + type: string + title: Key + value: + anyOf: + - type: string + - prefixItems: + - type: string + - type: string + type: array + maxItems: 2 + minItems: 2 + title: Value + additionalProperties: false + type: object + required: + - key + - value + title: ConfigOption + description: Config option. + ConfigSection: + properties: + name: + type: string + title: Name + options: + items: + $ref: '#/components/schemas/ConfigOption' + type: array + title: Options + additionalProperties: false + type: object + required: + - name + - options + title: ConfigSection + description: Config Section Schema. 
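The `Bulk*` schemas above compose into a single request body. A sketch of a `bulk_variables` call against the `PATCH /api/v2/variables` operation defined earlier in this file; the `VariableBody` fields shown (`key`/`value`) are assumed from context, as that schema appears elsewhere in the spec.

```python
# Sketch only: base URL, token, and VariableBody fields are assumptions.
import requests

actions = {
    "actions": [
        {
            "action": "create",
            "entities": [{"key": "region", "value": "eu-west-1"}],
            "action_on_existence": "overwrite",  # fail | skip | overwrite
        },
        {
            "action": "delete",
            "entities": ["obsolete_key"],        # delete takes id/keys, not bodies
            "action_on_non_existence": "skip",   # fail | skip
        },
    ]
}
resp = requests.patch(
    "http://localhost:8080/api/v2/variables",
    json=actions,
    headers={"Authorization": "Bearer ..."},
)
# BulkResponse: one BulkActionResponse per requested action,
# each with `success` keys and `errors`; omitted actions come back as null.
print(resp.json())
```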
+ ConnectionBody: + properties: + connection_id: + type: string + maxLength: 200 + pattern: ^[\w.-]+$ + title: Connection Id + conn_type: + type: string + title: Conn Type + description: + anyOf: + - type: string + - type: 'null' + title: Description + host: + anyOf: + - type: string + - type: 'null' + title: Host + login: + anyOf: + - type: string + - type: 'null' + title: Login + schema: + anyOf: + - type: string + - type: 'null' + title: Schema + port: + anyOf: + - type: integer + - type: 'null' + title: Port + password: + anyOf: + - type: string + - type: 'null' + title: Password + extra: + anyOf: + - type: string + - type: 'null' + title: Extra + additionalProperties: false + type: object + required: + - connection_id + - conn_type + title: ConnectionBody + description: Connection Serializer for requests body. + ConnectionCollectionResponse: + properties: + connections: + items: + $ref: '#/components/schemas/ConnectionResponse' + type: array + title: Connections + total_entries: + type: integer + title: Total Entries + type: object + required: + - connections + - total_entries + title: ConnectionCollectionResponse + description: Connection Collection serializer for responses. + ConnectionResponse: + properties: + connection_id: + type: string + title: Connection Id + conn_type: + type: string + title: Conn Type + description: + anyOf: + - type: string + - type: 'null' + title: Description + host: + anyOf: + - type: string + - type: 'null' + title: Host + login: + anyOf: + - type: string + - type: 'null' + title: Login + schema: + anyOf: + - type: string + - type: 'null' + title: Schema + port: + anyOf: + - type: integer + - type: 'null' + title: Port + password: + anyOf: + - type: string + - type: 'null' + title: Password + extra: + anyOf: + - type: string + - type: 'null' + title: Extra + type: object + required: + - connection_id + - conn_type + - description + - host + - login + - schema + - port + - password + - extra + title: ConnectionResponse + description: Connection serializer for responses. + ConnectionTestResponse: + properties: + status: + type: boolean + title: Status + message: + type: string + title: Message + type: object + required: + - status + - message + title: ConnectionTestResponse + description: Connection Test serializer for responses. + CreateAssetEventsBody: + properties: + asset_id: + type: integer + title: Asset Id + extra: + additionalProperties: true + type: object + title: Extra + additionalProperties: false + type: object + required: + - asset_id + title: CreateAssetEventsBody + description: Create asset events request. + DAGCollectionResponse: + properties: + dags: + items: + $ref: '#/components/schemas/DAGResponse' + type: array + title: Dags + total_entries: + type: integer + title: Total Entries + type: object + required: + - dags + - total_entries + title: DAGCollectionResponse + description: DAG Collection serializer for responses. 
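A sketch of a `ConnectionBody` payload per the schema above: `connection_id` must match `^[\w.-]+$` (max 200 characters), `conn_type` is required, and `extra` is a JSON-encoded string rather than an object. The connections endpoint itself is defined earlier in this spec, so the path below is an assumption.

```python
# Sketch only: path, credentials, and values are assumptions.
import requests

conn = {
    "connection_id": "pg_reporting",    # required, must match ^[\w.-]+$
    "conn_type": "postgres",            # required
    "host": "db.internal",
    "port": 5432,
    "login": "report",
    "password": "s3cret",
    "schema": "analytics",
    "extra": '{"sslmode": "require"}',  # string, not an object, per the schema
}
requests.post(
    "http://localhost:8080/api/v2/connections",  # assumed path
    json=conn,
    headers={"Authorization": "Bearer ..."},
)
```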
+ DAGDetailsResponse: + properties: + dag_id: + type: string + title: Dag Id + dag_display_name: + type: string + title: Dag Display Name + is_paused: + type: boolean + title: Is Paused + is_stale: + type: boolean + title: Is Stale + last_parsed_time: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Parsed Time + last_expired: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Expired + bundle_name: + anyOf: + - type: string + - type: 'null' + title: Bundle Name + bundle_version: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + relative_fileloc: + anyOf: + - type: string + - type: 'null' + title: Relative Fileloc + fileloc: + type: string + title: Fileloc + description: + anyOf: + - type: string + - type: 'null' + title: Description + timetable_summary: + anyOf: + - type: string + - type: 'null' + title: Timetable Summary + timetable_description: + anyOf: + - type: string + - type: 'null' + title: Timetable Description + tags: + items: + $ref: '#/components/schemas/DagTagResponse' + type: array + title: Tags + max_active_tasks: + type: integer + title: Max Active Tasks + max_active_runs: + anyOf: + - type: integer + - type: 'null' + title: Max Active Runs + max_consecutive_failed_dag_runs: + type: integer + title: Max Consecutive Failed Dag Runs + has_task_concurrency_limits: + type: boolean + title: Has Task Concurrency Limits + has_import_errors: + type: boolean + title: Has Import Errors + next_dagrun_logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Logical Date + next_dagrun_data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Data Interval Start + next_dagrun_data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Data Interval End + next_dagrun_run_after: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Run After + owners: + items: + type: string + type: array + title: Owners + catchup: + type: boolean + title: Catchup + dag_run_timeout: + anyOf: + - type: string + format: duration + - type: 'null' + title: Dag Run Timeout + asset_expression: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Asset Expression + doc_md: + anyOf: + - type: string + - type: 'null' + title: Doc Md + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + is_paused_upon_creation: + anyOf: + - type: boolean + - type: 'null' + title: Is Paused Upon Creation + params: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Params + render_template_as_native_obj: + type: boolean + title: Render Template As Native Obj + template_search_path: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Template Search Path + timezone: + anyOf: + - type: string + - type: 'null' + title: Timezone + last_parsed: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Parsed + default_args: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Default Args + owner_links: + anyOf: + - additionalProperties: + type: string + type: object + - type: 'null' + title: Owner Links + file_token: + type: string + title: File Token + description: Return file token. 
+ readOnly: true + concurrency: + type: integer + title: Concurrency + description: Return max_active_tasks as concurrency. + readOnly: true + latest_dag_version: + anyOf: + - $ref: '#/components/schemas/DagVersionResponse' + - type: 'null' + description: Return the latest DagVersion. + readOnly: true + type: object + required: + - dag_id + - dag_display_name + - is_paused + - is_stale + - last_parsed_time + - last_expired + - bundle_name + - bundle_version + - relative_fileloc + - fileloc + - description + - timetable_summary + - timetable_description + - tags + - max_active_tasks + - max_active_runs + - max_consecutive_failed_dag_runs + - has_task_concurrency_limits + - has_import_errors + - next_dagrun_logical_date + - next_dagrun_data_interval_start + - next_dagrun_data_interval_end + - next_dagrun_run_after + - owners + - catchup + - dag_run_timeout + - asset_expression + - doc_md + - start_date + - end_date + - is_paused_upon_creation + - params + - render_template_as_native_obj + - template_search_path + - timezone + - last_parsed + - default_args + - file_token + - concurrency + - latest_dag_version + title: DAGDetailsResponse + description: Specific serializer for DAG Details responses. + DAGPatchBody: + properties: + is_paused: + type: boolean + title: Is Paused + additionalProperties: false + type: object + required: + - is_paused + title: DAGPatchBody + description: Dag Serializer for updatable bodies. + DAGResponse: + properties: + dag_id: + type: string + title: Dag Id + dag_display_name: + type: string + title: Dag Display Name + is_paused: + type: boolean + title: Is Paused + is_stale: + type: boolean + title: Is Stale + last_parsed_time: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Parsed Time + last_expired: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Expired + bundle_name: + anyOf: + - type: string + - type: 'null' + title: Bundle Name + bundle_version: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + relative_fileloc: + anyOf: + - type: string + - type: 'null' + title: Relative Fileloc + fileloc: + type: string + title: Fileloc + description: + anyOf: + - type: string + - type: 'null' + title: Description + timetable_summary: + anyOf: + - type: string + - type: 'null' + title: Timetable Summary + timetable_description: + anyOf: + - type: string + - type: 'null' + title: Timetable Description + tags: + items: + $ref: '#/components/schemas/DagTagResponse' + type: array + title: Tags + max_active_tasks: + type: integer + title: Max Active Tasks + max_active_runs: + anyOf: + - type: integer + - type: 'null' + title: Max Active Runs + max_consecutive_failed_dag_runs: + type: integer + title: Max Consecutive Failed Dag Runs + has_task_concurrency_limits: + type: boolean + title: Has Task Concurrency Limits + has_import_errors: + type: boolean + title: Has Import Errors + next_dagrun_logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Logical Date + next_dagrun_data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Data Interval Start + next_dagrun_data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Data Interval End + next_dagrun_run_after: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Run After + owners: + items: + type: string + type: array + title: Owners + file_token: + type: string + title: File Token + 
description: Return file token. + readOnly: true + type: object + required: + - dag_id + - dag_display_name + - is_paused + - is_stale + - last_parsed_time + - last_expired + - bundle_name + - bundle_version + - relative_fileloc + - fileloc + - description + - timetable_summary + - timetable_description + - tags + - max_active_tasks + - max_active_runs + - max_consecutive_failed_dag_runs + - has_task_concurrency_limits + - has_import_errors + - next_dagrun_logical_date + - next_dagrun_data_interval_start + - next_dagrun_data_interval_end + - next_dagrun_run_after + - owners + - file_token + title: DAGResponse + description: DAG serializer for responses. + DAGRunClearBody: + properties: + dry_run: + type: boolean + title: Dry Run + default: true + only_failed: + type: boolean + title: Only Failed + default: false + run_on_latest_version: + type: boolean + title: Run On Latest Version + description: (Experimental) Run on the latest bundle version of the Dag + after clearing the Dag Run. + default: false + additionalProperties: false + type: object + title: DAGRunClearBody + description: DAG Run serializer for clear endpoint body. + DAGRunCollectionResponse: + properties: + dag_runs: + items: + $ref: '#/components/schemas/DAGRunResponse' + type: array + title: Dag Runs + total_entries: + type: integer + title: Total Entries + type: object + required: + - dag_runs + - total_entries + title: DAGRunCollectionResponse + description: DAG Run Collection serializer for responses. + DAGRunPatchBody: + properties: + state: + anyOf: + - $ref: '#/components/schemas/DAGRunPatchStates' + - type: 'null' + note: + anyOf: + - type: string + maxLength: 1000 + - type: 'null' + title: Note + additionalProperties: false + type: object + title: DAGRunPatchBody + description: DAG Run Serializer for PATCH requests. + DAGRunPatchStates: + type: string + enum: + - queued + - success + - failed + title: DAGRunPatchStates + description: Enum for DAG Run states when updating a DAG Run. 
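A sketch of a `DAGRunPatchBody` request: `state` is restricted to the `DAGRunPatchStates` values (`queued`, `success`, `failed`) and `note` to 1000 characters. The dag-run PATCH operation appears earlier in this spec, so the path below is an assumption.

```python
# Sketch only: path and token are assumptions; both body fields are optional.
import requests

requests.patch(
    "http://localhost:8080/api/v2/dags/etl/dagRuns/manual__2025-01-01T00:00:00+00:00",
    json={"state": "success", "note": "Marked success after manual verification."},
    headers={"Authorization": "Bearer ..."},
)
```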
+ DAGRunResponse: + properties: + dag_run_id: + type: string + title: Dag Run Id + dag_id: + type: string + title: Dag Id + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + queued_at: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Queued At + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + duration: + anyOf: + - type: number + - type: 'null' + title: Duration + data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval Start + data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval End + run_after: + type: string + format: date-time + title: Run After + last_scheduling_decision: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Scheduling Decision + run_type: + $ref: '#/components/schemas/DagRunType' + state: + $ref: '#/components/schemas/DagRunState' + triggered_by: + anyOf: + - $ref: '#/components/schemas/DagRunTriggeredByType' + - type: 'null' + triggering_user_name: + anyOf: + - type: string + - type: 'null' + title: Triggering User Name + conf: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Conf + note: + anyOf: + - type: string + - type: 'null' + title: Note + dag_versions: + items: + $ref: '#/components/schemas/DagVersionResponse' + type: array + title: Dag Versions + bundle_version: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + dag_display_name: + type: string + title: Dag Display Name + type: object + required: + - dag_run_id + - dag_id + - logical_date + - queued_at + - start_date + - end_date + - duration + - data_interval_start + - data_interval_end + - run_after + - last_scheduling_decision + - run_type + - state + - triggered_by + - triggering_user_name + - conf + - note + - dag_versions + - bundle_version + - dag_display_name + title: DAGRunResponse + description: DAG Run serializer for responses. 
+ DAGRunsBatchBody: + properties: + order_by: + anyOf: + - type: string + - type: 'null' + title: Order By + page_offset: + type: integer + minimum: 0.0 + title: Page Offset + default: 0 + page_limit: + type: integer + minimum: 0.0 + title: Page Limit + default: 100 + dag_ids: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Dag Ids + states: + anyOf: + - items: + anyOf: + - $ref: '#/components/schemas/DagRunState' + - type: 'null' + type: array + - type: 'null' + title: States + run_after_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Gte + run_after_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Lte + logical_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Gte + logical_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Lte + start_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + start_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + end_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + end_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte + additionalProperties: false + type: object + title: DAGRunsBatchBody + description: List DAG Runs body for batch endpoint. + DAGSourceResponse: + properties: + content: + anyOf: + - type: string + - type: 'null' + title: Content + dag_id: + type: string + title: Dag Id + version_number: + anyOf: + - type: integer + - type: 'null' + title: Version Number + dag_display_name: + type: string + title: Dag Display Name + type: object + required: + - content + - dag_id + - version_number + - dag_display_name + title: DAGSourceResponse + description: DAG Source serializer for responses. + DAGTagCollectionResponse: + properties: + tags: + items: + type: string + type: array + title: Tags + total_entries: + type: integer + title: Total Entries + type: object + required: + - tags + - total_entries + title: DAGTagCollectionResponse + description: DAG Tags Collection serializer for responses. + DAGVersionCollectionResponse: + properties: + dag_versions: + items: + $ref: '#/components/schemas/DagVersionResponse' + type: array + title: Dag Versions + total_entries: + type: integer + title: Total Entries + type: object + required: + - dag_versions + - total_entries + title: DAGVersionCollectionResponse + description: DAG Version Collection serializer for responses. + DAGWarningCollectionResponse: + properties: + dag_warnings: + items: + $ref: '#/components/schemas/DAGWarningResponse' + type: array + title: Dag Warnings + total_entries: + type: integer + title: Total Entries + type: object + required: + - dag_warnings + - total_entries + title: DAGWarningCollectionResponse + description: DAG warning collection serializer for responses. + DAGWarningResponse: + properties: + dag_id: + type: string + title: Dag Id + warning_type: + $ref: '#/components/schemas/DagWarningType' + message: + type: string + title: Message + timestamp: + type: string + format: date-time + title: Timestamp + type: object + required: + - dag_id + - warning_type + - message + - timestamp + title: DAGWarningResponse + description: DAG Warning serializer for responses. 
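A sketch of a `DAGRunsBatchBody` filter payload: every field is optional, and paging defaults to `page_offset` 0 with `page_limit` 100. The batch-list operation is defined earlier in this spec, so the path below is an assumption.

```python
# Sketch only: path and token are assumptions.
import requests

body = {
    "dag_ids": ["etl", "reporting"],
    "states": ["failed"],
    "start_date_gte": "2025-01-01T00:00:00Z",
    "order_by": "-run_after",
    "page_limit": 20,
}
resp = requests.post(
    "http://localhost:8080/api/v2/dags/~/dagRuns/list",  # assumed path
    json=body,
    headers={"Authorization": "Bearer ..."},
)
print(resp.json())
```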
+ DagProcessorInfoResponse: + properties: + status: + anyOf: + - type: string + - type: 'null' + title: Status + latest_dag_processor_heartbeat: + anyOf: + - type: string + - type: 'null' + title: Latest Dag Processor Heartbeat + type: object + required: + - status + - latest_dag_processor_heartbeat + title: DagProcessorInfoResponse + description: DagProcessor info serializer for responses. + DagRunAssetReference: + properties: + run_id: + type: string + title: Run Id + dag_id: + type: string + title: Dag Id + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + start_date: + type: string + format: date-time + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + state: + type: string + title: State + data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval Start + data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval End + additionalProperties: false + type: object + required: + - run_id + - dag_id + - logical_date + - start_date + - end_date + - state + - data_interval_start + - data_interval_end + title: DagRunAssetReference + description: DAGRun serializer for asset responses. + DagRunState: + type: string + enum: + - queued + - running + - success + - failed + title: DagRunState + description: 'All possible states that a DagRun can be in. + + + These are "shared" with TaskInstanceState in some parts of the code, + + so please ensure that their values always match the ones with the + + same name in TaskInstanceState.' + DagRunTriggeredByType: + type: string + enum: + - cli + - operator + - rest_api + - ui + - test + - timetable + - asset + - backfill + title: DagRunTriggeredByType + description: Class with TriggeredBy types for DagRun. + DagRunType: + type: string + enum: + - backfill + - scheduled + - manual + - asset_triggered + title: DagRunType + description: Class with DagRun types. + DagScheduleAssetReference: + properties: + dag_id: + type: string + title: Dag Id + created_at: + type: string + format: date-time + title: Created At + updated_at: + type: string + format: date-time + title: Updated At + additionalProperties: false + type: object + required: + - dag_id + - created_at + - updated_at + title: DagScheduleAssetReference + description: DAG schedule reference serializer for assets. + DagStatsCollectionResponse: + properties: + dags: + items: + $ref: '#/components/schemas/DagStatsResponse' + type: array + title: Dags + total_entries: + type: integer + title: Total Entries + type: object + required: + - dags + - total_entries + title: DagStatsCollectionResponse + description: DAG Stats Collection serializer for responses. + DagStatsResponse: + properties: + dag_id: + type: string + title: Dag Id + dag_display_name: + type: string + title: Dag Display Name + stats: + items: + $ref: '#/components/schemas/DagStatsStateResponse' + type: array + title: Stats + type: object + required: + - dag_id + - dag_display_name + - stats + title: DagStatsResponse + description: DAG Stats serializer for responses. + DagStatsStateResponse: + properties: + state: + $ref: '#/components/schemas/DagRunState' + count: + type: integer + title: Count + type: object + required: + - state + - count + title: DagStatsStateResponse + description: DagStatsState serializer for responses. 
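A small sketch of consuming the `DagStatsCollectionResponse` / `DagStatsStateResponse` shapes above, flattening per-DAG state counts into a plain mapping.

```python
# Sketch: flatten a DagStatsCollectionResponse into {dag_id: {state: count}}.
def flatten_dag_stats(collection: dict) -> dict[str, dict[str, int]]:
    return {
        dag["dag_id"]: {s["state"]: s["count"] for s in dag["stats"]}
        for dag in collection["dags"]
    }

sample = {  # invented data in the documented shape
    "dags": [
        {
            "dag_id": "etl",
            "dag_display_name": "ETL",
            "stats": [{"state": "success", "count": 10}, {"state": "failed", "count": 2}],
        }
    ],
    "total_entries": 1,
}
assert flatten_dag_stats(sample) == {"etl": {"success": 10, "failed": 2}}
```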
+ DagTagResponse: + properties: + name: + type: string + title: Name + dag_id: + type: string + title: Dag Id + type: object + required: + - name + - dag_id + title: DagTagResponse + description: DAG Tag serializer for responses. + DagVersionResponse: + properties: + id: + type: string + format: uuid + title: Id + version_number: + type: integer + title: Version Number + dag_id: + type: string + title: Dag Id + bundle_name: + anyOf: + - type: string + - type: 'null' + title: Bundle Name + bundle_version: + anyOf: + - type: string + - type: 'null' + title: Bundle Version + created_at: + type: string + format: date-time + title: Created At + dag_display_name: + type: string + title: Dag Display Name + bundle_url: + anyOf: + - type: string + - type: 'null' + title: Bundle Url + readOnly: true + type: object + required: + - id + - version_number + - dag_id + - bundle_name + - bundle_version + - created_at + - dag_display_name + - bundle_url + title: DagVersionResponse + description: Dag Version serializer for responses. + DagWarningType: + type: string + enum: + - asset conflict + - non-existent pool + title: DagWarningType + description: 'Enum for DAG warning types. + + + This is the set of allowable values for the ``warning_type`` field + + in the DagWarning model.' + DryRunBackfillCollectionResponse: + properties: + backfills: + items: + $ref: '#/components/schemas/DryRunBackfillResponse' + type: array + title: Backfills + total_entries: + type: integer + title: Total Entries + type: object + required: + - backfills + - total_entries + title: DryRunBackfillCollectionResponse + description: Backfill collection serializer for responses in dry-run mode. + DryRunBackfillResponse: + properties: + logical_date: + type: string + format: date-time + title: Logical Date + type: object + required: + - logical_date + title: DryRunBackfillResponse + description: Backfill serializer for responses in dry-run mode. + EventLogCollectionResponse: + properties: + event_logs: + items: + $ref: '#/components/schemas/EventLogResponse' + type: array + title: Event Logs + total_entries: + type: integer + title: Total Entries + type: object + required: + - event_logs + - total_entries + title: EventLogCollectionResponse + description: Event Log Collection Response. + EventLogResponse: + properties: + event_log_id: + type: integer + title: Event Log Id + when: + type: string + format: date-time + title: When + dag_id: + anyOf: + - type: string + - type: 'null' + title: Dag Id + task_id: + anyOf: + - type: string + - type: 'null' + title: Task Id + run_id: + anyOf: + - type: string + - type: 'null' + title: Run Id + map_index: + anyOf: + - type: integer + - type: 'null' + title: Map Index + try_number: + anyOf: + - type: integer + - type: 'null' + title: Try Number + event: + type: string + title: Event + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + owner: + anyOf: + - type: string + - type: 'null' + title: Owner + extra: + anyOf: + - type: string + - type: 'null' + title: Extra + dag_display_name: + anyOf: + - type: string + - type: 'null' + title: Dag Display Name + type: object + required: + - event_log_id + - when + - dag_id + - task_id + - run_id + - map_index + - try_number + - event + - logical_date + - owner + - extra + title: EventLogResponse + description: Event Log Response. 
+ ExternalLogUrlResponse: + properties: + url: + type: string + title: Url + type: object + required: + - url + title: ExternalLogUrlResponse + description: Response for the external log URL endpoint. + ExternalViewResponse: + properties: + name: + type: string + title: Name + icon: + anyOf: + - type: string + - type: 'null' + title: Icon + icon_dark_mode: + anyOf: + - type: string + - type: 'null' + title: Icon Dark Mode + url_route: + anyOf: + - type: string + - type: 'null' + title: Url Route + category: + anyOf: + - type: string + - type: 'null' + title: Category + href: + type: string + title: Href + destination: + type: string + enum: + - nav + - dag + - dag_run + - task + - task_instance + title: Destination + default: nav + additionalProperties: true + type: object + required: + - name + - href + title: ExternalViewResponse + description: Serializer for External View Plugin responses. + ExtraLinkCollectionResponse: + properties: + extra_links: + additionalProperties: + anyOf: + - type: string + - type: 'null' + type: object + title: Extra Links + total_entries: + type: integer + title: Total Entries + type: object + required: + - extra_links + - total_entries + title: ExtraLinkCollectionResponse + description: Extra Links Response. + FastAPIAppResponse: + properties: + app: + type: string + title: App + url_prefix: + type: string + title: Url Prefix + name: + type: string + title: Name + additionalProperties: true + type: object + required: + - app + - url_prefix + - name + title: FastAPIAppResponse + description: Serializer for Plugin FastAPI App responses. + FastAPIRootMiddlewareResponse: + properties: + middleware: + type: string + title: Middleware + name: + type: string + title: Name + additionalProperties: true + type: object + required: + - middleware + - name + title: FastAPIRootMiddlewareResponse + description: Serializer for Plugin FastAPI root middleware responses. + HITLDetail: + properties: + task_instance: + $ref: '#/components/schemas/TaskInstanceResponse' + options: + items: + type: string + type: array + minItems: 1 + title: Options + subject: + type: string + title: Subject + body: + anyOf: + - type: string + - type: 'null' + title: Body + defaults: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Defaults + multiple: + type: boolean + title: Multiple + default: false + params: + additionalProperties: true + type: object + title: Params + user_id: + anyOf: + - type: string + - type: 'null' + title: User Id + response_at: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Response At + chosen_options: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Chosen Options + params_input: + additionalProperties: true + type: object + title: Params Input + response_received: + type: boolean + title: Response Received + default: false + type: object + required: + - task_instance + - options + - subject + title: HITLDetail + description: Schema for Human-in-the-loop detail. + HITLDetailCollection: + properties: + hitl_details: + items: + $ref: '#/components/schemas/HITLDetail' + type: array + title: Hitl Details + total_entries: + type: integer + title: Total Entries + type: object + required: + - hitl_details + - total_entries + title: HITLDetailCollection + description: Schema for a collection of Human-in-the-loop details. 
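For orientation, a minimal dict matching the `HITLDetail` schema above. Only `task_instance`, `options`, and `subject` are required; the remaining keys show their schema defaults. The concrete values are invented for illustration.

```python
# Invented example values; the shape follows the HITLDetail schema above.
hitl_detail = {
    "task_instance": ...,                # a full TaskInstanceResponse, elided here
    "options": ["approve", "reject"],    # minItems: 1
    "subject": "Approve production deploy?",
    "body": None,                        # nullable free-text prompt body
    "multiple": False,                   # default: single-choice prompt
    "response_received": False,          # default: no human response yet
}
```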
+ HITLDetailResponse: + properties: + user_id: + type: string + title: User Id + response_at: + type: string + format: date-time + title: Response At + chosen_options: + items: + type: string + type: array + minItems: 1 + title: Chosen Options + params_input: + additionalProperties: true + type: object + title: Params Input + type: object + required: + - user_id + - response_at + - chosen_options + title: HITLDetailResponse + description: Response of updating a Human-in-the-loop detail. + HTTPExceptionResponse: + properties: + detail: + anyOf: + - type: string + - additionalProperties: true + type: object + title: Detail + type: object + required: + - detail + title: HTTPExceptionResponse + description: HTTPException Model used for error response. + HTTPValidationError: + properties: + detail: + items: + $ref: '#/components/schemas/ValidationError' + type: array + title: Detail + type: object + title: HTTPValidationError + HealthInfoResponse: + properties: + metadatabase: + $ref: '#/components/schemas/BaseInfoResponse' + scheduler: + $ref: '#/components/schemas/SchedulerInfoResponse' + triggerer: + $ref: '#/components/schemas/TriggererInfoResponse' + dag_processor: + anyOf: + - $ref: '#/components/schemas/DagProcessorInfoResponse' + - type: 'null' + type: object + required: + - metadatabase + - scheduler + - triggerer + title: HealthInfoResponse + description: Health serializer for responses. + ImportErrorCollectionResponse: + properties: + import_errors: + items: + $ref: '#/components/schemas/ImportErrorResponse' + type: array + title: Import Errors + total_entries: + type: integer + title: Total Entries + type: object + required: + - import_errors + - total_entries + title: ImportErrorCollectionResponse + description: Import Error Collection Response. + ImportErrorResponse: + properties: + import_error_id: + type: integer + title: Import Error Id + timestamp: + type: string + format: date-time + title: Timestamp + filename: + type: string + title: Filename + bundle_name: + anyOf: + - type: string + - type: 'null' + title: Bundle Name + stack_trace: + type: string + title: Stack Trace + type: object + required: + - import_error_id + - timestamp + - filename + - bundle_name + - stack_trace + title: ImportErrorResponse + description: Import Error Response. + JobCollectionResponse: + properties: + jobs: + items: + $ref: '#/components/schemas/JobResponse' + type: array + title: Jobs + total_entries: + type: integer + title: Total Entries + type: object + required: + - jobs + - total_entries + title: JobCollectionResponse + description: Job Collection Response. 
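A hedged sketch of consuming `HealthInfoResponse` above; the `/monitor/health` path is an assumption about where the health route is mounted in a given deployment.

```python
import requests

# Path is an assumption; the response shape follows HealthInfoResponse above.
health = requests.get("http://localhost:8080/api/v2/monitor/health").json()
print(health["metadatabase"]["status"])        # BaseInfoResponse.status
print(health["scheduler"]["status"])           # SchedulerInfoResponse.status
if health.get("dag_processor"):                # nullable per the schema
    print(health["dag_processor"]["latest_dag_processor_heartbeat"])
```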
+ JobResponse: + properties: + id: + type: integer + title: Id + dag_id: + anyOf: + - type: string + - type: 'null' + title: Dag Id + state: + anyOf: + - type: string + - type: 'null' + title: State + job_type: + anyOf: + - type: string + - type: 'null' + title: Job Type + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + latest_heartbeat: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Latest Heartbeat + executor_class: + anyOf: + - type: string + - type: 'null' + title: Executor Class + hostname: + anyOf: + - type: string + - type: 'null' + title: Hostname + unixname: + anyOf: + - type: string + - type: 'null' + title: Unixname + dag_display_name: + anyOf: + - type: string + - type: 'null' + title: Dag Display Name + type: object + required: + - id + - dag_id + - state + - job_type + - start_date + - end_date + - latest_heartbeat + - executor_class + - hostname + - unixname + title: JobResponse + description: Job serializer for responses. + JsonValue: {} + LastAssetEventResponse: + properties: + id: + anyOf: + - type: integer + minimum: 0.0 + - type: 'null' + title: Id + timestamp: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Timestamp + type: object + title: LastAssetEventResponse + description: Last asset event response serializer. + PatchTaskInstanceBody: + properties: + new_state: + anyOf: + - $ref: '#/components/schemas/TaskInstanceState' + - type: 'null' + note: + anyOf: + - type: string + maxLength: 1000 + - type: 'null' + title: Note + include_upstream: + type: boolean + title: Include Upstream + default: false + include_downstream: + type: boolean + title: Include Downstream + default: false + include_future: + type: boolean + title: Include Future + default: false + include_past: + type: boolean + title: Include Past + default: false + additionalProperties: false + type: object + title: PatchTaskInstanceBody + description: Request body for the Patch Task Instance endpoint. + PluginCollectionResponse: + properties: + plugins: + items: + $ref: '#/components/schemas/PluginResponse' + type: array + title: Plugins + total_entries: + type: integer + title: Total Entries + type: object + required: + - plugins + - total_entries + title: PluginCollectionResponse + description: Plugin Collection serializer. + PluginImportErrorCollectionResponse: + properties: + import_errors: + items: + $ref: '#/components/schemas/PluginImportErrorResponse' + type: array + title: Import Errors + total_entries: + type: integer + title: Total Entries + type: object + required: + - import_errors + - total_entries + title: PluginImportErrorCollectionResponse + description: Plugin Import Error Collection serializer. + PluginImportErrorResponse: + properties: + source: + type: string + title: Source + error: + type: string + title: Error + type: object + required: + - source + - error + title: PluginImportErrorResponse + description: Plugin Import Error serializer for responses.
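A hedged example body for the task-instance PATCH route; the field names come from `PatchTaskInstanceBody` above, while the values are invented for illustration.

```python
# Field names from PatchTaskInstanceBody; values invented for illustration.
patch_body = {
    "new_state": "failed",           # a TaskInstanceState value, or null
    "note": "Marked failed after manual investigation",  # <= 1000 chars
    "include_downstream": True,      # propagate the state change downstream
    "include_upstream": False,       # schema defaults shown explicitly
    "include_future": False,
    "include_past": False,
}
```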
+ PluginResponse: + properties: + name: + type: string + title: Name + macros: + items: + type: string + type: array + title: Macros + flask_blueprints: + items: + type: string + type: array + title: Flask Blueprints + fastapi_apps: + items: + $ref: '#/components/schemas/FastAPIAppResponse' + type: array + title: Fastapi Apps + fastapi_root_middlewares: + items: + $ref: '#/components/schemas/FastAPIRootMiddlewareResponse' + type: array + title: Fastapi Root Middlewares + external_views: + items: + $ref: '#/components/schemas/ExternalViewResponse' + type: array + title: External Views + description: Aggregate all external views. Both 'external_views' and 'appbuilder_menu_items' + are included here. + react_apps: + items: + $ref: '#/components/schemas/ReactAppResponse' + type: array + title: React Apps + appbuilder_views: + items: + $ref: '#/components/schemas/AppBuilderViewResponse' + type: array + title: Appbuilder Views + appbuilder_menu_items: + items: + $ref: '#/components/schemas/AppBuilderMenuItemResponse' + type: array + title: Appbuilder Menu Items + deprecated: true + global_operator_extra_links: + items: + type: string + type: array + title: Global Operator Extra Links + operator_extra_links: + items: + type: string + type: array + title: Operator Extra Links + source: + type: string + title: Source + listeners: + items: + type: string + type: array + title: Listeners + timetables: + items: + type: string + type: array + title: Timetables + type: object + required: + - name + - macros + - flask_blueprints + - fastapi_apps + - fastapi_root_middlewares + - external_views + - react_apps + - appbuilder_views + - appbuilder_menu_items + - global_operator_extra_links + - operator_extra_links + - source + - listeners + - timetables + title: PluginResponse + description: Plugin serializer. + PoolBody: + properties: + name: + type: string + maxLength: 256 + title: Name + slots: + type: integer + title: Slots + description: + anyOf: + - type: string + - type: 'null' + title: Description + include_deferred: + type: boolean + title: Include Deferred + default: false + additionalProperties: false + type: object + required: + - name + - slots + title: PoolBody + description: Pool serializer for post bodies. + PoolCollectionResponse: + properties: + pools: + items: + $ref: '#/components/schemas/PoolResponse' + type: array + title: Pools + total_entries: + type: integer + title: Total Entries + type: object + required: + - pools + - total_entries + title: PoolCollectionResponse + description: Pool Collection serializer for responses. + PoolPatchBody: + properties: + pool: + anyOf: + - type: string + - type: 'null' + title: Pool + slots: + anyOf: + - type: integer + - type: 'null' + title: Slots + description: + anyOf: + - type: string + - type: 'null' + title: Description + include_deferred: + anyOf: + - type: boolean + - type: 'null' + title: Include Deferred + additionalProperties: false + type: object + title: PoolPatchBody + description: Pool serializer for patch bodies. 
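`PoolBody` above is a POST body; a hedged sketch of creating a pool with it. The `/pools` path and the token are assumptions; only `name` and `slots` are required by the schema.

```python
import requests

# Path and token are assumptions; the body fields come from PoolBody above.
resp = requests.post(
    "http://localhost:8080/api/v2/pools",
    json={
        "name": "heavy_io",                    # required, maxLength 256
        "slots": 4,                            # required
        "description": "Throttle IO-bound tasks",
        "include_deferred": False,             # schema default
    },
    headers={"Authorization": "Bearer <jwt>"},
)
resp.raise_for_status()
```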
+ PoolResponse: + properties: + name: + type: string + title: Name + slots: + type: integer + title: Slots + description: + anyOf: + - type: string + - type: 'null' + title: Description + include_deferred: + type: boolean + title: Include Deferred + occupied_slots: + type: integer + title: Occupied Slots + running_slots: + type: integer + title: Running Slots + queued_slots: + type: integer + title: Queued Slots + scheduled_slots: + type: integer + title: Scheduled Slots + open_slots: + type: integer + title: Open Slots + deferred_slots: + type: integer + title: Deferred Slots + type: object + required: + - name + - slots + - description + - include_deferred + - occupied_slots + - running_slots + - queued_slots + - scheduled_slots + - open_slots + - deferred_slots + title: PoolResponse + description: Pool serializer for responses. + ProviderCollectionResponse: + properties: + providers: + items: + $ref: '#/components/schemas/ProviderResponse' + type: array + title: Providers + total_entries: + type: integer + title: Total Entries + type: object + required: + - providers + - total_entries + title: ProviderCollectionResponse + description: Provider Collection serializer for responses. + ProviderResponse: + properties: + package_name: + type: string + title: Package Name + description: + type: string + title: Description + version: + type: string + title: Version + type: object + required: + - package_name + - description + - version + title: ProviderResponse + description: Provider serializer for responses. + QueuedEventCollectionResponse: + properties: + queued_events: + items: + $ref: '#/components/schemas/QueuedEventResponse' + type: array + title: Queued Events + total_entries: + type: integer + title: Total Entries + type: object + required: + - queued_events + - total_entries + title: QueuedEventCollectionResponse + description: Queued Event Collection serializer for responses. + QueuedEventResponse: + properties: + dag_id: + type: string + title: Dag Id + asset_id: + type: integer + title: Asset Id + created_at: + type: string + format: date-time + title: Created At + dag_display_name: + type: string + title: Dag Display Name + type: object + required: + - dag_id + - asset_id + - created_at + - dag_display_name + title: QueuedEventResponse + description: Queued Event serializer for responses. + ReactAppResponse: + properties: + name: + type: string + title: Name + icon: + anyOf: + - type: string + - type: 'null' + title: Icon + icon_dark_mode: + anyOf: + - type: string + - type: 'null' + title: Icon Dark Mode + url_route: + anyOf: + - type: string + - type: 'null' + title: Url Route + category: + anyOf: + - type: string + - type: 'null' + title: Category + bundle_url: + type: string + title: Bundle Url + destination: + type: string + enum: + - nav + - dag + - dag_run + - task + - task_instance + - dashboard + title: Destination + default: nav + additionalProperties: true + type: object + required: + - name + - bundle_url + title: ReactAppResponse + description: Serializer for React App Plugin responses. + ReprocessBehavior: + type: string + enum: + - failed + - completed + - none + title: ReprocessBehavior + description: 'Internal enum for setting reprocess behavior in a backfill.
+ + + :meta private:' + SchedulerInfoResponse: + properties: + status: + anyOf: + - type: string + - type: 'null' + title: Status + latest_scheduler_heartbeat: + anyOf: + - type: string + - type: 'null' + title: Latest Scheduler Heartbeat + type: object + required: + - status + - latest_scheduler_heartbeat + title: SchedulerInfoResponse + description: Scheduler info serializer for responses. + StructuredLogMessage: + properties: + timestamp: + type: string + format: date-time + title: Timestamp + event: + type: string + title: Event + additionalProperties: true + type: object + required: + - event + title: StructuredLogMessage + description: An individual log message. + TaskCollectionResponse: + properties: + tasks: + items: + $ref: '#/components/schemas/TaskResponse' + type: array + title: Tasks + total_entries: + type: integer + title: Total Entries + type: object + required: + - tasks + - total_entries + title: TaskCollectionResponse + description: Task collection serializer for responses. + TaskDependencyCollectionResponse: + properties: + dependencies: + items: + $ref: '#/components/schemas/TaskDependencyResponse' + type: array + title: Dependencies + type: object + required: + - dependencies + title: TaskDependencyCollectionResponse + description: Task scheduling dependencies collection serializer for responses. + TaskDependencyResponse: + properties: + name: + type: string + title: Name + reason: + type: string + title: Reason + type: object + required: + - name + - reason + title: TaskDependencyResponse + description: Task Dependency serializer for responses. + TaskInletAssetReference: + properties: + dag_id: + type: string + title: Dag Id + task_id: + type: string + title: Task Id + created_at: + type: string + format: date-time + title: Created At + updated_at: + type: string + format: date-time + title: Updated At + additionalProperties: false + type: object + required: + - dag_id + - task_id + - created_at + - updated_at + title: TaskInletAssetReference + description: Task inlet reference serializer for assets. + TaskInstanceCollectionResponse: + properties: + task_instances: + items: + $ref: '#/components/schemas/TaskInstanceResponse' + type: array + title: Task Instances + total_entries: + type: integer + title: Total Entries + type: object + required: + - task_instances + - total_entries + title: TaskInstanceCollectionResponse + description: Task Instance Collection serializer for responses. + TaskInstanceHistoryCollectionResponse: + properties: + task_instances: + items: + $ref: '#/components/schemas/TaskInstanceHistoryResponse' + type: array + title: Task Instances + total_entries: + type: integer + title: Total Entries + type: object + required: + - task_instances + - total_entries + title: TaskInstanceHistoryCollectionResponse + description: TaskInstanceHistory Collection serializer for responses. 
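The `ReprocessBehavior` enum defined just above controls which existing runs a backfill re-executes. A hedged sketch of a backfill creation body using it; the field names are inferred from the `create_backfill` route changes later in this diff, and the exact endpoint path is an assumption.

```python
# Field names inferred from the create_backfill route later in this diff
# (from_date, to_date, max_active_runs, run_backwards, reprocess_behavior);
# the /backfills path and any omitted fields are assumptions.
backfill_body = {
    "dag_id": "example_dag",
    "from_date": "2025-01-01T00:00:00Z",
    "to_date": "2025-02-01T00:00:00Z",
    "reprocess_behavior": "failed",   # ReprocessBehavior: failed | completed | none
    "max_active_runs": 10,
    "run_backwards": False,
}
```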
+ TaskInstanceHistoryResponse: + properties: + task_id: + type: string + title: Task Id + dag_id: + type: string + title: Dag Id + dag_run_id: + type: string + title: Dag Run Id + map_index: + type: integer + title: Map Index + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + duration: + anyOf: + - type: number + - type: 'null' + title: Duration + state: + anyOf: + - $ref: '#/components/schemas/TaskInstanceState' + - type: 'null' + try_number: + type: integer + title: Try Number + max_tries: + type: integer + title: Max Tries + task_display_name: + type: string + title: Task Display Name + dag_display_name: + type: string + title: Dag Display Name + hostname: + anyOf: + - type: string + - type: 'null' + title: Hostname + unixname: + anyOf: + - type: string + - type: 'null' + title: Unixname + pool: + type: string + title: Pool + pool_slots: + type: integer + title: Pool Slots + queue: + anyOf: + - type: string + - type: 'null' + title: Queue + priority_weight: + anyOf: + - type: integer + - type: 'null' + title: Priority Weight + operator: + anyOf: + - type: string + - type: 'null' + title: Operator + queued_when: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Queued When + scheduled_when: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Scheduled When + pid: + anyOf: + - type: integer + - type: 'null' + title: Pid + executor: + anyOf: + - type: string + - type: 'null' + title: Executor + executor_config: + type: string + title: Executor Config + dag_version: + anyOf: + - $ref: '#/components/schemas/DagVersionResponse' + - type: 'null' + type: object + required: + - task_id + - dag_id + - dag_run_id + - map_index + - start_date + - end_date + - duration + - state + - try_number + - max_tries + - task_display_name + - dag_display_name + - hostname + - unixname + - pool + - pool_slots + - queue + - priority_weight + - operator + - queued_when + - scheduled_when + - pid + - executor + - executor_config + - dag_version + title: TaskInstanceHistoryResponse + description: TaskInstanceHistory serializer for responses. 
+ TaskInstanceResponse: + properties: + id: + type: string + title: Id + task_id: + type: string + title: Task Id + dag_id: + type: string + title: Dag Id + dag_run_id: + type: string + title: Dag Run Id + map_index: + type: integer + title: Map Index + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + run_after: + type: string + format: date-time + title: Run After + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + duration: + anyOf: + - type: number + - type: 'null' + title: Duration + state: + anyOf: + - $ref: '#/components/schemas/TaskInstanceState' + - type: 'null' + try_number: + type: integer + title: Try Number + max_tries: + type: integer + title: Max Tries + task_display_name: + type: string + title: Task Display Name + dag_display_name: + type: string + title: Dag Display Name + hostname: + anyOf: + - type: string + - type: 'null' + title: Hostname + unixname: + anyOf: + - type: string + - type: 'null' + title: Unixname + pool: + type: string + title: Pool + pool_slots: + type: integer + title: Pool Slots + queue: + anyOf: + - type: string + - type: 'null' + title: Queue + priority_weight: + anyOf: + - type: integer + - type: 'null' + title: Priority Weight + operator: + anyOf: + - type: string + - type: 'null' + title: Operator + queued_when: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Queued When + scheduled_when: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Scheduled When + pid: + anyOf: + - type: integer + - type: 'null' + title: Pid + executor: + anyOf: + - type: string + - type: 'null' + title: Executor + executor_config: + type: string + title: Executor Config + note: + anyOf: + - type: string + - type: 'null' + title: Note + rendered_map_index: + anyOf: + - type: string + - type: 'null' + title: Rendered Map Index + rendered_fields: + additionalProperties: true + type: object + title: Rendered Fields + trigger: + anyOf: + - $ref: '#/components/schemas/TriggerResponse' + - type: 'null' + triggerer_job: + anyOf: + - $ref: '#/components/schemas/JobResponse' + - type: 'null' + dag_version: + anyOf: + - $ref: '#/components/schemas/DagVersionResponse' + - type: 'null' + type: object + required: + - id + - task_id + - dag_id + - dag_run_id + - map_index + - logical_date + - run_after + - start_date + - end_date + - duration + - state + - try_number + - max_tries + - task_display_name + - dag_display_name + - hostname + - unixname + - pool + - pool_slots + - queue + - priority_weight + - operator + - queued_when + - scheduled_when + - pid + - executor + - executor_config + - note + - rendered_map_index + - trigger + - triggerer_job + - dag_version + title: TaskInstanceResponse + description: TaskInstance serializer for responses. + TaskInstanceState: + type: string + enum: + - removed + - scheduled + - queued + - running + - success + - restarting + - failed + - up_for_retry + - up_for_reschedule + - upstream_failed + - skipped + - deferred + title: TaskInstanceState + description: 'All possible states that a Task Instance can be in. + + + Note that None is also allowed, so always use this in a type hint with Optional.' 
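The `TaskInstanceState` docstring above warns that `None` is also allowed, so client-side annotations should always make the state optional. A small illustrative helper (the particular set of "finished" states here is one possible choice, not an official definition):

```python
from airflow.utils.state import TaskInstanceState

# Per the docstring above, a task instance's state may be None (no status yet),
# so the annotation must allow it. The "finished" set below is illustrative.
def is_finished(state: TaskInstanceState | None) -> bool:
    return state in {
        TaskInstanceState.SUCCESS,
        TaskInstanceState.FAILED,
        TaskInstanceState.SKIPPED,
        TaskInstanceState.UPSTREAM_FAILED,
        TaskInstanceState.REMOVED,
    }
```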
+ TaskInstancesBatchBody: + properties: + dag_ids: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Dag Ids + dag_run_ids: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Dag Run Ids + task_ids: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Task Ids + state: + anyOf: + - items: + anyOf: + - $ref: '#/components/schemas/TaskInstanceState' + - type: 'null' + type: array + - type: 'null' + title: State + run_after_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Gte + run_after_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After Lte + logical_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Gte + logical_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Lte + start_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Gte + start_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date Lte + end_date_gte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Gte + end_date_lte: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date Lte + duration_gte: + anyOf: + - type: number + - type: 'null' + title: Duration Gte + duration_lte: + anyOf: + - type: number + - type: 'null' + title: Duration Lte + pool: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Pool + queue: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Queue + executor: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Executor + page_offset: + type: integer + minimum: 0.0 + title: Page Offset + default: 0 + page_limit: + type: integer + minimum: 0.0 + title: Page Limit + default: 100 + order_by: + anyOf: + - type: string + - type: 'null' + title: Order By + additionalProperties: false + type: object + title: TaskInstancesBatchBody + description: Task Instance body for get batch. + TaskInstancesLogResponse: + properties: + content: + anyOf: + - items: + $ref: '#/components/schemas/StructuredLogMessage' + type: array + - items: + type: string + type: array + title: Content + continuation_token: + anyOf: + - type: string + - type: 'null' + title: Continuation Token + type: object + required: + - content + - continuation_token + title: TaskInstancesLogResponse + description: Log serializer for responses. + TaskOutletAssetReference: + properties: + dag_id: + type: string + title: Dag Id + task_id: + type: string + title: Task Id + created_at: + type: string + format: date-time + title: Created At + updated_at: + type: string + format: date-time + title: Updated At + additionalProperties: false + type: object + required: + - dag_id + - task_id + - created_at + - updated_at + title: TaskOutletAssetReference + description: Task outlet reference serializer for assets. 
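`TaskInstancesLogResponse` above returns `content` as either structured messages or plain strings, plus a nullable `continuation_token` for paging. A hedged consumption sketch driven purely by that schema:

```python
# "content" is either a list of StructuredLogMessage objects or raw strings,
# per the anyOf above; handle both and hand back the continuation token.
def print_log_page(payload: dict) -> str | None:
    for item in payload["content"]:
        if isinstance(item, dict):             # StructuredLogMessage
            print(item.get("timestamp"), item["event"])  # only "event" is required
        else:                                  # plain string line
            print(item)
    return payload["continuation_token"]       # nullable per the schema
```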
+ TaskResponse: + properties: + task_id: + anyOf: + - type: string + - type: 'null' + title: Task Id + task_display_name: + anyOf: + - type: string + - type: 'null' + title: Task Display Name + owner: + anyOf: + - type: string + - type: 'null' + title: Owner + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + trigger_rule: + anyOf: + - type: string + - type: 'null' + title: Trigger Rule + depends_on_past: + type: boolean + title: Depends On Past + wait_for_downstream: + type: boolean + title: Wait For Downstream + retries: + anyOf: + - type: number + - type: 'null' + title: Retries + queue: + anyOf: + - type: string + - type: 'null' + title: Queue + pool: + anyOf: + - type: string + - type: 'null' + title: Pool + pool_slots: + anyOf: + - type: number + - type: 'null' + title: Pool Slots + execution_timeout: + anyOf: + - $ref: '#/components/schemas/TimeDelta' + - type: 'null' + retry_delay: + anyOf: + - $ref: '#/components/schemas/TimeDelta' + - type: 'null' + retry_exponential_backoff: + type: boolean + title: Retry Exponential Backoff + priority_weight: + anyOf: + - type: number + - type: 'null' + title: Priority Weight + weight_rule: + anyOf: + - type: string + - type: 'null' + title: Weight Rule + ui_color: + anyOf: + - type: string + - type: 'null' + title: Ui Color + ui_fgcolor: + anyOf: + - type: string + - type: 'null' + title: Ui Fgcolor + template_fields: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Template Fields + downstream_task_ids: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Downstream Task Ids + doc_md: + anyOf: + - type: string + - type: 'null' + title: Doc Md + operator_name: + anyOf: + - type: string + - type: 'null' + title: Operator Name + params: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Params + class_ref: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Class Ref + is_mapped: + anyOf: + - type: boolean + - type: 'null' + title: Is Mapped + extra_links: + items: + type: string + type: array + title: Extra Links + description: Extract and return extra_links. + readOnly: true + type: object + required: + - task_id + - task_display_name + - owner + - start_date + - end_date + - trigger_rule + - depends_on_past + - wait_for_downstream + - retries + - queue + - pool + - pool_slots + - execution_timeout + - retry_delay + - retry_exponential_backoff + - priority_weight + - weight_rule + - ui_color + - ui_fgcolor + - template_fields + - downstream_task_ids + - doc_md + - operator_name + - params + - class_ref + - is_mapped + - extra_links + title: TaskResponse + description: Task serializer for responses. + TimeDelta: + properties: + __type: + type: string + title: Type + default: TimeDelta + days: + type: integer + title: Days + seconds: + type: integer + title: Seconds + microseconds: + type: integer + title: Microseconds + type: object + required: + - days + - seconds + - microseconds + title: TimeDelta + description: TimeDelta can be used to interact with datetime.timedelta objects. 
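The `TimeDelta` schema above (used by `TaskResponse.execution_timeout` and `retry_delay`) mirrors `datetime.timedelta`, so converting between the wire format and Python is mechanical:

```python
from datetime import timedelta

def timedelta_from_api(obj: dict) -> timedelta:
    # The __type discriminator is ignored here; days/seconds/microseconds map
    # directly onto datetime.timedelta's constructor.
    return timedelta(days=obj["days"], seconds=obj["seconds"], microseconds=obj["microseconds"])

def timedelta_to_api(td: timedelta) -> dict:
    # timedelta normalizes itself into exactly these three fields.
    return {"__type": "TimeDelta", "days": td.days, "seconds": td.seconds, "microseconds": td.microseconds}
```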
+ TriggerDAGRunPostBody: + properties: + dag_run_id: + anyOf: + - type: string + - type: 'null' + title: Dag Run Id + data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval Start + data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval End + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + run_after: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Run After + conf: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Conf + note: + anyOf: + - type: string + - type: 'null' + title: Note + additionalProperties: false + type: object + required: + - logical_date + title: TriggerDAGRunPostBody + description: Trigger DAG Run Serializer for POST body. + TriggerResponse: + properties: + id: + type: integer + title: Id + classpath: + type: string + title: Classpath + kwargs: + type: string + title: Kwargs + created_date: + type: string + format: date-time + title: Created Date + triggerer_id: + anyOf: + - type: integer + - type: 'null' + title: Triggerer Id + type: object + required: + - id + - classpath + - kwargs + - created_date + - triggerer_id + title: TriggerResponse + description: Trigger serializer for responses. + TriggererInfoResponse: + properties: + status: + anyOf: + - type: string + - type: 'null' + title: Status + latest_triggerer_heartbeat: + anyOf: + - type: string + - type: 'null' + title: Latest Triggerer Heartbeat + type: object + required: + - status + - latest_triggerer_heartbeat + title: TriggererInfoResponse + description: Triggerer info serializer for responses. + UpdateHITLDetailPayload: + properties: + chosen_options: + items: + type: string + type: array + minItems: 1 + title: Chosen Options + params_input: + additionalProperties: true + type: object + title: Params Input + type: object + required: + - chosen_options + title: UpdateHITLDetailPayload + description: Schema for updating the content of a Human-in-the-loop detail. + ValidationError: + properties: + loc: + items: + anyOf: + - type: string + - type: integer + type: array + title: Location + msg: + type: string + title: Message + type: + type: string + title: Error Type + type: object + required: + - loc + - msg + - type + title: ValidationError + VariableBody: + properties: + key: + type: string + maxLength: 250 + title: Key + value: + $ref: '#/components/schemas/JsonValue' + description: + anyOf: + - type: string + - type: 'null' + title: Description + additionalProperties: false + type: object + required: + - key + - value + title: VariableBody + description: Variable serializer for bodies. + VariableCollectionResponse: + properties: + variables: + items: + $ref: '#/components/schemas/VariableResponse' + type: array + title: Variables + total_entries: + type: integer + title: Total Entries + type: object + required: + - variables + - total_entries + title: VariableCollectionResponse + description: Variable Collection serializer for responses. + VariableResponse: + properties: + key: + type: string + title: Key + value: + type: string + title: Value + description: + anyOf: + - type: string + - type: 'null' + title: Description + is_encrypted: + type: boolean + title: Is Encrypted + type: object + required: + - key + - value + - description + - is_encrypted + title: VariableResponse + description: Variable serializer for responses. 
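A hedged sketch of triggering a run with `TriggerDAGRunPostBody` above; the endpoint path and token are assumptions. Note that `logical_date` is the only required key, yet it is nullable, so the key must be present even when its value is `null`.

```python
import requests

# Path and token are assumptions; body fields come from TriggerDAGRunPostBody.
resp = requests.post(
    "http://localhost:8080/api/v2/dags/example_dag/dagRuns",
    json={
        "dag_run_id": "manual_run_001",  # optional; server generates one if null
        "logical_date": None,            # required key, but null is allowed
        "conf": {"target": "staging"},   # free-form run configuration
        "note": "Triggered from a script",
    },
    headers={"Authorization": "Bearer <jwt>"},
)
resp.raise_for_status()
print(resp.json()["state"])              # expected to be a DagRunState value
```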
+ VersionInfo: + properties: + version: + type: string + title: Version + git_version: + anyOf: + - type: string + - type: 'null' + title: Git Version + type: object + required: + - version + - git_version + title: VersionInfo + description: Version information serializer for responses. + XComCollectionResponse: + properties: + xcom_entries: + items: + $ref: '#/components/schemas/XComResponse' + type: array + title: Xcom Entries + total_entries: + type: integer + title: Total Entries + type: object + required: + - xcom_entries + - total_entries + title: XComCollectionResponse + description: XCom Collection serializer for responses. + XComCreateBody: + properties: + key: + type: string + title: Key + value: + title: Value + map_index: + type: integer + title: Map Index + default: -1 + additionalProperties: false + type: object + required: + - key + - value + title: XComCreateBody + description: Payload serializer for creating an XCom entry. + XComResponse: + properties: + key: + type: string + title: Key + timestamp: + type: string + format: date-time + title: Timestamp + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + map_index: + type: integer + title: Map Index + task_id: + type: string + title: Task Id + dag_id: + type: string + title: Dag Id + run_id: + type: string + title: Run Id + dag_display_name: + type: string + title: Dag Display Name + type: object + required: + - key + - timestamp + - logical_date + - map_index + - task_id + - dag_id + - run_id + - dag_display_name + title: XComResponse + description: Serializer for an XCom item. + XComResponseNative: + properties: + key: + type: string + title: Key + timestamp: + type: string + format: date-time + title: Timestamp + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + map_index: + type: integer + title: Map Index + task_id: + type: string + title: Task Id + dag_id: + type: string + title: Dag Id + run_id: + type: string + title: Run Id + dag_display_name: + type: string + title: Dag Display Name + value: + title: Value + type: object + required: + - key + - timestamp + - logical_date + - map_index + - task_id + - dag_id + - run_id + - dag_display_name + - value + title: XComResponseNative + description: XCom response serializer with native return type. + XComResponseString: + properties: + key: + type: string + title: Key + timestamp: + type: string + format: date-time + title: Timestamp + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + map_index: + type: integer + title: Map Index + task_id: + type: string + title: Task Id + dag_id: + type: string + title: Dag Id + run_id: + type: string + title: Run Id + dag_display_name: + type: string + title: Dag Display Name + value: + anyOf: + - type: string + - type: 'null' + title: Value + type: object + required: + - key + - timestamp + - logical_date + - map_index + - task_id + - dag_id + - run_id + - dag_display_name + - value + title: XComResponseString + description: XCom response serializer with string return type. + XComUpdateBody: + properties: + value: + title: Value + map_index: + type: integer + title: Map Index + default: -1 + additionalProperties: false + type: object + required: + - value + title: XComUpdateBody + description: Payload serializer for updating an XCom entry.
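The XCom write bodies above pair with the bearer-token scheme described in the `securitySchemes` entry just below: obtain a JWT from ``POST /auth/token`` and send it on each request. A hedged end-to-end sketch; the credential field names, the token response key, and the XCom route under the task-instance resource are all assumptions.

```python
import requests

BASE = "http://localhost:8080"

# 1. Obtain a JWT, per the OAuth2PasswordBearer scheme described below.
#    The username/password field names and "access_token" key are assumptions.
token = requests.post(
    f"{BASE}/auth/token", json={"username": "admin", "password": "admin"}
).json()["access_token"]

# 2. Create an XCom entry using the XComCreateBody fields above.
#    The exact route under the task-instance resource is an assumption.
resp = requests.post(
    f"{BASE}/api/v2/dags/example_dag/dagRuns/manual_run_001"
    f"/taskInstances/my_task/xcomEntries",
    json={"key": "result", "value": {"rows": 42}, "map_index": -1},
    headers={"Authorization": f"Bearer {token}"},
)
resp.raise_for_status()
```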
+ securitySchemes: + OAuth2PasswordBearer: + type: oauth2 + description: To authenticate Airflow API requests, clients must include a JWT + (JSON Web Token) in the Authorization header of each request. This token is + used to verify the identity of the client and ensure that they have the appropriate + permissions to access the requested resources. You can use the endpoint ``POST + /auth/token`` in order to generate a JWT token. Upon successful authentication, + the server will issue a JWT token that contains the necessary information + (such as user identity and scope) to authenticate subsequent requests. To + learn more about Airflow public API authentication, please read https://airflow.apache.org/docs/apache-airflow/stable/security/api.html. + flows: + password: + scopes: {} + tokenUrl: /auth/token + HTTPBearer: + type: http + scheme: bearer diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py index fbbfb46dfa8d0..6db86ce2327a6 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -37,6 +37,7 @@ from airflow.api_fastapi.core_api.routes.public.dags import dags_router from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router from airflow.api_fastapi.core_api.routes.public.extra_links import extra_links_router +from airflow.api_fastapi.core_api.routes.public.hitl import hitl_router from airflow.api_fastapi.core_api.routes.public.import_error import import_error_router from airflow.api_fastapi.core_api.routes.public.job import job_router from airflow.api_fastapi.core_api.routes.public.log import task_instances_log_router @@ -83,6 +84,7 @@ authenticated_router.include_router(dag_parsing_router) authenticated_router.include_router(dag_tags_router) authenticated_router.include_router(dag_versions_router) +authenticated_router.include_router(hitl_router) # Include authenticated router in public router diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/assets.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/assets.py index f22872066648b..3a49670e878cc 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/assets.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/assets.py @@ -20,10 +20,12 @@ from datetime import datetime from typing import TYPE_CHECKING, Annotated -from fastapi import Depends, HTTPException, Request, status -from sqlalchemy import delete, select +from fastapi import Depends, HTTPException, status +from sqlalchemy import and_, delete, func, select from sqlalchemy.orm import joinedload, subqueryload +from airflow._shared.timezones import timezone +from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.parameters import ( BaseParam, @@ -55,6 +57,7 @@ from airflow.api_fastapi.core_api.datamodels.dag_run import DAGRunResponse from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import ( + GetUserDep, ReadableDagsFilterDep, requires_access_asset, requires_access_asset_alias, @@ -69,8 +72,6 @@ AssetModel, TaskOutletAssetReference, ) -from airflow.models.dag import DAG -from airflow.utils import timezone from airflow.utils.state import DagRunState from 
airflow.utils.types import DagRunTriggeredByType, DagRunType @@ -135,8 +136,40 @@ def get_assets( session: SessionDep, ) -> AssetCollectionResponse: """Get assets.""" + # Build a query that will be used to retrieve the ID and timestamp of the latest AssetEvent + last_asset_events = ( + select(AssetEvent.asset_id, func.max(AssetEvent.timestamp).label("last_timestamp")) + .group_by(AssetEvent.asset_id) + .subquery() + ) + + # First, we're pulling the Asset ID, AssetEvent ID, and AssetEvent timestamp for the latest (last) + # AssetEvent. We'll eventually OUTER JOIN this to the AssetModel + asset_event_query = ( + select( + AssetEvent.asset_id, # The ID of the Asset, which we'll need to JOIN to the AssetModel + func.max(AssetEvent.id).label("last_asset_event_id"), # The ID of the last AssetEvent + func.max(AssetEvent.timestamp).label("last_asset_event_timestamp"), + ) + .join( + last_asset_events, + and_( + AssetEvent.asset_id == last_asset_events.c.asset_id, + AssetEvent.timestamp == last_asset_events.c.last_timestamp, + ), + ) + .group_by(AssetEvent.asset_id) + .subquery() + ) + + assets_select_statement = select( + AssetModel, + asset_event_query.c.last_asset_event_id, # This should be the AssetEvent.id + asset_event_query.c.last_asset_event_timestamp, + ).outerjoin(asset_event_query, AssetModel.id == asset_event_query.c.asset_id) + assets_select, total_entries = paginated_select( - statement=select(AssetModel), + statement=assets_select_statement, filters=[only_active, name_pattern, uri_pattern, dag_ids], order_by=order_by, offset=offset, @@ -144,11 +177,29 @@ def get_assets( session=session, ) - assets = session.scalars( + assets_rows = session.execute( assets_select.options( - subqueryload(AssetModel.consuming_dags), subqueryload(AssetModel.producing_tasks) + subqueryload(AssetModel.scheduled_dags), + subqueryload(AssetModel.producing_tasks), + subqueryload(AssetModel.consuming_tasks), ) ) + + assets = [] + + for asset, last_asset_event_id, last_asset_event_timestamp in assets_rows: + asset_response = AssetResponse.model_validate( + { + **asset.__dict__, + "aliases": asset.aliases, + "last_asset_event": { + "id": last_asset_event_id, + "timestamp": last_asset_event_timestamp, + }, + } + ) + assets.append(asset_response) + return AssetCollectionResponse( assets=assets, total_entries=total_entries, @@ -296,7 +347,8 @@ def create_asset_event( ) def materialize_asset( asset_id: int, - request: Request, + dag_bag: DagBagDep, + user: GetUserDep, session: SessionDep, ) -> DAGRunResponse: """Materialize an asset by triggering a DAG run that produces it.""" @@ -317,9 +369,7 @@ def materialize_asset( f"More than one DAG materializes asset with ID: {asset_id}", ) - dag: DAG | None - if not (dag := request.app.state.dag_bag.get_dag(dag_id)): - raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with ID `{dag_id}` was not found") + dag = get_latest_version_of_dag(dag_bag, dag_id, session) return dag.create_dagrun( run_id=dag.timetable.generate_run_id( @@ -330,6 +380,7 @@ def materialize_asset( run_after=run_after, run_type=DagRunType.MANUAL, triggered_by=DagRunTriggeredByType.REST_API, + triggering_user_name=user.get_name(), state=DagRunState.QUEUED, session=session, ) @@ -350,7 +401,7 @@ def get_asset_queued_events( where_clause = _generate_queued_event_where_clause( asset_id=asset_id, before=before, permitted_dag_ids=readable_dags_filter.value ) - query = select(AssetDagRunQueue).where(*where_clause) + query = 
select(AssetDagRunQueue).where(*where_clause).options(joinedload(AssetDagRunQueue.dag_model)) dag_asset_queued_events_select, total_entries = paginated_select(statement=query) adrqs = session.scalars(dag_asset_queued_events_select).all() @@ -362,7 +413,12 @@ def get_asset_queued_events( ) queued_events = [ - QueuedEventResponse(created_at=adrq.created_at, dag_id=adrq.target_dag_id, asset_id=adrq.asset_id) + QueuedEventResponse( + created_at=adrq.created_at, + dag_id=adrq.target_dag_id, + asset_id=adrq.asset_id, + dag_display_name=adrq.dag_model.dag_display_name, + ) for adrq in adrqs ] @@ -385,16 +441,45 @@ def get_asset( session: SessionDep, ) -> AssetResponse: """Get an asset.""" + # Build a subquery to be used to retrieve the latest AssetEvent by matching timestamp + last_asset_event = ( + select(func.max(AssetEvent.timestamp)).where(AssetEvent.asset_id == asset_id).scalar_subquery() + ) + + # Now, find the latest AssetEvent details using the subquery from above + asset_event_rows = session.execute( + select(AssetEvent.asset_id, AssetEvent.id, AssetEvent.timestamp).where( + AssetEvent.asset_id == asset_id, AssetEvent.timestamp == last_asset_event + ) + ).one_or_none() + + # Retrieve the Asset; there should only be one for that asset_id asset = session.scalar( select(AssetModel) .where(AssetModel.id == asset_id) - .options(joinedload(AssetModel.consuming_dags), joinedload(AssetModel.producing_tasks)) + .options( + joinedload(AssetModel.scheduled_dags), + joinedload(AssetModel.producing_tasks), + joinedload(AssetModel.consuming_tasks), + ) ) + last_asset_event_id = asset_event_rows[1] if asset_event_rows else None + last_asset_event_timestamp = asset_event_rows[2] if asset_event_rows else None + if asset is None: raise HTTPException(status.HTTP_404_NOT_FOUND, f"The Asset with ID: `{asset_id}` was not found") - return AssetResponse.model_validate(asset) + return AssetResponse.model_validate( + { + **asset.__dict__, + "aliases": asset.aliases, + "last_asset_event": { + "id": last_asset_event_id, + "timestamp": last_asset_event_timestamp, + }, + } + ) @assets_router.get( @@ -412,7 +497,7 @@ def get_dag_asset_queued_events( where_clause = _generate_queued_event_where_clause( dag_id=dag_id, before=before, permitted_dag_ids=readable_dags_filter.value ) - query = select(AssetDagRunQueue).where(*where_clause) + query = select(AssetDagRunQueue).where(*where_clause).options(joinedload(AssetDagRunQueue.dag_model)) dag_asset_queued_events_select, total_entries = paginated_select(statement=query) adrqs = session.scalars(dag_asset_queued_events_select).all() @@ -420,7 +505,12 @@ def get_dag_asset_queued_events( raise HTTPException(status.HTTP_404_NOT_FOUND, f"Queue event with dag_id: `{dag_id}` was not found") queued_events = [ - QueuedEventResponse(created_at=adrq.created_at, dag_id=adrq.target_dag_id, asset_id=adrq.asset_id) + QueuedEventResponse( + created_at=adrq.created_at, + dag_id=adrq.target_dag_id, + asset_id=adrq.asset_id, + dag_display_name=adrq.dag_model.dag_display_name, + ) for adrq in adrqs ] @@ -454,7 +544,12 @@ def get_dag_asset_queued_event( f"Queued event with dag_id: `{dag_id}` and asset_id: `{asset_id}` was not found", ) - return QueuedEventResponse(created_at=adrq.created_at, dag_id=adrq.target_dag_id, asset_id=asset_id) + return QueuedEventResponse( + created_at=adrq.created_at, + dag_id=adrq.target_dag_id, + asset_id=asset_id, + dag_display_name=adrq.dag_model.dag_display_name, + ) @assets_router.delete( diff --git 
a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/auth.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/auth.py index b8f6d204d2ed1..2e3a215f1753b 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/auth.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/auth.py @@ -22,6 +22,7 @@ from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import is_safe_url +from airflow.configuration import conf auth_router = AirflowRouter(tags=["Login"], prefix="/auth") @@ -55,3 +56,23 @@ def logout(request: Request, next: None | str = None) -> RedirectResponse: logout_url = request.app.state.auth_manager.get_url_login() return RedirectResponse(logout_url) + + +@auth_router.get( + "/refresh", + responses=create_openapi_http_exception_doc([status.HTTP_307_TEMPORARY_REDIRECT]), +) +def refresh(request: Request, next: None | str = None) -> RedirectResponse: + """Refresh the authentication token.""" + refresh_url = request.app.state.auth_manager.get_url_refresh() + + if not refresh_url: + return RedirectResponse(f"{conf.get('api', 'base_url', fallback='/')}auth/logout") + + if next and not is_safe_url(next, request=request): + raise HTTPException(status_code=400, detail="Invalid or unsafe next URL") + + if next: + refresh_url += f"?next={next}" + + return RedirectResponse(refresh_url) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py index e4995cb36c3d2..30efea4d79ab8 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/backfills.py @@ -20,13 +20,15 @@ from fastapi import Depends, HTTPException, status from fastapi.exceptions import RequestValidationError +from pydantic import NonNegativeInt from sqlalchemy import select, update +from sqlalchemy.orm import joinedload +from airflow._shared.timezones import timezone from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity from airflow.api_fastapi.common.db.common import ( - AsyncSessionDep, SessionDep, - paginated_select_async, + paginated_select, ) from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset, SortParam from airflow.api_fastapi.common.router import AirflowRouter @@ -40,7 +42,7 @@ from airflow.api_fastapi.core_api.openapi.exceptions import ( create_openapi_http_exception_doc, ) -from airflow.api_fastapi.core_api.security import requires_access_backfill, requires_access_dag +from airflow.api_fastapi.core_api.security import GetUserDep, requires_access_backfill, requires_access_dag from airflow.api_fastapi.logging.decorators import action_logging from airflow.exceptions import DagNotFound from airflow.models import DagRun @@ -55,7 +57,6 @@ _create_backfill, _do_dry_run, ) -from airflow.utils import timezone from airflow.utils.state import DagRunState backfills_router = AirflowRouter(tags=["Backfill"], prefix="/backfills") @@ -67,7 +68,7 @@ Depends(requires_access_backfill(method="GET")), ], ) -async def list_backfills( +def list_backfills( dag_id: str, limit: QueryLimit, offset: QueryOffset, @@ -75,16 +76,16 @@ async def list_backfills( SortParam, Depends(SortParam(["id"], Backfill).dynamic_depends()), ], - session: AsyncSessionDep, + session: SessionDep, ) -> BackfillCollectionResponse: - 
select_stmt, total_entries = await paginated_select_async( - statement=select(Backfill).where(Backfill.dag_id == dag_id), + select_stmt, total_entries = paginated_select( + statement=select(Backfill).where(Backfill.dag_id == dag_id).options(joinedload(Backfill.dag_model)), order_by=order_by, offset=offset, limit=limit, session=session, ) - backfills = await session.scalars(select_stmt) + backfills = session.scalars(select_stmt) return BackfillCollectionResponse( backfills=backfills, total_entries=total_entries, @@ -99,10 +100,12 @@ async def list_backfills( ], ) def get_backfill( - backfill_id: str, + backfill_id: NonNegativeInt, session: SessionDep, ) -> BackfillResponse: - backfill = session.get(Backfill, backfill_id) + backfill = session.scalars( + select(Backfill).where(Backfill.id == backfill_id).options(joinedload(Backfill.dag_model)) + ).one_or_none() if backfill: return backfill raise HTTPException(status.HTTP_404_NOT_FOUND, "Backfill not found") @@ -122,8 +125,10 @@ def get_backfill( Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.RUN)), ], ) -def pause_backfill(backfill_id, session: SessionDep) -> BackfillResponse: - b = session.get(Backfill, backfill_id) +def pause_backfill(backfill_id: NonNegativeInt, session: SessionDep) -> BackfillResponse: + b = session.scalars( + select(Backfill).where(Backfill.id == backfill_id).options(joinedload(Backfill.dag_model)) + ).one_or_none() if not b: raise HTTPException(status.HTTP_404_NOT_FOUND, f"Could not find backfill with id {backfill_id}") if b.completed_at: @@ -148,8 +153,10 @@ def pause_backfill(backfill_id, session: SessionDep) -> BackfillResponse: Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.RUN)), ], ) -def unpause_backfill(backfill_id, session: SessionDep) -> BackfillResponse: - b = session.get(Backfill, backfill_id) +def unpause_backfill(backfill_id: NonNegativeInt, session: SessionDep) -> BackfillResponse: + b = session.scalars( + select(Backfill).where(Backfill.id == backfill_id).options(joinedload(Backfill.dag_model)) + ).one_or_none() if not b: raise HTTPException(status.HTTP_404_NOT_FOUND, f"Could not find backfill with id {backfill_id}") if b.completed_at: @@ -173,8 +180,10 @@ def unpause_backfill(backfill_id, session: SessionDep) -> BackfillResponse: Depends(requires_access_backfill(method="PUT")), ], ) -def cancel_backfill(backfill_id, session: SessionDep) -> BackfillResponse: - b: Backfill = session.get(Backfill, backfill_id) +def cancel_backfill(backfill_id: NonNegativeInt, session: SessionDep) -> BackfillResponse: + b = session.scalars( + select(Backfill).where(Backfill.id == backfill_id).options(joinedload(Backfill.dag_model)) + ).one_or_none() if not b: raise HTTPException(status.HTTP_404_NOT_FOUND, f"Could not find backfill with id {backfill_id}") if b.completed_at is not None: @@ -220,6 +229,7 @@ def cancel_backfill(backfill_id, session: SessionDep) -> BackfillResponse: ) def create_backfill( backfill_request: BackfillPostBody, + user: GetUserDep, ) -> BackfillResponse: from_date = timezone.coerce_datetime(backfill_request.from_date) to_date = timezone.coerce_datetime(backfill_request.to_date) @@ -231,6 +241,7 @@ def create_backfill( max_active_runs=backfill_request.max_active_runs, reverse=backfill_request.run_backwards, dag_run_conf=backfill_request.dag_run_conf, + triggering_user_name=user.get_name(), reprocess_behavior=backfill_request.reprocess_behavior, ) return BackfillResponse.model_validate(backfill_obj) diff --git 
a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/config.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/config.py index 1df1582591581..784d652c155f2 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/config.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/config.py @@ -19,7 +19,6 @@ import textwrap from fastapi import Depends, HTTPException, status -from fastapi.responses import Response from airflow.api_fastapi.common.headers import HeaderAcceptJsonOrText from airflow.api_fastapi.common.router import AirflowRouter @@ -31,6 +30,10 @@ ) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import requires_access_configuration +from airflow.api_fastapi.core_api.services.public.config import ( + _check_expose_config, + _response_based_on_accept, +) from airflow.configuration import conf text_example_response_for_get_config_value = { @@ -66,31 +69,6 @@ }, } } - - -def _check_expose_config() -> bool: - display_sensitive: bool | None = None - if conf.get("webserver", "expose_config").lower() == "non-sensitive-only": - expose_config = True - display_sensitive = False - else: - expose_config = conf.getboolean("webserver", "expose_config") - display_sensitive = True - - if not expose_config: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Your Airflow administrator chose not to expose the configuration, most likely for security reasons.", - ) - return display_sensitive - - -def _response_based_on_accept(accept: Mimetype, config: Config): - if accept == Mimetype.TEXT: - return Response(content=config.text_format, media_type=Mimetype.TEXT) - return config - - config_router = AirflowRouter(tags=["Config"], prefix="/config") diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py index 7b12593994f7a..b41292ca39df4 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py @@ -17,13 +17,16 @@ from __future__ import annotations +import textwrap from typing import Annotated, Literal, cast import structlog -from fastapi import Depends, HTTPException, Query, Request, status +from fastapi import Depends, HTTPException, Query, status from fastapi.exceptions import RequestValidationError +from fastapi.responses import StreamingResponse from pydantic import ValidationError from sqlalchemy import select +from sqlalchemy.orm import joinedload from airflow.api.common.mark_tasks import ( set_dag_run_state_to_failed, @@ -31,6 +34,7 @@ set_dag_run_state_to_success, ) from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.dagbag import DagBagDep, get_dag_for_run, get_latest_version_of_dag from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.parameters import ( FilterOptionEnum, @@ -44,9 +48,12 @@ Range, RangeFilter, SortParam, + _SearchParam, datetime_range_filter_factory, + search_param_factory, ) from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.common.types import Mimetype from airflow.api_fastapi.core_api.datamodels.assets import AssetEventCollectionResponse from airflow.api_fastapi.core_api.datamodels.dag_run import ( DAGRunClearBody, @@ -68,10 +75,11 @@ requires_access_asset, 
requires_access_dag, ) +from airflow.api_fastapi.core_api.services.public.dag_run import DagRunWaiter from airflow.api_fastapi.logging.decorators import action_logging from airflow.exceptions import ParamValidationError from airflow.listeners.listener import get_listener_manager -from airflow.models import DAG, DagModel, DagRun +from airflow.models import DagModel, DagRun from airflow.utils.state import DagRunState from airflow.utils.types import DagRunTriggeredByType, DagRunType @@ -90,7 +98,9 @@ dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.RUN))], ) def get_dag_run(dag_id: str, dag_run_id: str, session: SessionDep) -> DAGRunResponse: - dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) + dag_run = session.scalar( + select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id).options(joinedload(DagRun.dag_model)) + ) if dag_run is None: raise HTTPException( status.HTTP_404_NOT_FOUND, @@ -144,22 +154,21 @@ def patch_dag_run( dag_run_id: str, patch_body: DAGRunPatchBody, session: SessionDep, - request: Request, + dag_bag: DagBagDep, user: GetUserDep, update_mask: list[str] | None = Query(None), ) -> DAGRunResponse: """Modify a DAG Run.""" - dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) + dag_run = session.scalar( + select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id).options(joinedload(DagRun.dag_model)) + ) if dag_run is None: raise HTTPException( status.HTTP_404_NOT_FOUND, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found", ) - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") + dag = get_dag_for_run(dag_bag, dag_run, session=session) fields_to_update = patch_body.model_fields_set @@ -250,23 +259,28 @@ def clear_dag_run( dag_id: str, dag_run_id: str, body: DAGRunClearBody, - request: Request, + dag_bag: DagBagDep, session: SessionDep, ) -> TaskInstanceCollectionResponse | DAGRunResponse: - dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) + dag_run = session.scalar( + select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id).options(joinedload(DagRun.dag_model)) + ) if dag_run is None: raise HTTPException( status.HTTP_404_NOT_FOUND, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found", ) - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) + dag = dag_bag.get_dag_for_run(dag_run, session=session) if body.dry_run: + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") task_instances = dag.clear( run_id=dag_run_id, task_ids=None, only_failed=body.only_failed, + run_on_latest_version=body.run_on_latest_version, dry_run=True, session=session, ) @@ -275,10 +289,13 @@ def clear_dag_run( task_instances=cast("list[TaskInstanceResponse]", task_instances), total_entries=len(task_instances), ) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") dag.clear( run_id=dag_run_id, task_ids=None, only_failed=body.only_failed, + run_on_latest_version=body.run_on_latest_version, session=session, ) dag_run_cleared = session.scalar(select(DagRun).where(DagRun.id == dag_run.id)) @@ -316,6 +333,7 @@ def get_dag_runs( "end_date", "updated_at", "conf", + "duration", ], DagRun, {"dag_run_id": "run_id"}, @@ -324,7 +342,12 @@ def get_dag_runs( ], readable_dag_runs_filter: ReadableDagRunsFilterDep, session: 
SessionDep, - request: Request, + dag_bag: DagBagDep, + run_id_pattern: Annotated[_SearchParam, Depends(search_param_factory(DagRun.run_id, "run_id_pattern"))], + triggering_user_name_pattern: Annotated[ + _SearchParam, + Depends(search_param_factory(DagRun.triggering_user_name, "triggering_user_name_pattern")), + ], ) -> DAGRunCollectionResponse: """ Get all DAG Runs. @@ -334,11 +357,9 @@ def get_dag_runs( query = select(DagRun) if dag_id != "~": - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"The DAG with dag_id: `{dag_id}` was not found") - - query = query.filter(DagRun.dag_id == dag_id) + # Check if the DAG exists + get_latest_version_of_dag(dag_bag, dag_id, session) + query = query.filter(DagRun.dag_id == dag_id).options(joinedload(DagRun.dag_model)) dag_run_select, total_entries = paginated_select( statement=query, @@ -351,6 +372,8 @@ def get_dag_runs( state, run_type, readable_dag_runs_filter, + run_id_pattern, + triggering_user_name_pattern, ], order_by=order_by, offset=offset, @@ -382,7 +405,7 @@ def get_dag_runs( def trigger_dag_run( dag_id, body: TriggerDAGRunPostBody, - request: Request, + dag_bag: DagBagDep, user: GetUserDep, session: SessionDep, ) -> DAGRunResponse: @@ -398,7 +421,7 @@ def trigger_dag_run( ) try: - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) + dag = get_latest_version_of_dag(dag_bag, dag_id, session) params = body.validate_context(dag) dag_run = dag.create_dagrun( @@ -409,9 +432,11 @@ def trigger_dag_run( conf=params["conf"], run_type=DagRunType.MANUAL, triggered_by=DagRunTriggeredByType.REST_API, + triggering_user_name=user.get_name(), state=DagRunState.QUEUED, session=session, ) + dag_run_note = body.note if dag_run_note: current_user_id = user.get_id() @@ -423,6 +448,57 @@ def trigger_dag_run( raise HTTPException(status.HTTP_400_BAD_REQUEST, str(e)) +@dag_run_router.get( + "/{dag_run_id}/wait", + tags=["experimental"], + summary="Experimental: Wait for a dag run to complete, and return task results if requested.", + description="🚧 This is an experimental endpoint and may change or be removed without notice.", + responses={ + **create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + status.HTTP_200_OK: { + "description": "Successful Response", + "content": { + Mimetype.NDJSON: { + "schema": { + "type": "string", + "example": textwrap.dedent( + """\ + {"state": "running"} + {"state": "success", "results": {"op": 42}} + """ + ), + } + } + }, + }, + }, + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.RUN))], +) +def wait_dag_run_until_finished( + dag_id: str, + dag_run_id: str, + session: SessionDep, + interval: Annotated[float, Query(gt=0.0, description="Seconds to wait between dag run state checks")], + result_task_ids: Annotated[ + list[str] | None, + Query(alias="result", description="Collect result XCom from task. Can be set multiple times."), + ] = None, +): + "Wait for a dag run until it finishes, and return its result(s)." 
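+    # A cheap existence check (SELECT 1) so a missing run 404s before the
+    # response starts streaming; DagRunWaiter then re-polls the run state
+    # every `interval` seconds until the run finishes.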
+ if not session.scalar(select(1).where(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id)): + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found", + ) + waiter = DagRunWaiter( + dag_id=dag_id, + run_id=dag_run_id, + interval=interval, + result_task_ids=result_task_ids, + ) + return StreamingResponse(waiter.wait()) + + @dag_run_router.post( "/list", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), @@ -472,9 +548,9 @@ def get_list_dag_runs_batch( ], DagRun, {"dag_run_id": "run_id"}, - ).set_value(body.order_by) + ).set_value([body.order_by] if body.order_by else None) - base_query = select(DagRun) + base_query = select(DagRun).options(joinedload(DagRun.dag_model)) dag_runs_select, total_entries = paginated_select( statement=base_query, filters=[dag_ids, logical_date, run_after, start_date, end_date, state, readable_dag_runs_filter], diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_sources.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_sources.py index 4a4eaa02a1d2c..c44fe9740a1af 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_sources.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_sources.py @@ -73,6 +73,7 @@ def get_dag_source( dag_id=dag_id, content=dag_version.dag_code.source_code, version_number=dag_version.version_number, + dag_display_name=dag_version.dag_model.dag_display_name, ) if accept == Mimetype.TEXT: diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_stats.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_stats.py index 124221571e1c0..d2b4cc17bf1d8 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_stats.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_stats.py @@ -74,15 +74,18 @@ def get_dag_stats( query_result = session.execute(dagruns_select) result_dag_ids = [] + dag_display_names: dict[str, str] = {} dag_state_data = {} - for dag_id, state, count in query_result: + for dag_id, state, dag_display_name, count in query_result: dag_state_data[(dag_id, state)] = count if dag_id not in result_dag_ids: + dag_display_names[dag_id] = dag_display_name result_dag_ids.append(dag_id) dags = [ DagStatsResponse( dag_id=dag_id, + dag_display_name=dag_display_names[dag_id], stats=[ DagStatsStateResponse( state=state, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_versions.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_versions.py index b41f203a7df88..e4f282f60880b 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_versions.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_versions.py @@ -18,10 +18,12 @@ from typing import Annotated -from fastapi import Depends, HTTPException, Request, status +from fastapi import Depends, HTTPException, status from sqlalchemy import select +from sqlalchemy.orm import joinedload from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.parameters import ( FilterParam, @@ -37,7 +39,6 @@ ) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import 
requires_access_dag -from airflow.models.dag import DAG from airflow.models.dag_version import DagVersion dag_versions_router = AirflowRouter(tags=["DagVersion"], prefix="/dags/{dag_id}/dagVersions") @@ -58,7 +59,11 @@ def get_dag_version( session: SessionDep, ) -> DagVersionResponse: """Get one Dag Version.""" - dag_version = session.scalar(select(DagVersion).filter_by(dag_id=dag_id, version_number=version_number)) + dag_version = session.scalar( + select(DagVersion) + .filter_by(dag_id=dag_id, version_number=version_number) + .options(joinedload(DagVersion.dag_model)) + ) if dag_version is None: raise HTTPException( @@ -80,10 +85,9 @@ def get_dag_version( ) def get_dag_versions( dag_id: str, + session: SessionDep, limit: QueryLimit, offset: QueryOffset, - session: SessionDep, - request: Request, version_number: Annotated[ FilterParam[int], Depends(filter_param_factory(DagVersion.version_number, int)) ], @@ -97,19 +101,17 @@ def get_dag_versions( SortParam(["id", "version_number", "bundle_name", "bundle_version"], DagVersion).dynamic_depends() ), ], + dag_bag: DagBagDep, ) -> DAGVersionCollectionResponse: """ Get all DAG Versions. This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. """ - query = select(DagVersion) + query = select(DagVersion).options(joinedload(DagVersion.dag_model)) if dag_id != "~": - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"The DAG with dag_id: `{dag_id}` was not found") - + get_latest_version_of_dag(dag_bag, dag_id, session) query = query.filter(DagVersion.dag_id == dag_id) dag_versions_select, total_entries = paginated_select( diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py index a70034e835975..2af6073d60586 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py @@ -19,12 +19,13 @@ from typing import Annotated -from fastapi import Depends, HTTPException, Query, Request, Response, status +from fastapi import Depends, HTTPException, Query, Response, status from fastapi.exceptions import RequestValidationError from pydantic import ValidationError -from sqlalchemy import select, update +from sqlalchemy import delete, insert, select, update from airflow.api.common import delete_dag as delete_dag_module +from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag from airflow.api_fastapi.common.db.common import ( SessionDep, paginated_select, @@ -33,10 +34,13 @@ from airflow.api_fastapi.common.parameters import ( FilterOptionEnum, FilterParam, + QueryBundleNameFilter, + QueryBundleVersionFilter, QueryDagDisplayNamePatternSearch, QueryDagIdPatternSearch, QueryDagIdPatternSearchWithNone, QueryExcludeStaleFilter, + QueryFavoriteFilter, QueryLastDagRunStateFilter, QueryLimit, QueryOffset, @@ -59,12 +63,14 @@ from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import ( EditableDagsFilterDep, + GetUserDep, ReadableDagsFilterDep, requires_access_dag, ) from airflow.api_fastapi.logging.decorators import action_logging from airflow.exceptions import AirflowException, DagNotFound -from airflow.models import DAG, DagModel +from airflow.models import DagModel +from airflow.models.dag_favorite import DagFavorite from airflow.models.dagrun import DagRun dags_router = 
AirflowRouter(tags=["DAG"], prefix="/dags") @@ -81,6 +87,8 @@ def get_dags( exclude_stale: QueryExcludeStaleFilter, paused: QueryPausedFilter, last_dag_run_state: QueryLastDagRunStateFilter, + bundle_name: QueryBundleNameFilter, + bundle_version: QueryBundleVersionFilter, dag_run_start_date_range: Annotated[ RangeFilter, Depends(datetime_range_filter_factory("dag_run_start_date", DagRun, "start_date")) ], @@ -112,33 +120,32 @@ def get_dags( ], readable_dags_filter: ReadableDagsFilterDep, session: SessionDep, + is_favorite: QueryFavoriteFilter, ) -> DAGCollectionResponse: """Get all DAGs.""" - dag_runs_select = None - - if dag_run_state.value or dag_run_start_date_range.is_active() or dag_run_end_date_range.is_active(): - dag_runs_select, _ = paginated_select( - statement=select(DagRun), - filters=[ - dag_run_start_date_range, - dag_run_end_date_range, - dag_run_state, - ], - session=session, - ) - dag_runs_select = dag_runs_select.cte() + query = generate_dag_with_latest_run_query( + max_run_filters=[ + dag_run_start_date_range, + dag_run_end_date_range, + dag_run_state, + last_dag_run_state, + ], + order_by=order_by, + ) dags_select, total_entries = paginated_select( - statement=generate_dag_with_latest_run_query(dag_runs_select), + statement=query, filters=[ exclude_stale, paused, dag_id_pattern, dag_display_name_pattern, tags, + is_favorite, owners, - last_dag_run_state, readable_dags_filter, + bundle_name, + bundle_version, ], order_by=order_by, offset=offset, @@ -165,12 +172,13 @@ def get_dags( ), dependencies=[Depends(requires_access_dag(method="GET"))], ) -def get_dag(dag_id: str, session: SessionDep, request: Request) -> DAGResponse: +def get_dag( + dag_id: str, + session: SessionDep, + dag_bag: DagBagDep, +) -> DAGResponse: """Get basic information about a DAG.""" - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") - + dag = get_latest_version_of_dag(dag_bag, dag_id, session) dag_model: DagModel = session.get(DagModel, dag_id) if not dag_model: raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unable to obtain dag with id {dag_id} from session") @@ -192,11 +200,9 @@ def get_dag(dag_id: str, session: SessionDep, request: Request) -> DAGResponse: ), dependencies=[Depends(requires_access_dag(method="GET"))], ) -def get_dag_details(dag_id: str, session: SessionDep, request: Request) -> DAGDetailsResponse: +def get_dag_details(dag_id: str, session: SessionDep, dag_bag: DagBagDep) -> DAGDetailsResponse: """Get details of DAG.""" - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") + dag = get_latest_version_of_dag(dag_bag, dag_id, session) dag_model: DagModel = session.get(DagModel, dag_id) if not dag_model: @@ -271,7 +277,6 @@ def patch_dags( dag_id_pattern: QueryDagIdPatternSearchWithNone, exclude_stale: QueryExcludeStaleFilter, paused: QueryPausedFilter, - last_dag_run_state: QueryLastDagRunStateFilter, editable_dags_filter: EditableDagsFilterDep, session: SessionDep, update_mask: list[str] | None = Query(None), @@ -288,18 +293,14 @@ def patch_dags( except ValidationError as e: raise RequestValidationError(errors=e.errors()) - # todo: this is not used? 
- update_mask = ["is_paused"] - dags_select, total_entries = paginated_select( - statement=generate_dag_with_latest_run_query(), + statement=select(DagModel), filters=[ exclude_stale, paused, dag_id_pattern, tags, owners, - last_dag_run_state, editable_dags_filter, ], order_by=None, @@ -322,6 +323,54 @@ def patch_dags( ) +@dags_router.post( + "/{dag_id}/favorite", + status_code=status.HTTP_204_NO_CONTENT, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET")), Depends(action_logging())], +) +def favorite_dag(dag_id: str, session: SessionDep, user: GetUserDep): + """Mark the DAG as favorite.""" + dag = session.get(DagModel, dag_id) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"DAG with id '{dag_id}' not found") + + user_id = str(user.get_id()) + session.execute(insert(DagFavorite).values(dag_id=dag_id, user_id=user_id)) + + +@dags_router.post( + "/{dag_id}/unfavorite", + status_code=status.HTTP_204_NO_CONTENT, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND, status.HTTP_409_CONFLICT]), + dependencies=[Depends(requires_access_dag(method="GET")), Depends(action_logging())], +) +def unfavorite_dag(dag_id: str, session: SessionDep, user: GetUserDep): + """Unmark the DAG as favorite.""" + dag = session.get(DagModel, dag_id) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"DAG with id '{dag_id}' not found") + + user_id = str(user.get_id()) + + favorite_exists = session.execute( + select(DagFavorite).where( + DagFavorite.dag_id == dag_id, + DagFavorite.user_id == user_id, + ) + ).first() + + if not favorite_exists: + raise HTTPException(status.HTTP_409_CONFLICT, detail="DAG is not marked as favorite") + + session.execute( + delete(DagFavorite).where( + DagFavorite.dag_id == dag_id, + DagFavorite.user_id == user_id, + ) + ) + + @dags_router.delete( "/{dag_id}", responses=create_openapi_http_exception_doc( diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/event_logs.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/event_logs.py index 33b3c68e564a7..e721162389991 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/event_logs.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/event_logs.py @@ -116,7 +116,7 @@ def get_event_logs( ], ) -> EventLogCollectionResponse: """Get all Event Logs.""" - query = select(Log).group_by(Log.id) + query = select(Log) event_logs_select, total_entries = paginated_select( statement=query, order_by=order_by, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py index 867bd8033fc4f..1dbbf0db8b70c 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py @@ -17,21 +17,23 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast -from fastapi import Depends, HTTPException, Request, status +from fastapi import Depends, HTTPException, status from sqlalchemy.sql import select +from airflow.api_fastapi.common.dagbag import DagBagDep, get_dag_for_run_or_latest_version from airflow.api_fastapi.common.db.common import SessionDep from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.extra_links import ExtraLinkCollectionResponse from 
airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import DagAccessEntity, requires_access_dag from airflow.exceptions import TaskNotFound +from airflow.models import DagRun if TYPE_CHECKING: - from airflow.models import DAG - + from airflow.models.mappedoperator import MappedOperator + from airflow.serialization.serialized_objects import SerializedBaseOperator extra_links_router = AirflowRouter( tags=["Extra Links"], prefix="/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links" @@ -49,18 +51,19 @@ def get_extra_links( dag_run_id: str, task_id: str, session: SessionDep, - request: Request, + dag_bag: DagBagDep, map_index: int = -1, ) -> ExtraLinkCollectionResponse: """Get extra links for task instance.""" from airflow.models.taskinstance import TaskInstance - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with ID = {dag_id} not found") + dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id)) + + dag = get_dag_for_run_or_latest_version(dag_bag, dag_run, dag_id, session) try: - task = dag.get_task(task_id) + # TODO (GH-52141): Make dag a db-backed object so it only returns db-backed tasks. + task = cast("MappedOperator | SerializedBaseOperator", dag.get_task(task_id)) except TaskNotFound: raise HTTPException(status.HTTP_404_NOT_FOUND, f"Task with ID = {task_id} not found") @@ -76,7 +79,7 @@ def get_extra_links( if not ti: raise HTTPException( status.HTTP_404_NOT_FOUND, - f"DAG Run with ID = {dag_run_id} not found", + "TaskInstance not found", ) all_extra_link_pairs = ( diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py new file mode 100644 index 0000000000000..9f17bc3fd714d --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -0,0 +1,352 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
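+
+# Routes for Human-in-the-loop (HITL) task handling: read the detail attached
+# to a task instance and record a user's response (chosen options and params
+# input). A response can be recorded only once; later writes get 409 Conflict.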
+from __future__ import annotations + +from typing import Annotated + +import structlog +from fastapi import Depends, HTTPException, status +from sqlalchemy import select +from sqlalchemy.orm import joinedload + +from airflow._shared.timezones import timezone +from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.db.common import SessionDep, paginated_select +from airflow.api_fastapi.common.parameters import ( + QueryHITLDetailBodySearch, + QueryHITLDetailDagIdFilter, + QueryHITLDetailDagIdPatternSearch, + QueryHITLDetailDagRunIdFilter, + QueryHITLDetailResponseReceivedFilter, + QueryHITLDetailSubjectSearch, + QueryHITLDetailTaskIdFilter, + QueryHITLDetailTaskIdPatternSearch, + QueryHITLDetailUserIdFilter, + QueryLimit, + QueryOffset, + QueryTIStateFilter, + SortParam, +) +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.datamodels.hitl import ( + HITLDetail, + HITLDetailCollection, + HITLDetailResponse, + UpdateHITLDetailPayload, +) +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.security import GetUserDep, ReadableTIFilterDep, requires_access_dag +from airflow.models.hitl import HITLDetail as HITLDetailModel +from airflow.models.taskinstance import TaskInstance as TI + +hitl_router = AirflowRouter(tags=["HumanInTheLoop"], prefix="/hitlDetails") + +log = structlog.get_logger(__name__) + + +def _get_task_instance( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int | None = None, +) -> TI: + query = select(TI).where( + TI.dag_id == dag_id, + TI.run_id == dag_run_id, + TI.task_id == task_id, + ) + + if map_index is not None: + query = query.where(TI.map_index == map_index) + + task_instance = session.scalar(query) + if task_instance is None: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + if map_index is None and task_instance.map_index != -1: + raise HTTPException( + status.HTTP_404_NOT_FOUND, "Task instance is mapped, add the map_index value to the URL" + ) + + return task_instance + + +def _update_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, + map_index: int | None = None, +) -> HITLDetailResponse: + task_instance = _get_task_instance( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + ti_id_str = str(task_instance.id) + hitl_detail_model = session.scalar(select(HITLDetailModel).where(HITLDetailModel.ti_id == ti_id_str)) + if not hitl_detail_model: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"Human-in-the-loop detail does not exist for Task Instance with id {ti_id_str}", + ) + + if hitl_detail_model.response_received: + raise HTTPException( + status.HTTP_409_CONFLICT, + f"Human-in-the-loop detail has already been updated for Task Instance with id {ti_id_str} " + "and is not allowed to write again.", + ) + + hitl_detail_model.user_id = user.get_id() + hitl_detail_model.response_at = timezone.utcnow() + hitl_detail_model.chosen_options = update_hitl_detail_payload.chosen_options + hitl_detail_model.params_input = update_hitl_detail_payload.params_input + session.add(hitl_detail_model) + session.commit() + return 
HITLDetailResponse.model_validate(hitl_detail_model) + + +def _get_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int | None = None, +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" + task_instance = _get_task_instance( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + if task_instance is None: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + + ti_id_str = str(task_instance.id) + hitl_detail_model = session.scalar( + select(HITLDetailModel) + .where(HITLDetailModel.ti_id == ti_id_str) + .options(joinedload(HITLDetailModel.task_instance)) + ) + if not hitl_detail_model: + log.error("Human-in-the-loop detail not found") + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail={ + "reason": "not_found", + "message": "Human-in-the-loop detail not found", + }, + ) + return HITLDetail.model_validate(hitl_detail_model) + + +@hitl_router.patch( + "/{dag_id}/{dag_run_id}/{task_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.HITL_DETAIL))], +) +def update_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, +) -> HITLDetailResponse: + """Update a Human-in-the-loop detail.""" + return _update_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + update_hitl_detail_payload=update_hitl_detail_payload, + user=user, + map_index=None, + ) + + +@hitl_router.patch( + "/{dag_id}/{dag_run_id}/{task_id}/{map_index}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.HITL_DETAIL))], +) +def update_mapped_ti_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, + map_index: int, +) -> HITLDetailResponse: + """Update a Human-in-the-loop detail.""" + return _update_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + update_hitl_detail_payload=update_hitl_detail_payload, + user=user, + map_index=map_index, + ) + + +@hitl_router.get( + "/{dag_id}/{dag_run_id}/{task_id}", + status_code=status.HTTP_200_OK, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.HITL_DETAIL))], +) +def get_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" + return _get_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=None, + ) + + +@hitl_router.get( + "/{dag_id}/{dag_run_id}/{task_id}/{map_index}", + status_code=status.HTTP_200_OK, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.HITL_DETAIL))], +) +def get_mapped_ti_hitl_detail( + 
dag_id: str,
+    dag_run_id: str,
+    task_id: str,
+    session: SessionDep,
+    map_index: int,
+) -> HITLDetail:
+    """Get a Human-in-the-loop detail of a specific task instance."""
+    return _get_hitl_detail(
+        dag_id=dag_id,
+        dag_run_id=dag_run_id,
+        task_id=task_id,
+        session=session,
+        map_index=map_index,
+    )
+
+
+@hitl_router.get(
+    "/",
+    status_code=status.HTTP_200_OK,
+    dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.HITL_DETAIL))],
+)
+def get_hitl_details(
+    limit: QueryLimit,
+    offset: QueryOffset,
+    order_by: Annotated[
+        SortParam,
+        Depends(
+            SortParam(
+                [
+                    "ti_id",
+                    "subject",
+                    "response_at",
+                    "task_instance.dag_id",
+                    "task_instance.run_id",
+                ],
+                HITLDetailModel,
+                to_replace={
+                    "dag_id": TI.dag_id,
+                    "run_id": TI.run_id,
+                },
+            ).dynamic_depends(),
+        ),
+    ],
+    session: SessionDep,
+    # ti related filter
+    readable_ti_filter: ReadableTIFilterDep,
+    dag_id: QueryHITLDetailDagIdFilter,
+    dag_id_pattern: QueryHITLDetailDagIdPatternSearch,
+    dag_run_id: QueryHITLDetailDagRunIdFilter,
+    task_id: QueryHITLDetailTaskIdFilter,
+    task_id_pattern: QueryHITLDetailTaskIdPatternSearch,
+    ti_state: QueryTIStateFilter,
+    # hitl detail related filter
+    response_received: QueryHITLDetailResponseReceivedFilter,
+    user_id: QueryHITLDetailUserIdFilter,
+    subject_pattern: QueryHITLDetailSubjectSearch,
+    body_pattern: QueryHITLDetailBodySearch,
+) -> HITLDetailCollection:
+    """Get Human-in-the-loop details."""
+    query = (
+        select(HITLDetailModel)
+        .join(TI, HITLDetailModel.ti_id == TI.id)
+        .options(joinedload(HITLDetailModel.task_instance))
+    )
+    hitl_detail_select, total_entries = paginated_select(
+        statement=query,
+        filters=[
+            # ti related filter
+            readable_ti_filter,
+            dag_id,
+            dag_id_pattern,
+            dag_run_id,
+            task_id,
+            task_id_pattern,
+            ti_state,
+            # hitl detail related filter
+            response_received,
+            user_id,
+            subject_pattern,
+            body_pattern,
+        ],
+        offset=offset,
+        limit=limit,
+        order_by=order_by,
+        session=session,
+    )
+
+    hitl_details = session.scalars(hitl_detail_select)
+
+    return HITLDetailCollection(
+        hitl_details=hitl_details,
+        total_entries=total_entries,
+    )
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/import_error.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/import_error.py
index 4beb0ea2cd416..a989a7eef168c 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/import_error.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/import_error.py
@@ -22,7 +22,7 @@
 from typing import Annotated

 from fastapi import Depends, HTTPException, status
-from sqlalchemy import select
+from sqlalchemy import and_, select

 from airflow.api_fastapi.app import get_auth_manager
 from airflow.api_fastapi.auth.managers.models.batch_apis import IsAuthorizedDagRequest
@@ -151,15 +151,23 @@ def get_import_errors(
         # if the user doesn't have access to all DAGs, only display errors from visible DAGs
         readable_dag_ids = auth_manager.get_authorized_dag_ids(method="GET", user=user)
         # Build a cte that fetches dag_ids for each file location
-        visiable_files_cte = (
-            select(DagModel.fileloc, DagModel.dag_id).where(DagModel.dag_id.in_(readable_dag_ids)).cte()
+        visible_files_cte = (
+            select(DagModel.relative_fileloc, DagModel.dag_id, DagModel.bundle_name)
+            .where(DagModel.dag_id.in_(readable_dag_ids))
+            .cte()
         )
         # Prepare the import errors query by joining with the cte.
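+        # The join matches on both relative_fileloc and bundle_name, since a
+        # relative path only identifies a file uniquely within a single bundle.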
# Each returned row will be a tuple: (ParseImportError, dag_id) import_errors_stmt = ( - select(ParseImportError, visiable_files_cte.c.dag_id) - .join(visiable_files_cte, ParseImportError.filename == visiable_files_cte.c.fileloc) + select(ParseImportError, visible_files_cte.c.dag_id) + .join( + visible_files_cte, + and_( + ParseImportError.filename == visible_files_cte.c.relative_fileloc, + ParseImportError.bundle_name == visible_files_cte.c.bundle_name, + ), + ) .order_by(ParseImportError.id) ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/job.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/job.py index 997a9152909d0..b1a35913207d1 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/job.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/job.py @@ -20,6 +20,7 @@ from fastapi import Depends, status from sqlalchemy import select +from sqlalchemy.orm import joinedload from airflow.api_fastapi.common.db.common import ( SessionDep, @@ -101,7 +102,12 @@ def get_jobs( is_alive: bool | None = None, ) -> JobCollectionResponse: """Get all jobs.""" - base_select = select(Job).where(Job.state == JobState.RUNNING).order_by(Job.latest_heartbeat.desc()) + base_select = ( + select(Job) + .where(Job.state == JobState.RUNNING) + .order_by(Job.latest_heartbeat.desc()) + .options(joinedload(Job.dag_model)) + ) jobs_select, total_entries = paginated_select( statement=base_select, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py index 3483453d829a1..5f1d04a244aa0 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py @@ -20,17 +20,19 @@ import contextlib import textwrap -from fastapi import Depends, HTTPException, Request, Response, status +from fastapi import Depends, HTTPException, Request, status +from fastapi.responses import StreamingResponse from itsdangerous import BadSignature, URLSafeSerializer -from pydantic import PositiveInt +from pydantic import NonNegativeInt, PositiveInt from sqlalchemy.orm import joinedload from sqlalchemy.sql import select +from airflow.api_fastapi.common.dagbag import DagBagDep from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.common.headers import HeaderAcceptJsonOrText +from airflow.api_fastapi.common.headers import HeaderAcceptJsonOrNdjson from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.common.types import Mimetype -from airflow.api_fastapi.core_api.datamodels.log import TaskInstancesLogResponse +from airflow.api_fastapi.core_api.datamodels.log import ExternalLogUrlResponse, TaskInstancesLogResponse from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import DagAccessEntity, requires_access_dag from airflow.exceptions import TaskNotFound @@ -42,13 +44,14 @@ tags=["Task Instance"], prefix="/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances" ) -text_example_response_for_get_log = { - Mimetype.TEXT: { +ndjson_example_response_for_get_log = { + Mimetype.NDJSON: { "schema": { "type": "string", "example": textwrap.dedent( """\ - content + {"content": "content"} + {"content": "content"} """ ), } @@ -62,7 +65,7 @@ **create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), status.HTTP_200_OK: { "description": "Successful Response", - "content": 
text_example_response_for_get_log, + "content": ndjson_example_response_for_get_log, }, }, dependencies=[Depends(requires_access_dag("GET", DagAccessEntity.TASK_LOGS))], @@ -73,9 +76,10 @@ def get_log( dag_id: str, dag_run_id: str, task_id: str, - try_number: PositiveInt, - accept: HeaderAcceptJsonOrText, + try_number: NonNegativeInt, + accept: HeaderAcceptJsonOrNdjson, request: Request, + dag_bag: DagBagDep, session: SessionDep, full_content: bool = False, map_index: int = -1, @@ -109,18 +113,25 @@ def get_log( TaskInstance.dag_id == dag_id, TaskInstance.run_id == dag_run_id, TaskInstance.map_index == map_index, + TaskInstance.try_number == try_number, ) .join(TaskInstance.dag_run) .options(joinedload(TaskInstance.trigger).joinedload(Trigger.triggerer_job)) + .options(joinedload(TaskInstance.dag_model)) ) ti = session.scalar(query) if ti is None: - query = select(TaskInstanceHistory).where( - TaskInstanceHistory.task_id == task_id, - TaskInstanceHistory.dag_id == dag_id, - TaskInstanceHistory.run_id == dag_run_id, - TaskInstanceHistory.map_index == map_index, - TaskInstanceHistory.try_number == try_number, + query = ( + select(TaskInstanceHistory) + .where( + TaskInstanceHistory.task_id == task_id, + TaskInstanceHistory.dag_id == dag_id, + TaskInstanceHistory.run_id == dag_run_id, + TaskInstanceHistory.map_index == map_index, + TaskInstanceHistory.try_number == try_number, + ) + .options(joinedload(TaskInstanceHistory.dag_run)) + # we need to joinedload the dag_run, since FileTaskHandler._render_filename needs ti.dag_run ) ti = session.scalar(query) @@ -128,26 +139,68 @@ def get_log( metadata["end_of_log"] = True raise HTTPException(status.HTTP_404_NOT_FOUND, "TaskInstance not found") - dag = request.app.state.dag_bag.get_dag(dag_id) + dag = dag_bag.get_dag_for_run(ti.dag_run, session=session) if dag: with contextlib.suppress(TaskNotFound): ti.task = dag.get_task(ti.task_id) - if accept == Mimetype.JSON or accept == Mimetype.ANY: # default - logs, metadata = task_log_reader.read_log_chunks(ti, try_number, metadata) - encoded_token = None + if accept == Mimetype.NDJSON: # only specified application/x-ndjson will return streaming response + # LogMetadata(TypedDict) is used as type annotation for log_reader; added ignore to suppress mypy error + log_stream = task_log_reader.read_log_stream(ti, try_number, metadata) # type: ignore[arg-type] + headers = None if not metadata.get("end_of_log", False): - encoded_token = URLSafeSerializer(request.app.state.secret_key).dumps(metadata) - return TaskInstancesLogResponse.model_construct(continuation_token=encoded_token, content=logs) - # text/plain, or something else we don't understand. Return raw log content - - # We need to exhaust the iterator before we can generate the continuation token. - # We could improve this by making it a streaming/async response, and by then setting the header using - # HTTP Trailers - logs = "".join(task_log_reader.read_log_stream(ti, try_number, metadata)) - headers = None - if not metadata.get("end_of_log", False): - headers = { - "Airflow-Continuation-Token": URLSafeSerializer(request.app.state.secret_key).dumps(metadata) - } - return Response(media_type="application/x-ndjson", content=logs, headers=headers) + headers = { + "Airflow-Continuation-Token": URLSafeSerializer(request.app.state.secret_key).dumps(metadata) + } + return StreamingResponse(media_type="application/x-ndjson", content=log_stream, headers=headers) + + # application/json, or something else we don't understand. 
+
+    # Return JSON format, which is easier for users to debug.
+
+    # LogMetadata(TypedDict) is used as type annotation for log_reader; added ignore to suppress mypy error
+    structured_log_stream, out_metadata = task_log_reader.read_log_chunks(ti, try_number, metadata)  # type: ignore[arg-type]
+    encoded_token = None
+    if not out_metadata.get("end_of_log", False):
+        encoded_token = URLSafeSerializer(request.app.state.secret_key).dumps(out_metadata)
+    return TaskInstancesLogResponse.model_construct(
+        continuation_token=encoded_token, content=list(structured_log_stream)
+    )
+
+
+@task_instances_log_router.get(
+    "/{task_id}/externalLogUrl/{try_number}",
+    responses=create_openapi_http_exception_doc([status.HTTP_400_BAD_REQUEST, status.HTTP_404_NOT_FOUND]),
+    dependencies=[Depends(requires_access_dag("GET", DagAccessEntity.TASK_INSTANCE))],
+)
+def get_external_log_url(
+    dag_id: str,
+    dag_run_id: str,
+    task_id: str,
+    try_number: PositiveInt,
+    session: SessionDep,
+    map_index: int = -1,
+) -> ExternalLogUrlResponse:
+    """Get external log URL for a specific task instance."""
+    task_log_reader = TaskLogReader()
+
+    if not task_log_reader.supports_external_link:
+        raise HTTPException(status.HTTP_400_BAD_REQUEST, "Task log handler does not support external logs.")
+
+    # Fetch the task instance
+    query = (
+        select(TaskInstance)
+        .where(
+            TaskInstance.task_id == task_id,
+            TaskInstance.dag_id == dag_id,
+            TaskInstance.run_id == dag_run_id,
+            TaskInstance.map_index == map_index,
+        )
+        .options(joinedload(TaskInstance.dag_model))
+    )
+    ti = session.scalar(query)
+
+    if ti is None:
+        raise HTTPException(status.HTTP_404_NOT_FOUND, "TaskInstance not found")
+
+    url = task_log_reader.log_handler.get_external_log_url(ti, try_number)
+    return ExternalLogUrlResponse(url=url)
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/plugins.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/plugins.py
index 04ca85be43334..fe700178f6bf4 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/plugins.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/plugins.py
@@ -21,12 +21,16 @@

 from fastapi import Depends

+from airflow import plugins_manager
 from airflow.api_fastapi.auth.managers.models.resource_details import AccessView
 from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset
 from airflow.api_fastapi.common.router import AirflowRouter
-from airflow.api_fastapi.core_api.datamodels.plugins import PluginCollectionResponse, PluginResponse
+from airflow.api_fastapi.core_api.datamodels.plugins import (
+    PluginCollectionResponse,
+    PluginImportErrorCollectionResponse,
+    PluginResponse,
+)
 from airflow.api_fastapi.core_api.security import requires_access_view
-from airflow.plugins_manager import get_plugin_info

 plugins_router = AirflowRouter(tags=["Plugin"], prefix="/plugins")

@@ -39,8 +43,25 @@ def get_plugins(
     limit: QueryLimit,
     offset: QueryOffset,
 ) -> PluginCollectionResponse:
-    plugins_info = sorted(get_plugin_info(), key=lambda x: x["name"])
+    plugins_info = sorted(plugins_manager.get_plugin_info(), key=lambda x: x["name"])
     return PluginCollectionResponse(
         plugins=cast("list[PluginResponse]", plugins_info[offset.value :][: limit.value]),
         total_entries=len(plugins_info),
     )
+
+
+@plugins_router.get(
+    "/importErrors",
+    dependencies=[Depends(requires_access_view(AccessView.PLUGINS))],
+)
+def import_errors() -> PluginImportErrorCollectionResponse:
+    plugins_manager.ensure_plugins_loaded()  # make sure import_errors
are loaded + + return PluginImportErrorCollectionResponse.model_validate( + { + "import_errors": [ + {"source": source, "error": error} for source, error in plugins_manager.import_errors.items() + ], + "total_entries": len(plugins_manager.import_errors), + } + ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/pools.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/pools.py index 8cdd25072270a..729b82f3bf89f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/pools.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/pools.py @@ -49,7 +49,7 @@ @pools_router.delete( - "/{pool_name}", + "/{pool_name:path}", status_code=status.HTTP_204_NO_CONTENT, responses=create_openapi_http_exception_doc( [ @@ -74,7 +74,7 @@ def delete_pool( @pools_router.get( - "/{pool_name}", + "/{pool_name:path}", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), dependencies=[Depends(requires_access_pool(method="GET"))], ) @@ -124,7 +124,7 @@ def get_pools( @pools_router.patch( - "/{pool_name}", + "/{pool_name:path}", responses=create_openapi_http_exception_doc( [ status.HTTP_400_BAD_REQUEST, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/providers.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/providers.py index abd7a1943fc29..813c890db5c19 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/providers.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/providers.py @@ -17,32 +17,19 @@ from __future__ import annotations -import re - from fastapi import Depends from airflow.api_fastapi.auth.managers.models.resource_details import AccessView from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset from airflow.api_fastapi.common.router import AirflowRouter -from airflow.api_fastapi.core_api.datamodels.providers import ProviderCollectionResponse, ProviderResponse +from airflow.api_fastapi.core_api.datamodels.providers import ProviderCollectionResponse from airflow.api_fastapi.core_api.security import requires_access_view -from airflow.providers_manager import ProviderInfo, ProvidersManager +from airflow.api_fastapi.core_api.services.public.providers import _provider_mapper +from airflow.providers_manager import ProvidersManager providers_router = AirflowRouter(tags=["Provider"], prefix="/providers") -def _remove_rst_syntax(value: str) -> str: - return re.sub("[`_<>]", "", value.strip(" \n.")) - - -def _provider_mapper(provider: ProviderInfo) -> ProviderResponse: - return ProviderResponse( - package_name=provider.data["package-name"], - description=_remove_rst_syntax(provider.data["description"]), - version=provider.version, - ) - - @providers_router.get( "", dependencies=[Depends(requires_access_view(AccessView.PROVIDERS))], diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/task_instances.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/task_instances.py index 540e1c49a9100..fd5155bb89bf5 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/task_instances.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/task_instances.py @@ -20,15 +20,18 @@ from typing import Annotated, Literal, cast import structlog -from fastapi import Depends, HTTPException, Query, Request, status -from fastapi.exceptions import RequestValidationError -from pydantic import ValidationError +from fastapi import Depends, HTTPException, Query, status from sqlalchemy import or_, 
select -from sqlalchemy.exc import MultipleResultsFound from sqlalchemy.orm import joinedload from sqlalchemy.sql.selectable import Select from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.dagbag import ( + DagBagDep, + get_dag_for_run, + get_dag_for_run_or_latest_version, + get_latest_version_of_dag, +) from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.parameters import ( FilterOptionEnum, @@ -51,7 +54,9 @@ float_range_filter_factory, ) from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.datamodels.common import BulkBody, BulkResponse from airflow.api_fastapi.core_api.datamodels.task_instances import ( + BulkTaskInstanceBody, ClearTaskInstancesBody, PatchTaskInstanceBody, TaskDependencyCollectionResponse, @@ -63,11 +68,15 @@ ) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import GetUserDep, ReadableTIFilterDep, requires_access_dag +from airflow.api_fastapi.core_api.services.public.task_instances import ( + BulkTaskInstanceService, + _patch_task_instance_note, + _patch_task_instance_state, + _patch_ti_validate_request, +) from airflow.api_fastapi.logging.decorators import action_logging from airflow.exceptions import TaskNotFound -from airflow.listeners.listener import get_listener_manager from airflow.models import Base, DagRun -from airflow.models.dag import DAG from airflow.models.taskinstance import TaskInstance as TI, clear_task_instances from airflow.models.taskinstancehistory import TaskInstanceHistory as TIH from airflow.ti_deps.dep_context import DepContext @@ -93,9 +102,9 @@ def get_task_instance( query = ( select(TI) .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id) - .join(TI.dag_run) .options(joinedload(TI.rendered_task_instance_fields)) .options(joinedload(TI.dag_version)) + .options(joinedload(TI.dag_run).options(joinedload(DagRun.dag_model))) ) task_instance = session.scalar(query) @@ -121,7 +130,7 @@ def get_mapped_task_instances( dag_id: str, dag_run_id: str, task_id: str, - request: Request, + dag_bag: DagBagDep, run_after_range: Annotated[RangeFilter, Depends(datetime_range_filter_factory("run_after", TI))], logical_date_range: Annotated[RangeFilter, Depends(datetime_range_filter_factory("logical_date", TI))], start_date_range: Annotated[RangeFilter, Depends(datetime_range_filter_factory("start_date", TI))], @@ -171,14 +180,13 @@ def get_mapped_task_instances( .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id, TI.map_index >= 0) .join(TI.dag_run) .options(joinedload(TI.dag_version)) + .options(joinedload(TI.dag_run).options(joinedload(DagRun.dag_model))) ) # 0 can mean a mapped TI that expanded to an empty list, so it is not an automatic 404 unfiltered_total_count = get_query_count(query, session=session) if unfiltered_total_count == 0: - dag = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - error_message = f"DAG {dag_id} not found" - raise HTTPException(status.HTTP_404_NOT_FOUND, error_message) + dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id)) + dag = get_dag_for_run_or_latest_version(dag_bag, dag_run, dag_id, session) try: task = dag.get_task(task_id) except TaskNotFound: @@ -220,39 +228,40 @@ def get_mapped_task_instances( task_instances_prefix + "/{task_id}/dependencies", 
responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]),
     dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))],
+    operation_id="get_task_instance_dependencies",
 )
 @task_instances_router.get(
     task_instances_prefix + "/{task_id}/{map_index}/dependencies",
     responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]),
     dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))],
+    operation_id="get_task_instance_dependencies_by_map_index",
 )
 def get_task_instance_dependencies(
     dag_id: str,
     dag_run_id: str,
     task_id: str,
     session: SessionDep,
-    request: Request,
+    dag_bag: DagBagDep,
     map_index: int = -1,
 ) -> TaskDependencyCollectionResponse:
     """Get dependencies blocking task from getting scheduled."""
     query = select(TI).where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id)
-
-    if map_index == -1:
-        query = query.where(TI.map_index == -1)
-    else:
-        query = query.where(TI.map_index == map_index)
+    query = query.where(TI.map_index == map_index)

     result = session.execute(query).one_or_none()

     if result is None:
-        error_message = f"Task Instance not found for dag_id={dag_id}, run_id={dag_run_id}, task_id={task_id}"
+        error_message = (
+            f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found"
+        )
         raise HTTPException(status.HTTP_404_NOT_FOUND, error_message)

     ti = result[0]
     deps = []

     if ti.state in [None, TaskInstanceState.SCHEDULED]:
-        dag = request.app.state.dag_bag.get_dag(ti.dag_id)
+        dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == ti.dag_id, DagRun.run_id == ti.run_id))
+        dag = dag_bag.get_dag_for_run(dag_run, session=session)

         if dag:
             try:
@@ -296,6 +305,7 @@ def _query(orm_object: Base) -> Select:
                 orm_object.map_index == map_index,
             )
             .options(joinedload(orm_object.dag_version))
+            .options(joinedload(orm_object.dag_run).options(joinedload(DagRun.dag_model)))
         )
         return query

@@ -353,9 +363,9 @@ def get_mapped_task_instance(
     query = (
         select(TI)
         .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id, TI.map_index == map_index)
-        .join(TI.dag_run)
         .options(joinedload(TI.rendered_task_instance_fields))
         .options(joinedload(TI.dag_version))
+        .options(joinedload(TI.dag_run).options(joinedload(DagRun.dag_model)))
     )
     task_instance = session.scalar(query)

@@ -376,7 +386,7 @@ def get_mapped_task_instance(
 def get_task_instances(
     dag_id: str,
     dag_run_id: str,
-    request: Request,
+    dag_bag: DagBagDep,
     task_id: Annotated[FilterParam[str | None], Depends(filter_param_factory(TI.task_id, str | None))],
     run_after_range: Annotated[RangeFilter, Depends(datetime_range_filter_factory("run_after", TI))],
     logical_date_range: Annotated[RangeFilter, Depends(datetime_range_filter_factory("logical_date", TI))],
@@ -429,14 +439,14 @@ def get_task_instances(
     This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs and DAG runs.
""" - query = select(TI).join(TI.dag_run).outerjoin(TI.dag_version).options(joinedload(TI.dag_version)) - - if dag_id != "~": - dag = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with dag_id: `{dag_id}` was not found") - query = query.where(TI.dag_id == dag_id) - + dag_run = None + query = ( + select(TI) + .join(TI.dag_run) + .outerjoin(TI.dag_version) + .options(joinedload(TI.dag_version)) + .options(joinedload(TI.dag_run).options(joinedload(DagRun.dag_model))) + ) if dag_run_id != "~": dag_run = session.scalar(select(DagRun).filter_by(run_id=dag_run_id)) if not dag_run: @@ -445,6 +455,9 @@ def get_task_instances( f"DagRun with run_id: `{dag_run_id}` was not found", ) query = query.where(TI.run_id == dag_run_id) + if dag_id != "~": + get_dag_for_run_or_latest_version(dag_bag, dag_run, dag_id, session) + query = query.where(TI.dag_id == dag_id) task_instance_select, total_entries = paginated_select( statement=query, @@ -527,9 +540,9 @@ def get_task_instances_batch( order_by = SortParam( ["id", "state", "duration", "start_date", "end_date", "map_index"], TI, - ).set_value(body.order_by) + ).set_value([body.order_by] if body.order_by else None) - query = select(TI).join(TI.dag_run) + query = select(TI) task_instance_select, total_entries = paginated_select( statement=query, filters=[ @@ -553,7 +566,9 @@ def get_task_instances_batch( session=session, ) task_instance_select = task_instance_select.options( - joinedload(TI.rendered_task_instance_fields), joinedload(TI.task_instance_note) + joinedload(TI.rendered_task_instance_fields), + joinedload(TI.task_instance_note), + joinedload(TI.dag_run).options(joinedload(DagRun.dag_model)), ) task_instances = session.scalars(task_instance_select) @@ -633,15 +648,12 @@ def get_mapped_task_instance_try_details( ) def post_clear_task_instances( dag_id: str, - request: Request, + dag_bag: DagBagDep, body: ClearTaskInstancesBody, session: SessionDep, ) -> TaskInstanceCollectionResponse: """Clear task instances.""" - dag = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - error_message = f"DAG {dag_id} not found" - raise HTTPException(status.HTTP_404_NOT_FOUND, error_message) + dag = get_latest_version_of_dag(dag_bag, dag_id, session) reset_dag_runs = body.reset_dag_runs dry_run = body.dry_run @@ -659,7 +671,8 @@ def post_clear_task_instances( if dag_run is None: error_message = f"Dag Run id {dag_run_id} not found in dag {dag_id}" raise HTTPException(status.HTTP_404_NOT_FOUND, error_message) - + # Get the specific dag version: + dag = get_dag_for_run(dag_bag, dag_run, session) if past or future: raise HTTPException( status.HTTP_400_BAD_REQUEST, @@ -687,28 +700,37 @@ def post_clear_task_instances( # If we had upstream/downstream etc then also include those! 
task_ids.extend(tid for tid in dag.task_dict if tid != task_id) - task_instances = dag.clear( - dry_run=True, - run_id=None if past or future else dag_run_id, - task_ids=task_ids, - dag_bag=request.app.state.dag_bag, - session=session, - **body.model_dump( - include={ - "start_date", - "end_date", - "only_failed", - "only_running", - } - ), - ) + # Prepare common parameters + common_params = { + "dry_run": True, + "task_ids": task_ids, + "dag_bag": dag_bag, + "session": session, + "run_on_latest_version": body.run_on_latest_version, + "only_failed": body.only_failed, + "only_running": body.only_running, + } + + if dag_run_id is not None and not (past or future): + # Use run_id-based clearing when we have a specific dag_run_id and not using past/future + task_instances = dag.clear( + **common_params, + run_id=dag_run_id, + ) + else: + # Use date-based clearing when no dag_run_id or when past/future is specified + task_instances = dag.clear( + **common_params, + start_date=body.start_date, + end_date=body.end_date, + ) if not dry_run: clear_task_instances( task_instances, session, - dag, DagRunState.QUEUED if reset_dag_runs else False, + run_on_latest_version=body.run_on_latest_version, ) return TaskInstanceCollectionResponse( @@ -717,64 +739,13 @@ def post_clear_task_instances( ) -def _patch_ti_validate_request( - dag_id: str, - dag_run_id: str, - task_id: str, - request: Request, - body: PatchTaskInstanceBody, - session: SessionDep, - map_index: int = -1, - update_mask: list[str] | None = Query(None), -) -> tuple[DAG, TI, dict]: - dag = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG {dag_id} not found") - - if not dag.has_task(task_id): - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Task '{task_id}' not found in DAG '{dag_id}'") - - query = ( - select(TI) - .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id) - .join(TI.dag_run) - .options(joinedload(TI.rendered_task_instance_fields)) - ) - if map_index == -1: - query = query.where(or_(TI.map_index == -1, TI.map_index is None)) - else: - query = query.where(TI.map_index == map_index) - - try: - ti = session.scalar(query) - except MultipleResultsFound: - raise HTTPException( - status.HTTP_400_BAD_REQUEST, - "Multiple task instances found. 
As the TI is mapped, add the map_index value to the URL", ) - - err_msg_404 = f"Task Instance not found for dag_id={dag_id}, run_id={dag_run_id}, task_id={task_id}" - if ti is None: - raise HTTPException(status.HTTP_404_NOT_FOUND, err_msg_404) - - fields_to_update = body.model_fields_set - if update_mask: - fields_to_update = fields_to_update.intersection(update_mask) - else: - try: - PatchTaskInstanceBody.model_validate(body) - except ValidationError as e: - raise RequestValidationError(errors=e.errors()) - - return dag, ti, body.model_dump(include=fields_to_update, by_alias=True) - - @task_instances_router.patch( task_instances_prefix + "/{task_id}/dry_run", responses=create_openapi_http_exception_doc( [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST], ), dependencies=[Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.TASK_INSTANCE))], + operation_id="patch_task_instance_dry_run", ) @task_instances_router.patch( task_instances_prefix + "/{task_id}/{map_index}/dry_run", @@ -782,30 +753,29 @@ def _patch_ti_validate_request( [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST], ), dependencies=[Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.TASK_INSTANCE))], + operation_id="patch_task_instance_dry_run_by_map_index", ) def patch_task_instance_dry_run( dag_id: str, dag_run_id: str, task_id: str, - request: Request, + dag_bag: DagBagDep, body: PatchTaskInstanceBody, session: SessionDep, - map_index: int = -1, + map_index: int | None = None, update_mask: list[str] | None = Query(None), ) -> TaskInstanceCollectionResponse: """Update a task instance dry_run mode.""" - dag, ti, data = _patch_ti_validate_request( - dag_id, dag_run_id, task_id, request, body, session, map_index, update_mask + dag, tis, data = _patch_ti_validate_request( - dag_id, dag_run_id, task_id, dag_bag, body, session, map_index, update_mask ) - tis: list[TI] = [] - if data.get("new_state"): tis = ( dag.set_task_instance_state( task_id=task_id, run_id=dag_run_id, - map_indexes=[map_index], + map_indexes=[map_index] if map_index is not None else None, state=data["new_state"], upstream=body.include_upstream, downstream=body.include_downstream, @@ -817,9 +787,6 @@ def patch_task_instance_dry_run( or [] ) - elif "note" in data: - tis = [ti] - return TaskInstanceCollectionResponse( task_instances=[ TaskInstanceResponse.model_validate( ti, ) for ti in tis ], total_entries=len(tis), ) + +@task_instances_router.patch( + task_instances_prefix, + dependencies=[Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def bulk_task_instances( + request: BulkBody[BulkTaskInstanceBody], + session: SessionDep, + dag_id: str, + dag_bag: DagBagDep, + dag_run_id: str, + user: GetUserDep, +) -> BulkResponse: + """Bulk update and delete task instances.""" + return BulkTaskInstanceService( + session=session, request=request, dag_id=dag_id, dag_run_id=dag_run_id, dag_bag=dag_bag, user=user + ).handle_request() + + @task_instances_router.patch( task_instances_prefix + "/{task_id}", responses=create_openapi_http_exception_doc( @@ -840,6 +825,7 @@ def patch_task_instance_dry_run( Depends(action_logging()), Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.TASK_INSTANCE)), ], + operation_id="patch_task_instance", ) @task_instances_router.patch( task_instances_prefix + "/{task_id}/{map_index}", @@ -850,63 +836,79 @@ def patch_task_instance_dry_run( Depends(action_logging()), Depends(requires_access_dag(method="PUT",
access_entity=DagAccessEntity.TASK_INSTANCE)), ], + operation_id="patch_task_instance_by_map_index", ) def patch_task_instance( dag_id: str, dag_run_id: str, task_id: str, - request: Request, + dag_bag: DagBagDep, body: PatchTaskInstanceBody, user: GetUserDep, session: SessionDep, - map_index: int = -1, + map_index: int | None = None, update_mask: list[str] | None = Query(None), -) -> TaskInstanceResponse: +) -> TaskInstanceCollectionResponse: """Update a task instance.""" - dag, ti, data = _patch_ti_validate_request( - dag_id, dag_run_id, task_id, request, body, session, map_index, update_mask + dag, tis, data = _patch_ti_validate_request( + dag_id, dag_run_id, task_id, dag_bag, body, session, map_index, update_mask ) for key, _ in data.items(): if key == "new_state": - tis: list[TI] = dag.set_task_instance_state( + _patch_task_instance_state( task_id=task_id, - run_id=dag_run_id, - map_indexes=[map_index], - state=data["new_state"], - upstream=body.include_upstream, - downstream=body.include_downstream, - future=body.include_future, - past=body.include_past, - commit=True, + dag_run_id=dag_run_id, + dag=dag, + task_instance_body=body, + data=data, session=session, ) - if not tis: - raise HTTPException( - status.HTTP_409_CONFLICT, f"Task id {task_id} is already in {data['new_state']} state" - ) - ti = tis[0] if isinstance(tis, list) else tis - try: - if data["new_state"] == TaskInstanceState.SUCCESS: - get_listener_manager().hook.on_task_instance_success( - previous_state=None, task_instance=ti - ) - elif data["new_state"] == TaskInstanceState.FAILED: - get_listener_manager().hook.on_task_instance_failed( - previous_state=None, - task_instance=ti, - error=f"TaskInstance's state was manually set to `{TaskInstanceState.FAILED}`.", - ) - except Exception: - log.exception("error calling listener") elif key == "note": - if update_mask or body.note is not None: - if ti.task_instance_note is None: - ti.note = (body.note, user.get_id()) - else: - ti.task_instance_note.content = body.note - ti.task_instance_note.user_id = user.get_id() - session.commit() - - return TaskInstanceResponse.model_validate(ti) + _patch_task_instance_note( + task_instance_body=body, + tis=tis, + user=user, + update_mask=update_mask, + ) + + return TaskInstanceCollectionResponse( + task_instances=[ + TaskInstanceResponse.model_validate( + ti, + ) + for ti in tis + ], + total_entries=len(tis), + ) + + +@task_instances_router.delete( + task_instances_prefix + "/{task_id}", + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="DELETE", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def delete_task_instance( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int = -1, +) -> None: + """Delete a task instance.""" + query = select(TI).where( + TI.dag_id == dag_id, + TI.run_id == dag_run_id, + TI.task_id == task_id, + ) + + query = query.where(TI.map_index == map_index) + task_instance = session.scalar(query) + if task_instance is None: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + + session.delete(task_instance) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py index 6cd46c4e67b01..c548989835d83 100644 --- 
a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py @@ -20,15 +20,16 @@ from operator import attrgetter from typing import cast -from fastapi import Depends, HTTPException, Request, status +from fastapi import Depends, HTTPException, status from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag +from airflow.api_fastapi.common.db.common import SessionDep from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.tasks import TaskCollectionResponse, TaskResponse from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import requires_access_dag from airflow.exceptions import TaskNotFound -from airflow.models import DAG tasks_router = AirflowRouter(tags=["Task"], prefix="/dags/{dag_id}/tasks") @@ -45,13 +46,12 @@ ) def get_tasks( dag_id: str, - request: Request, + dag_bag: DagBagDep, + session: SessionDep, order_by: str = "task_id", ) -> TaskCollectionResponse: """Get tasks for DAG.""" - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") + dag = get_latest_version_of_dag(dag_bag, dag_id, session) try: tasks = sorted(dag.tasks, key=attrgetter(order_by.lstrip("-")), reverse=(order_by[0:1] == "-")) except AttributeError as err: @@ -72,11 +72,9 @@ def get_tasks( ), dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK))], ) -def get_task(dag_id: str, task_id, request: Request) -> TaskResponse: +def get_task(dag_id: str, task_id, session: SessionDep, dag_bag: DagBagDep) -> TaskResponse: """Get simplified representation of a task.""" - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") + dag = get_latest_version_of_dag(dag_bag, dag_id, session) try: task = dag.get_task(task_id=task_id) except TaskNotFound: diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/variables.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/variables.py index e55a6f540a725..36ee71c17ecd5 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/variables.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/variables.py @@ -47,7 +47,7 @@ @variables_router.delete( - "/{variable_key}", + "/{variable_key:path}", status_code=status.HTTP_204_NO_CONTENT, responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), dependencies=[Depends(action_logging()), Depends(requires_access_variable("DELETE"))], @@ -64,7 +64,7 @@ def delete_variable( @variables_router.get( - "/{variable_key}", + "/{variable_key:path}", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), dependencies=[Depends(requires_access_variable("GET"))], ) @@ -121,7 +121,7 @@ def get_variables( @variables_router.patch( - "/{variable_key}", + "/{variable_key:path}", responses=create_openapi_http_exception_doc( [ status.HTTP_400_BAD_REQUEST, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/xcom.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/xcom.py index 4b1361d402738..d3c2721c161e9 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/xcom.py 
+++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/xcom.py @@ -19,10 +19,12 @@ import copy from typing import Annotated -from fastapi import Depends, HTTPException, Query, Request, status +from fastapi import Depends, HTTPException, Query, status from sqlalchemy import and_, select +from sqlalchemy.orm import joinedload from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.dagbag import DagBagDep, get_dag_for_run_or_latest_version from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset from airflow.api_fastapi.common.router import AirflowRouter @@ -37,9 +39,8 @@ from airflow.api_fastapi.core_api.security import ReadableXComFilterDep, requires_access_dag from airflow.api_fastapi.logging.decorators import action_logging from airflow.exceptions import TaskNotFound -from airflow.models import DAG, DagRun as DR +from airflow.models import DagRun as DR from airflow.models.xcom import XComModel -from airflow.settings import conf xcom_router = AirflowRouter( tags=["XCom"], prefix="/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries" ) @@ -67,40 +68,41 @@ def get_xcom_entry( stringify: Annotated[bool, Query()] = False, ) -> XComResponseNative | XComResponseString: """Get an XCom entry.""" - if deserialize: - if not conf.getboolean("api", "enable_xcom_deserialize_support", fallback=False): - raise HTTPException( - status.HTTP_400_BAD_REQUEST, "XCom deserialization is disabled in configuration." - ) - query = select(XComModel, XComModel.value) - else: - query = select(XComModel) - - query = query.where( - XComModel.dag_id == dag_id, - XComModel.task_id == task_id, - XComModel.key == xcom_key, - XComModel.map_index == map_index, + xcom_query = XComModel.get_many( + run_id=dag_run_id, + key=xcom_key, + task_ids=task_id, + dag_ids=dag_id, + map_indexes=map_index, + session=session, + limit=1, ) - query = query.join(DR, and_(XComModel.dag_id == DR.dag_id, XComModel.run_id == DR.run_id)) - query = query.where(DR.run_id == dag_run_id) - if deserialize: - item = session.execute(query).one_or_none() - else: - item = session.scalars(query).one_or_none() + # We use `XComModel.get_many` to fetch XComs directly from the database, bypassing the XCom Backend. + # This avoids deserialization via the backend (e.g., from a remote storage like S3) and instead + # retrieves the raw serialized value from the database. + result = xcom_query.limit(1).first() - if item is None: + if result is None: raise HTTPException(status.HTTP_404_NOT_FOUND, f"XCom entry with key: `{xcom_key}` not found") + item = copy.copy(result) + if deserialize: - from airflow.sdk.execution_time.xcom import XCom + # We use `airflow.serialization.serde` for deserialization here because custom XCom backends (with their own + # serializers/deserializers) are only used on the worker side during task execution. - xcom, value = item - xcom_stub = copy.copy(xcom) - xcom_stub.value = value - xcom_stub.value = XCom.deserialize_value(xcom_stub) - item = xcom_stub + # However, the XCom value is *always* stored in the metadata database as a valid JSON object. + # Therefore, for purposes such as UI display or returning API responses, deserializing with + # `airflow.serialization.serde` is safe and recommended.
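+            # For illustration, a sketch of the two modes using the helpers in
+            # `airflow.serialization.serde` (the sample value is hypothetical):
+            #
+            #   stored = serialize({"rows": 42})   # JSON-safe form, as persisted in the metadata DB
+            #   deserialize(stored)                # full=True: reconstructs the original object
+            #   deserialize(stored, full=False)    # full=False: stringified form, safe for display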
+ from airflow.serialization.serde import deserialize as serde_deserialize + + # full=False ensures that the `item` is deserialized without loading the classes, and it returns a stringified version + item.value = serde_deserialize(XComModel.deserialize_value(item), full=False) + else: + # For native format, return the raw serialized value from the database + # This preserves the JSON string format that the API expects + item.value = result.value if stringify: return XComResponseString.model_validate(item) @@ -136,7 +138,9 @@ def get_xcom_entries( query = select(XComModel) if dag_id != "~": query = query.where(XComModel.dag_id == dag_id) - query = query.join(DR, and_(XComModel.dag_id == DR.dag_id, XComModel.run_id == DR.run_id)) + query = query.join(DR, and_(XComModel.dag_id == DR.dag_id, XComModel.run_id == DR.run_id)).options( + joinedload(XComModel.dag_run).joinedload(DR.dag_model) + ) if task_id != "~": query = query.where(XComModel.task_id == task_id) @@ -181,28 +185,29 @@ def create_xcom_entry( dag_run_id: str, request_body: XComCreateBody, session: SessionDep, - request: Request, + dag_bag: DagBagDep, ) -> XComResponseNative: """Create an XCom entry.""" + from airflow.models.dagrun import DagRun + + dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id)) # Validate DAG ID - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with ID: `{dag_id}` was not found") + dag = get_dag_for_run_or_latest_version(dag_bag, dag_run, dag_id, session) # Validate Task ID try: dag.get_task(task_id) except TaskNotFound: raise HTTPException( - status.HTTP_404_NOT_FOUND, f"Task with ID: `{task_id}` not found in DAG: `{dag_id}`" + status.HTTP_404_NOT_FOUND, f"Task with ID: `{task_id}` not found in dag: `{dag_id}`" ) # Validate DAG Run ID - dag_run = dag.get_dagrun(dag_run_id, session) if not dag_run: - raise HTTPException( - status.HTTP_404_NOT_FOUND, f"DAG Run with ID: `{dag_run_id}` not found for DAG: `{dag_id}`" - ) + raise HTTPException( + status.HTTP_404_NOT_FOUND, f"Dag Run with ID: `{dag_run_id}` not found for dag: `{dag_id}`" + ) # Check existing XCom already_existing_query = XComModel.get_many( @@ -249,6 +254,7 @@ def create_xcom_entry( XComModel.map_index == request_body.map_index, ) .limit(1) + .options(joinedload(XComModel.dag_run).joinedload(DR.dag_model)) ) return XComResponseNative.model_validate(xcom) @@ -289,6 +295,7 @@ def update_xcom_entry( XComModel.map_index == patch_body.map_index, ) .limit(1) + .options(joinedload(XComModel.dag_run).joinedload(DR.dag_model)) ) if not xcom_entry: diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py index f7b19c53ce2f5..677776574e80d 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py @@ -20,6 +20,7 @@ from airflow.api_fastapi.core_api.routes.ui.assets import assets_router from airflow.api_fastapi.core_api.routes.ui.auth import auth_router from airflow.api_fastapi.core_api.routes.ui.backfills import backfills_router +from airflow.api_fastapi.core_api.routes.ui.calendar import calendar_router from airflow.api_fastapi.core_api.routes.ui.config import config_router from airflow.api_fastapi.core_api.routes.ui.connections import connections_router from airflow.api_fastapi.core_api.routes.ui.dags import dags_router @@ -40,3 +41,4
@@ ui_router.include_router(structure_router) ui_router.include_router(backfills_router) ui_router.include_router(grid_router) +ui_router.include_router(calendar_router) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/assets.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/assets.py index caa3d2b0a12d5..ce476d94ffc07 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/assets.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/assets.py @@ -17,9 +17,10 @@ from __future__ import annotations -from fastapi import Depends, HTTPException, Request, status +from fastapi import Depends, HTTPException, status from sqlalchemy import and_, func, select +from airflow.api_fastapi.common.dagbag import DagBagDep from airflow.api_fastapi.common.db.common import SessionDep from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.security import requires_access_asset, requires_access_dag @@ -35,14 +36,9 @@ ) def next_run_assets( dag_id: str, - request: Request, + dag_bag: DagBagDep, session: SessionDep, ) -> dict: - dag = request.app.state.dag_bag.get_dag(dag_id) - - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"can't find dag {dag_id}") - dag_model = DagModel.get_dagmodel(dag_id, session=session) if dag_model is None: raise HTTPException(status.HTTP_404_NOT_FOUND, f"can't find associated dag_model {dag_id}") diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/backfills.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/backfills.py index add5c536e76e3..e0f310f7043be 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/backfills.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/backfills.py @@ -49,7 +49,7 @@ Depends(requires_access_dag(method="GET")), ], ) -def list_backfills( +def list_backfills_ui( limit: QueryLimit, offset: QueryOffset, order_by: Annotated[ diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py new file mode 100644 index 0000000000000..10a86b2b7c92c --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py @@ -0,0 +1,71 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
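+# A usage sketch (hypothetical values; the mount prefix depends on how the UI
+# sub-application is served, e.g. GET /ui/calendar/{dag_id}?granularity=hourly):
+# the endpoint below aggregates historical and planned runs for one DAG into
+# time buckets and returns them as a CalendarTimeRangeCollectionResponse.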
+from __future__ import annotations + +from typing import Annotated, Literal + +from fastapi import Depends + +from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag +from airflow.api_fastapi.common.db.common import SessionDep +from airflow.api_fastapi.common.parameters import RangeFilter, datetime_range_filter_factory +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.datamodels.ui.calendar import CalendarTimeRangeCollectionResponse +from airflow.api_fastapi.core_api.security import requires_access_dag +from airflow.api_fastapi.core_api.services.ui.calendar import CalendarService +from airflow.models.dagrun import DagRun + +calendar_router = AirflowRouter(prefix="/calendar", tags=["Calendar"]) + + +@calendar_router.get( + "/{dag_id}", + dependencies=[ + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.TASK_INSTANCE, + ) + ), + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.RUN, + ) + ), + ], +) +def get_calendar( + dag_id: str, + session: SessionDep, + dag_bag: DagBagDep, + logical_date: Annotated[RangeFilter, Depends(datetime_range_filter_factory("logical_date", DagRun))], + granularity: Literal["hourly", "daily"] = "daily", +) -> CalendarTimeRangeCollectionResponse: + """Get calendar data for a DAG including historical and planned DAG runs.""" + dag = get_latest_version_of_dag(dag_bag, dag_id, session) + + calendar_service = CalendarService() + + return calendar_service.get_calendar_data( + dag_id=dag_id, + session=session, + dag=dag, + logical_date=logical_date, + granularity=granularity, + ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/config.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/config.py index 11eefab1c8543..f4fe5d5c6658c 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/config.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/config.py @@ -23,45 +23,40 @@ from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.ui.config import ConfigResponse from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc -from airflow.api_fastapi.core_api.security import requires_access_configuration +from airflow.api_fastapi.core_api.security import requires_authenticated from airflow.configuration import conf from airflow.settings import DASHBOARD_UIALERTS +from airflow.utils.log.log_reader import TaskLogReader config_router = AirflowRouter(tags=["Config"]) -WEBSERVER_CONFIG_KEYS = [ - "navbar_color", - "page_size", - "auto_refresh_interval", + +API_CONFIG_KEYS = [ + "enable_swagger_ui", "hide_paused_dags_by_default", - "warn_deployment_exposure", + "page_size", "default_wrap", + "auto_refresh_interval", "require_confirmation_dag_change", - "enable_swagger_ui", - "instance_name_has_markup", - "navbar_text_color", - "navbar_hover_color", - "navbar_text_hover_color", ] @config_router.get( "/config", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), - dependencies=[Depends(requires_access_configuration("GET"))], + dependencies=[Depends(requires_authenticated())], ) def get_configs() -> ConfigResponse: """Get configs for UI.""" - conf_dict = conf.as_dict() - - config = {key: conf_dict["webserver"].get(key) for key in WEBSERVER_CONFIG_KEYS} + config = {key: conf.get("api", key) for key in 
API_CONFIG_KEYS} + task_log_reader = TaskLogReader() additional_config: dict[str, Any] = { - "instance_name": conf.get("webserver", "instance_name", fallback="Airflow"), - "audit_view_included_events": conf.get("webserver", "audit_view_included_events", fallback=""), - "audit_view_excluded_events": conf.get("webserver", "audit_view_excluded_events", fallback=""), + "instance_name": conf.get("api", "instance_name", fallback="Airflow"), "test_connection": conf.get("core", "test_connection", fallback="Disabled"), "dashboard_alert": DASHBOARD_UIALERTS, + "show_external_log_redirect": task_log_reader.supports_external_link, + "external_log_name": getattr(task_log_reader.log_handler, "log_name", None), } config.update({key: value for key, value in additional_config.items()}) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py index da57a6fc02f11..5b5305dc6d6a7 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py @@ -17,9 +17,10 @@ from __future__ import annotations from typing import Annotated -from fastapi import Depends +from fastapi import Depends, HTTPException, status from sqlalchemy import and_, func, select from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity @@ -27,63 +28,117 @@ SessionDep, paginated_select, ) +from airflow.api_fastapi.common.db.dags import generate_dag_with_latest_run_query from airflow.api_fastapi.common.parameters import ( FilterOptionEnum, FilterParam, + QueryBundleNameFilter, + QueryBundleVersionFilter, QueryDagDisplayNamePatternSearch, QueryDagIdPatternSearch, QueryExcludeStaleFilter, + QueryFavoriteFilter, QueryLastDagRunStateFilter, QueryLimit, QueryOffset, QueryOwnersFilter, QueryPausedFilter, QueryTagsFilter, + SortParam, filter_param_factory, ) from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.datamodels.dag_run import DAGRunResponse from airflow.api_fastapi.core_api.datamodels.dags import DAGResponse +from airflow.api_fastapi.core_api.datamodels.ui.dag_runs import DAGRunLightResponse from airflow.api_fastapi.core_api.datamodels.ui.dags import ( DAGWithLatestDagRunsCollectionResponse, DAGWithLatestDagRunsResponse, ) +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import ( ReadableDagsFilterDep, requires_access_dag, ) from airflow.models import DagModel, DagRun -dags_router = AirflowRouter(prefix="/dags", tags=["Dags"]) +dags_router = AirflowRouter(prefix="/dags", tags=["DAG"]) @dags_router.get( - "/recent_dag_runs", + "", response_model_exclude_none=True, dependencies=[ Depends(requires_access_dag(method="GET")), Depends(requires_access_dag("GET", DagAccessEntity.RUN)), ], + operation_id="get_dags_ui", ) -def recent_dag_runs( +def get_dags( limit: QueryLimit, offset: QueryOffset, tags: QueryTagsFilter, owners: QueryOwnersFilter, dag_ids: Annotated[ FilterParam[list[str] | None], - Depends(filter_param_factory(DagRun.dag_id, list[str] | None, FilterOptionEnum.IN, "dag_ids")), + Depends(filter_param_factory(DagModel.dag_id, list[str] | None, FilterOptionEnum.IN, "dag_ids")), ], dag_id_pattern: QueryDagIdPatternSearch, dag_display_name_pattern: QueryDagDisplayNamePatternSearch, exclude_stale: QueryExcludeStaleFilter, paused: QueryPausedFilter, last_dag_run_state: QueryLastDagRunStateFilter, +
bundle_name: QueryBundleNameFilter, + bundle_version: QueryBundleVersionFilter, + order_by: Annotated[ + SortParam, + Depends( + SortParam( + ["dag_id", "dag_display_name", "next_dagrun", "state", "start_date"], + DagModel, + {"last_run_state": DagRun.state, "last_run_start_date": DagRun.start_date}, + ).dynamic_depends() + ), + ], + is_favorite: QueryFavoriteFilter, readable_dags_filter: ReadableDagsFilterDep, session: SessionDep, dag_runs_limit: int = 10, ) -> DAGWithLatestDagRunsCollectionResponse: - """Get recent DAG runs.""" + """Get DAGs with recent DagRun.""" + # Fetch DAGs with their latest DagRun and apply filters + query = generate_dag_with_latest_run_query( + max_run_filters=[ + last_dag_run_state, + ], + order_by=order_by, + ) + + dags_select, total_entries = paginated_select( + statement=query, + filters=[ + exclude_stale, + paused, + dag_id_pattern, + dag_ids, + dag_display_name_pattern, + tags, + owners, + last_dag_run_state, + is_favorite, + readable_dags_filter, + bundle_name, + bundle_version, + ], + order_by=order_by, + offset=offset, + limit=limit, + session=session, + ) + + dags = [dag for dag in session.scalars(dags_select)] + + # Populate the last 'dag_runs_limit' DagRuns for each DAG recent_runs_subquery = ( select( DagRun.dag_id, @@ -95,71 +150,82 @@ def recent_dag_runs( ) .label("rank"), ) + .where(DagRun.dag_id.in_([dag.dag_id for dag in dags])) .order_by(DagRun.run_after.desc()) .subquery() ) - dags_with_recent_dag_runs_select = ( + + recent_dag_runs_select = ( select( - DagRun, - DagModel, recent_runs_subquery.c.run_after, + DagRun, ) - .join(DagModel, DagModel.dag_id == recent_runs_subquery.c.dag_id) .join( DagRun, and_( - DagRun.dag_id == DagModel.dag_id, + DagRun.dag_id == recent_runs_subquery.c.dag_id, DagRun.run_after == recent_runs_subquery.c.run_after, ), ) .where(recent_runs_subquery.c.rank <= dag_runs_limit) .group_by( - DagModel.dag_id, recent_runs_subquery.c.run_after, DagRun.run_after, DagRun.id, ) .order_by(recent_runs_subquery.c.run_after.desc()) ) - dags_with_recent_dag_runs_select_filter, _ = paginated_select( - statement=dags_with_recent_dag_runs_select, - filters=[ - exclude_stale, - paused, - dag_id_pattern, - dag_ids, - dag_display_name_pattern, - tags, - owners, - last_dag_run_state, - readable_dags_filter, - ], - order_by=None, - offset=offset, - limit=limit, - ) - dags_with_recent_dag_runs = session.execute(dags_with_recent_dag_runs_select_filter) + + recent_dag_runs = session.execute(recent_dag_runs_select) + # aggregate rows by dag_id - dag_runs_by_dag_id: dict[str, DAGWithLatestDagRunsResponse] = {} + dag_runs_by_dag_id: dict[str, DAGWithLatestDagRunsResponse] = { + dag.dag_id: DAGWithLatestDagRunsResponse.model_validate( + { + **DAGResponse.model_validate(dag).model_dump(), + "asset_expression": dag.asset_expression, + "latest_dag_runs": [], + } + ) + for dag in dags + } - for row in dags_with_recent_dag_runs: - dag_run, dag, *_ = row - dag_id = dag.dag_id + for row in recent_dag_runs: + _, dag_run = row + dag_id = dag_run.dag_id dag_run_response = DAGRunResponse.model_validate(dag_run) - if dag_id not in dag_runs_by_dag_id: - dag_response = DAGResponse.model_validate(dag) - dag_model: DagModel = session.get(DagModel, dag.dag_id) - dag_runs_by_dag_id[dag_id] = DAGWithLatestDagRunsResponse.model_validate( - { - **dag_response.model_dump(), - "asset_expression": dag_model.asset_expression, - "latest_dag_runs": [dag_run_response], - } - ) - else: - dag_runs_by_dag_id[dag_id].latest_dag_runs.append(dag_run_response) + 
dag_runs_by_dag_id[dag_id].latest_dag_runs.append(dag_run_response) return DAGWithLatestDagRunsCollectionResponse( - total_entries=len(dag_runs_by_dag_id), + total_entries=total_entries, dags=list(dag_runs_by_dag_id.values()), ) + + +@dags_router.get( + "/{dag_id}/latest_run", + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.RUN))], +) +def get_latest_run_info(dag_id: str, session: SessionDep) -> DAGRunLightResponse | None: + """Get latest run.""" + if dag_id == "~": + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + "`~` was supplied as dag_id, but querying multiple dags is not supported.", + ) + return session.execute( + select( + DagRun.id, + DagRun.dag_id, + DagRun.run_id, + DagRun.end_date, + DagRun.logical_date, + DagRun.run_after, + DagRun.start_date, + DagRun.state, + ) + .where(DagRun.dag_id == dag_id) + .order_by(DagRun.run_after.desc()) + .limit(1) + ).one_or_none() diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dashboard.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dashboard.py index e7f6d42c9d4c2..dbbada8cf39c7 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dashboard.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dashboard.py @@ -18,17 +18,22 @@ from fastapi import Depends, status from sqlalchemy import func, select +from sqlalchemy.sql.expression import case, false +from airflow._shared.timezones import timezone from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity from airflow.api_fastapi.common.db.common import SessionDep from airflow.api_fastapi.common.parameters import DateTimeQuery, OptionalDateTimeQuery from airflow.api_fastapi.common.router import AirflowRouter -from airflow.api_fastapi.core_api.datamodels.ui.dashboard import HistoricalMetricDataResponse +from airflow.api_fastapi.core_api.datamodels.ui.dashboard import ( + DashboardDagStatsResponse, + HistoricalMetricDataResponse, +) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc -from airflow.api_fastapi.core_api.security import requires_access_dag +from airflow.api_fastapi.core_api.security import ReadableDagsFilterDep, requires_access_dag +from airflow.models.dag import DagModel from airflow.models.dagrun import DagRun, DagRunType from airflow.models.taskinstance import TaskInstance -from airflow.utils import timezone from airflow.utils.state import DagRunState, TaskInstanceState dashboard_router = AirflowRouter(tags=["Dashboard"], prefix="/dashboard") @@ -45,26 +50,30 @@ def historical_metrics( session: SessionDep, start_date: DateTimeQuery, + readable_dags_filter: ReadableDagsFilterDep, end_date: OptionalDateTimeQuery = None, ) -> HistoricalMetricDataResponse: """Return cluster activity historical metrics.""" current_time = timezone.utcnow() + permitted_dag_ids = readable_dags_filter.value # DagRuns dag_run_types = session.execute( select(DagRun.run_type, func.count(DagRun.run_id)) .where( - DagRun.start_date >= start_date, + func.coalesce(DagRun.start_date, current_time) >= start_date, func.coalesce(DagRun.end_date, current_time) <= func.coalesce(end_date, current_time), ) + .where(DagRun.dag_id.in_(permitted_dag_ids)) .group_by(DagRun.run_type) ).all() dag_run_states = session.execute( select(DagRun.state, func.count(DagRun.run_id)) .where( - DagRun.start_date >= start_date, + func.coalesce(DagRun.start_date, current_time) >= start_date, 
func.coalesce(DagRun.end_date, current_time) <= func.coalesce(end_date, current_time), ) + .where(DagRun.dag_id.in_(permitted_dag_ids)) .group_by(DagRun.state) ).all() @@ -73,9 +82,10 @@ def historical_metrics( select(TaskInstance.state, func.count(TaskInstance.run_id)) .join(TaskInstance.dag_run) .where( - DagRun.start_date >= start_date, + func.coalesce(DagRun.start_date, current_time) >= start_date, func.coalesce(DagRun.end_date, current_time) <= func.coalesce(end_date, current_time), ) + .where(DagRun.dag_id.in_(permitted_dag_ids)) .group_by(TaskInstance.state) ).all() @@ -97,3 +107,54 @@ def historical_metrics( } return HistoricalMetricDataResponse.model_validate(historical_metrics_response) + + +@dashboard_router.get( + "/dag_stats", + dependencies=[Depends(requires_access_dag(method="GET"))], +) +def dag_stats( + session: SessionDep, + readable_dags_filter: ReadableDagsFilterDep, +) -> DashboardDagStatsResponse: + """Return basic DAG stats with counts of DAGs in various states.""" + permitted_dag_ids = readable_dags_filter.value + latest_dates_subq = ( + select(DagRun.dag_id, func.max(DagRun.logical_date).label("max_logical_date")) + .where(DagRun.logical_date.is_not(None)) + .where(DagRun.dag_id.in_(permitted_dag_ids)) + .group_by(DagRun.dag_id) + .subquery() + ) + + latest_runs = ( + select( + DagModel.dag_id, + DagModel.is_paused, + DagRun.state, + ) + .join(DagModel, DagRun.dag_id == DagModel.dag_id) + .join( + latest_dates_subq, + (DagRun.dag_id == latest_dates_subq.c.dag_id) + & (DagRun.logical_date == latest_dates_subq.c.max_logical_date), + ) + .where(DagRun.dag_id.in_(permitted_dag_ids)) + .cte() + ) + + combined_query = select( + func.coalesce(func.sum(case((latest_runs.c.is_paused == false(), 1))), 0).label("active"), + func.coalesce(func.sum(case((latest_runs.c.state == DagRunState.FAILED, 1))), 0).label("failed"), + func.coalesce(func.sum(case((latest_runs.c.state == DagRunState.RUNNING, 1))), 0).label("running"), + func.coalesce(func.sum(case((latest_runs.c.state == DagRunState.QUEUED, 1))), 0).label("queued"), + ).select_from(latest_runs) + + counts = session.execute(combined_query).first() + + return DashboardDagStatsResponse( + active_dag_count=counts.active, + failed_dag_count=counts.failed, + running_dag_count=counts.running, + queued_dag_count=counts.queued, + ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py index 4ae459b42bb25..fd217f3fbdf9b 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py @@ -18,22 +18,15 @@ from __future__ import annotations import collections -import itertools -from typing import Annotated +from typing import TYPE_CHECKING, Annotated import structlog -from fastapi import Depends, HTTPException, Request, status +from fastapi import Depends, HTTPException, status from sqlalchemy import select -from sqlalchemy.orm import joinedload -from airflow import DAG from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.parameters import ( - QueryDagRunRunTypesFilter, - QueryDagRunStateFilter, - QueryIncludeDownstream, - QueryIncludeUpstream, QueryLimit, QueryOffset, RangeFilter, @@ -41,222 +34,316 @@ datetime_range_filter_factory, ) from airflow.api_fastapi.common.router import AirflowRouter +from 
airflow.api_fastapi.core_api.datamodels.ui.common import ( + GridNodeResponse, + GridRunsResponse, +) from airflow.api_fastapi.core_api.datamodels.ui.grid import ( - GridDAGRunwithTIs, - GridResponse, + GridTISummaries, ) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import requires_access_dag from airflow.api_fastapi.core_api.services.ui.grid import ( - fill_task_instance_summaries, - get_child_task_map, - get_combined_structure, - get_structure_from_dag, - get_task_group_map, + _find_aggregates, + _merge_node_dicts, ) -from airflow.models import DagRun, TaskInstance from airflow.models.dag_version import DagVersion -from airflow.models.taskinstancehistory import TaskInstanceHistory -from airflow.utils.state import TaskInstanceState +from airflow.models.dagrun import DagRun +from airflow.models.serialized_dag import SerializedDagModel +from airflow.models.taskinstance import TaskInstance +from airflow.sdk.definitions.taskgroup import ( + get_task_group_children_getter, + task_group_to_dict_grid, +) log = structlog.get_logger(logger_name=__name__) grid_router = AirflowRouter(prefix="/grid", tags=["Grid"]) +def _get_latest_serdag(dag_id, session): + serdag = session.scalar( + select(SerializedDagModel) + .where( + SerializedDagModel.dag_id == dag_id, + ) + .order_by(SerializedDagModel.id.desc()) + .limit(1) + ) + if not serdag: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"Dag with id {dag_id} was not found", + ) + return serdag + + +def _get_serdag(dag_id, dag_version_id, session) -> SerializedDagModel | None: + # this is a simplification - we account for structure based on the first task + version = session.scalar(select(DagVersion).where(DagVersion.id == dag_version_id)) + if not version: + version = session.scalar( + select(DagVersion) + .where( + DagVersion.dag_id == dag_id, + ) + .order_by(DagVersion.id) # ascending because this is mostly for pre-3.0 upgrade + .limit(1) + ) + if not (serdag := version.serialized_dag): + log.error( + "No serialized dag found", + dag_id=dag_id, + version_id=version.id, + version_number=version.version_number, + ) + return serdag + + @grid_router.get( - "/{dag_id}", + "/structure/{dag_id}", responses=create_openapi_http_exception_doc([status.HTTP_400_BAD_REQUEST, status.HTTP_404_NOT_FOUND]), dependencies=[ Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.RUN)), ], + response_model_exclude_none=True, ) -def grid_data( +def get_dag_structure( dag_id: str, session: SessionDep, offset: QueryOffset, - request: Request, - run_type: QueryDagRunRunTypesFilter, - state: QueryDagRunStateFilter, limit: QueryLimit, order_by: Annotated[ SortParam, Depends(SortParam(["run_after", "logical_date", "start_date", "end_date"], DagRun).dynamic_depends()), ], run_after: Annotated[RangeFilter, Depends(datetime_range_filter_factory("run_after", DagRun))], - logical_date: Annotated[RangeFilter, Depends(datetime_range_filter_factory("logical_date", DagRun))], - include_upstream: QueryIncludeUpstream = False, - include_downstream: QueryIncludeDownstream = False, - root: str | None = None, -) -> GridResponse: - """Return grid data.""" - dag: DAG = request.app.state.dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found")
latest_serdag = _get_latest_serdag(dag_id, session) + latest_dag = latest_serdag.dag # Retrieve, sort the previous DAG Runs - base_query = ( - select(DagRun) - .join(DagRun.dag_run_note, isouter=True) - .options(joinedload(DagRun.task_instances).joinedload(TaskInstance.dag_version)) - .options(joinedload(DagRun.task_instances_histories).joinedload(TaskInstanceHistory.dag_version)) - .where(DagRun.dag_id == dag.dag_id) - ) - - # This comparison is to falls to DAG timetable when no order_by is provided - if order_by.value == order_by.get_primary_key_string(): + base_query = select(DagRun.id).where(DagRun.dag_id == dag_id) + # This comparison is to fall back to DAG timetable when no order_by is provided + if order_by.value == [order_by.get_primary_key_string()]: + ordering = list(latest_dag.timetable.run_ordering) order_by = SortParam( - allowed_attrs=[run_ordering for run_ordering in dag.timetable.run_ordering], model=DagRun - ).set_value(dag.timetable.run_ordering[0]) - + allowed_attrs=ordering, + model=DagRun, + ).set_value(ordering) dag_runs_select_filter, _ = paginated_select( statement=base_query, - filters=[ - run_type, - state, - run_after, - logical_date, - ], order_by=order_by, offset=offset, + filters=[run_after], limit=limit, ) + run_ids = list(session.scalars(dag_runs_select_filter)) - dag_runs = list(session.scalars(dag_runs_select_filter).unique()) - - # Check if there are any DAG Runs with given criteria to eliminate unnecessary queries/errors - if not dag_runs: - structure = get_structure_from_dag(dag=dag) - return GridResponse(dag_runs=[], structure=structure) + task_group_sort = get_task_group_children_getter() + if not run_ids: + nodes = [task_group_to_dict_grid(x) for x in task_group_sort(latest_dag.task_group)] + return nodes - # Retrieve, sort and encode the Task Instances - tis_of_dag_runs, _ = paginated_select( - statement=select(TaskInstance) - .join(TaskInstance.task_instance_note, isouter=True) - .where(TaskInstance.dag_id == dag.dag_id), - filters=[], - order_by=SortParam(allowed_attrs=["task_id", "run_id"], model=TaskInstance).set_value("task_id"), - offset=offset, - limit=None, + serdags = session.scalars( + select(SerializedDagModel).where( + SerializedDagModel.dag_version_id.in_( + select(TaskInstance.dag_version_id) + .join(TaskInstance.dag_run) + .where( + DagRun.id.in_(run_ids), + SerializedDagModel.id != latest_serdag.id, + ) + ) + ) ) + merged_nodes: list[GridNodeResponse] = [] + dags = [latest_dag] + for serdag in serdags: + if serdag: + dags.append(serdag.dag) + for dag in dags: + nodes = [task_group_to_dict_grid(x) for x in task_group_sort(dag.task_group)] + _merge_node_dicts(merged_nodes, nodes) - task_instances = session.scalars(tis_of_dag_runs) + return merged_nodes - tis_by_run_id: dict[str, list[TaskInstance]] = collections.defaultdict(list) - for ti in task_instances: - tis_by_run_id[ti.run_id].append(ti) - # Generate Grouped Task Instances - task_node_map_exclude = None - if root: - task_node_map_exclude = get_task_group_map( - dag=dag.partial_subset( - task_ids=root, - include_upstream=include_upstream, - include_downstream=include_downstream, +@grid_router.get( + "/runs/{dag_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_404_NOT_FOUND, + ] + ), + dependencies=[ + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.TASK_INSTANCE, ) - ) + ), + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.RUN, + ) + ), + ], + 
response_model_exclude_none=True, +) +def get_grid_runs( + dag_id: str, + session: SessionDep, + offset: QueryOffset, + limit: QueryLimit, + order_by: Annotated[ + SortParam, + Depends( + SortParam( + [ + "run_after", + "logical_date", + "start_date", + "end_date", + ], + DagRun, + ).dynamic_depends() + ), + ], + run_after: Annotated[RangeFilter, Depends(datetime_range_filter_factory("run_after", DagRun))], +) -> list[GridRunsResponse]: + """Get info about a run for the grid.""" + # Retrieve, sort the previous DAG Runs + base_query = select( + DagRun.dag_id, + DagRun.run_id, + DagRun.queued_at, + DagRun.start_date, + DagRun.end_date, + DagRun.run_after, + DagRun.state, + DagRun.run_type, + ).where(DagRun.dag_id == dag_id) + + # This comparison is to fall back to DAG timetable when no order_by is provided + if order_by.value == [order_by.get_primary_key_string()]: + latest_serdag = _get_latest_serdag(dag_id, session) + latest_dag = latest_serdag.dag + ordering = list(latest_dag.timetable.run_ordering) + order_by = SortParam( + allowed_attrs=ordering, + model=DagRun, + ).set_value(ordering) + dag_runs_select_filter, _ = paginated_select( + statement=base_query, + order_by=order_by, + offset=offset, + filters=[run_after], + limit=limit, + ) + return session.execute(dag_runs_select_filter) - # Group the Task Instances by Parent Task (TaskGroup or Mapped) and All Task Instances - parent_tis: dict[tuple[str, str], list] = collections.defaultdict(list) - all_tis: dict[tuple[str, str], list] = collections.defaultdict(list) - for tis in tis_by_run_id.values(): - # this is a simplification - we account for structure based on the first task - version = tis[0].dag_version - if not version: - version = session.scalar( - select(DagVersion) - .where( - DagVersion.dag_id == tis[0].dag_id, - ) - .order_by(DagVersion.id) # ascending cus this is mostly for pre-3.0 upgrade - .limit(1) +@grid_router.get( + "/ti_summaries/{dag_id}/{run_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_404_NOT_FOUND, + ] + ), + dependencies=[ + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.TASK_INSTANCE, ) - if not version.serialized_dag: - log.error( - "No serialized dag found", - dag_id=tis[0].dag_id, - version_id=version.id, - version_number=version.version_number, + ), + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.RUN, ) - continue - run_dag = version.serialized_dag.dag - task_node_map = get_task_group_map(dag=run_dag) - for ti in tis: - # Skip the Task Instances if upstream/downstream filtering is applied or if the task was removed. - if ( - task_node_map_exclude and ti.task_id not in task_node_map_exclude - ) or ti.state == TaskInstanceState.REMOVED: - continue + ), + ], +) +def get_grid_ti_summaries( + dag_id: str, + run_id: str, + session: SessionDep, +) -> GridTISummaries: + """ + Get states for TIs / "groups" of TIs. 
- # Populate the Grouped Task Instances (All Task Instances except the Parent Task Instances) - if ti.task_id in get_child_task_map( - parent_task_id=task_node_map[ti.task_id]["parent_id"], task_node_map=task_node_map - ): - all_tis[(ti.task_id, ti.run_id)].append(ti) - # Populate the Parent Task Instances - parent_id = task_node_map[ti.task_id]["parent_id"] - if not parent_id and task_node_map[ti.task_id]["is_group"]: - parent_tis[(ti.task_id, ti.run_id)].append(ti) - elif parent_id and task_node_map[parent_id]["is_group"]: - parent_tis[(parent_id, ti.run_id)].append(ti) + Essentially this is to know what color to put in the squares in the grid. - # Clear task_node_map_exclude to free up memory - if task_node_map_exclude: - task_node_map_exclude.clear() + The tricky part here is that we aggregate the state for groups and mapped tasks. - task_node_map = get_task_group_map(dag=dag) - # Extend subgroup task instances to parent task instances to calculate the aggregates states - task_group_map = {k: v for k, v in task_node_map.items() if v["is_group"]} - parent_tis.update( - { - (task_id_parent, run_id): parent_tis[(task_id_parent, run_id)] + parent_tis[(task_id, run_id)] - for task_id, task_map in task_group_map.items() - if task_map["is_group"] - for (task_id_parent, run_id), tis in list(parent_tis.items()) - if task_id_parent == task_map["parent_id"] - } - ) - # Create the Task Instance Summaries to be used in the Grid Response - task_instance_summaries: dict[str, list] = { - run_id: [] for _, run_id in itertools.chain(parent_tis, all_tis) - } + We don't add all the TIs for mapped TIs -- we only add one entry for the mapped task and + its state is an aggregate of its TI states. - # Fill the Task Instance Summaries for the Parent and Grouped Task Instances. - # First the Parent Task Instances because they are used in the Grouped Task Instances - fill_task_instance_summaries( - grouped_task_instances=parent_tis, - task_instance_summaries_to_fill=task_instance_summaries, - session=session, + And for task groups, we add a "task" entry that is not really a task but just + represents the group (so that we can show a filled-in box when the group + is not expanded), and its state is an aggregate of those within it.
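+
+    For illustration only: the aggregation is in spirit a "most severe state wins" fold
+    over the children (the actual precedence lives in _find_aggregates and may differ), e.g.
+
+        failed > upstream_failed > running > queued > success
+
+    so a collapsed group's square shows the most severe state found among its children.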
+ """ + tis_of_dag_runs, _ = paginated_select( + statement=( + select( + TaskInstance.task_id, + TaskInstance.state, + TaskInstance.dag_version_id, + TaskInstance.start_date, + TaskInstance.end_date, + ) + .where(TaskInstance.dag_id == dag_id) + .where( + TaskInstance.run_id == run_id, + ) + ), + filters=[], + order_by=SortParam(allowed_attrs=["task_id", "run_id"], model=TaskInstance).set_value(["task_id"]), + limit=None, + return_total_entries=False, ) - # Fill the Task Instance Summaries for the Grouped Task Instances - fill_task_instance_summaries( - grouped_task_instances=all_tis, - task_instance_summaries_to_fill=task_instance_summaries, + task_instances = list(session.execute(tis_of_dag_runs)) + if not task_instances: + raise HTTPException( + status.HTTP_404_NOT_FOUND, f"No task instances for dag_id={dag_id} run_id={run_id}" + ) + ti_details = collections.defaultdict(list) + for ti in task_instances: + ti_details[ti.task_id].append( + { + "state": ti.state, + "start_date": ti.start_date, + "end_date": ti.end_date, + } + ) + serdag = _get_serdag( + dag_id=dag_id, + dag_version_id=task_instances[0].dag_version_id, session=session, ) + if TYPE_CHECKING: + assert serdag - # Aggregate the Task Instances by DAG Run - grid_dag_runs = [ - GridDAGRunwithTIs( - run_id=dag_run.run_id, - queued_at=dag_run.queued_at, - start_date=dag_run.start_date, - end_date=dag_run.end_date, - run_after=dag_run.run_after, - logical_date=dag_run.logical_date, - state=dag_run.state, - run_type=dag_run.run_type, - data_interval_start=dag_run.data_interval_start, - data_interval_end=dag_run.data_interval_end, - note=dag_run.note, - task_instances=task_instance_summaries.get(dag_run.run_id, []), - ) - for dag_run in dag_runs - ] - - flat_tis = itertools.chain.from_iterable(tis_by_run_id.values()) - structure = get_combined_structure(task_instances=flat_tis, session=session) + def get_node_sumaries(): + for node in _find_aggregates( + node=serdag.dag.task_group, + parent_node=None, + ti_details=ti_details, + ): + if node["type"] == "task": + node["child_states"] = None + node["min_start_date"] = None + node["max_end_date"] = None + yield node - return GridResponse(dag_runs=grid_dag_runs, structure=structure) + return { # type: ignore[return-value] + "run_id": run_id, + "dag_id": dag_id, + "task_instances": list(get_node_sumaries()), + } diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py index 05fb79bd0bf29..c308ae214322c 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py @@ -26,11 +26,14 @@ from airflow.api_fastapi.core_api.datamodels.ui.structure import StructureDataResponse from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import requires_access_dag -from airflow.api_fastapi.core_api.services.ui.structure import get_upstream_assets +from airflow.api_fastapi.core_api.services.ui.structure import ( + bind_output_assets_to_tasks, + get_upstream_assets, +) from airflow.models.dag_version import DagVersion from airflow.models.serialized_dag import SerializedDagModel +from airflow.sdk.definitions.taskgroup import task_group_to_dict from airflow.utils.dag_edges import dag_edges -from airflow.utils.task_group import task_group_to_dict structure_router = AirflowRouter(tags=["Structure"], prefix="/structure") @@ -119,7 +122,15 @@ def 
structure_data( elif ( dependency.target == dependency.dependency_type or dependency.source == dag_id ) and exit_node_ref: - end_edges.append({"source_id": exit_node_ref["id"], "target_id": dependency.node_id}) + end_edges.append( + { + "source_id": exit_node_ref["id"], + "target_id": dependency.node_id, + "resolved_from_alias": dependency.source.replace("asset-alias:", "", 1) + if dependency.source.startswith("asset-alias:") + else None, + } + ) # Add nodes nodes.append( @@ -130,13 +141,15 @@ def structure_data( } ) - if asset_expression := serialized_dag.dag_model.asset_expression: + if (asset_expression := serialized_dag.dag_model.asset_expression) and entry_node_ref: upstream_asset_nodes, upstream_asset_edges = get_upstream_assets( asset_expression, entry_node_ref["id"] ) data["nodes"] += upstream_asset_nodes - data["edges"] = upstream_asset_edges + data["edges"] += upstream_asset_edges + + data["edges"] += start_edges + end_edges - data["edges"] += start_edges + edges + end_edges + bind_output_assets_to_tasks(data["edges"], serialized_dag, version_number, session) return StructureDataResponse(**data) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/security.py b/airflow-core/src/airflow/api_fastapi/core_api/security.py index e57a6543faf46..d4f6e11e7e919 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/security.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/security.py @@ -16,13 +16,15 @@ # under the License. from __future__ import annotations +from collections.abc import Callable from pathlib import Path -from typing import TYPE_CHECKING, Annotated, Callable +from typing import TYPE_CHECKING, Annotated from urllib.parse import ParseResult, urljoin, urlparse from fastapi import Depends, HTTPException, Request, status -from fastapi.security import OAuth2PasswordBearer +from fastapi.security import HTTPBearer, OAuth2PasswordBearer from jwt import ExpiredSignatureError, InvalidTokenError +from pydantic import NonNegativeInt from airflow.api_fastapi.app import get_auth_manager from airflow.api_fastapi.auth.managers.models.base_user import BaseUser @@ -46,6 +48,7 @@ from airflow.models.xcom import XComModel if TYPE_CHECKING: + from fastapi.security import HTTPAuthorizationCredentials from sqlalchemy.sql import Select from airflow.api_fastapi.auth.managers.base_auth_manager import BaseAuthManager, ResourceMethod @@ -60,36 +63,36 @@ "information (such as user identity and scope) to authenticate subsequent requests. " "To learn more about Airflow public API authentication, please read https://airflow.apache.org/docs/apache-airflow/stable/security/api.html." 
) -oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/token", description=auth_description) +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/token", description=auth_description, auto_error=False) +bearer_scheme = HTTPBearer(auto_error=False) -async def get_user(token_str: Annotated[str, Depends(oauth2_scheme)]) -> BaseUser: +async def resolve_user_from_token(token_str: str | None) -> BaseUser: + if not token_str: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Not authenticated") + try: return await get_auth_manager().get_user_from_token(token_str) except ExpiredSignatureError: - raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Token Expired") + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Token Expired") except InvalidTokenError: - raise HTTPException(status.HTTP_403_FORBIDDEN, "Invalid JWT token") - - -GetUserDep = Annotated[BaseUser, Depends(get_user)] + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid JWT token") -async def get_user_with_exception_handling(request: Request) -> BaseUser | None: - # Currently the UI does not support JWT authentication, this method defines a fallback if no token is provided by the UI. - # We can remove this method when issue https://github.com/apache/airflow/issues/44884 is done. +async def get_user( + oauth_token: str | None = Depends(oauth2_scheme), + bearer_credentials: HTTPAuthorizationCredentials | None = Depends(bearer_scheme), +) -> BaseUser: token_str = None + if bearer_credentials and bearer_credentials.scheme.lower() == "bearer": + token_str = bearer_credentials.credentials + elif oauth_token: + token_str = oauth_token - # TODO remove try-except when authentication integrated everywhere, safeguard for non integrated clients and endpoints - try: - token_str = await oauth2_scheme(request) - except HTTPException as e: - if e.status_code == status.HTTP_401_UNAUTHORIZED: - return None + return await resolve_user_from_token(token_str) - if not token_str: # Handle None or empty token - return None - return await get_user(token_str) + +GetUserDep = Annotated[BaseUser, Depends(get_user)] def requires_access_dag( @@ -193,12 +196,12 @@ def depends_permitted_dags_filter( ] -def requires_access_backfill(method: ResourceMethod) -> Callable: +def requires_access_backfill(method: ResourceMethod) -> Callable[[Request, BaseUser], None]: def inner( request: Request, - user: Annotated[BaseUser | None, Depends(get_user)] = None, + user: GetUserDep, ) -> None: - backfill_id: str | None = request.path_params.get("backfill_id") + backfill_id: NonNegativeInt | None = request.path_params.get("backfill_id") _requires_access( is_authorized_callback=lambda: get_auth_manager().is_authorized_backfill( @@ -241,10 +244,10 @@ def inner( return inner -def requires_access_configuration(method: ResourceMethod) -> Callable[[Request, BaseUser | None], None]: +def requires_access_configuration(method: ResourceMethod) -> Callable[[Request, BaseUser], None]: def inner( request: Request, - user: Annotated[BaseUser | None, Depends(get_user)] = None, + user: GetUserDep, ) -> None: section: str | None = request.query_params.get("section") or request.path_params.get("section") @@ -259,10 +262,10 @@ def inner( return inner -def requires_access_variable(method: ResourceMethod) -> Callable[[Request, BaseUser | None], None]: +def requires_access_variable(method: ResourceMethod) -> Callable[[Request, BaseUser], None]: def inner( request: Request, - user: Annotated[BaseUser | None, Depends(get_user)] = None, + 
user: GetUserDep, ) -> None: variable_key: str | None = request.path_params.get("variable_key") @@ -275,10 +278,10 @@ def inner( return inner -def requires_access_asset(method: ResourceMethod) -> Callable: +def requires_access_asset(method: ResourceMethod) -> Callable[[Request, BaseUser], None]: def inner( request: Request, - user: Annotated[BaseUser | None, Depends(get_user)] = None, + user: GetUserDep, ) -> None: asset_id = request.path_params.get("asset_id") @@ -291,10 +294,10 @@ def inner( return inner -def requires_access_view(access_view: AccessView) -> Callable[[Request, BaseUser | None], None]: +def requires_access_view(access_view: AccessView) -> Callable[[Request, BaseUser], None]: def inner( request: Request, - user: Annotated[BaseUser | None, Depends(get_user)] = None, + user: GetUserDep, ) -> None: _requires_access( is_authorized_callback=lambda: get_auth_manager().is_authorized_view( @@ -305,10 +308,10 @@ def inner( return inner -def requires_access_asset_alias(method: ResourceMethod) -> Callable: +def requires_access_asset_alias(method: ResourceMethod) -> Callable[[Request, BaseUser], None]: def inner( request: Request, - user: Annotated[BaseUser | None, Depends(get_user)] = None, + user: GetUserDep, ) -> None: asset_alias_id: str | None = request.path_params.get("asset_alias_id") @@ -321,6 +324,18 @@ def inner( return inner +def requires_authenticated() -> Callable: + """Just ensure the user is authenticated - no need to check any specific permissions.""" + + def inner( + request: Request, + user: GetUserDep, + ) -> None: + pass + + return inner + + def _requires_access( *, is_authorized_callback: Callable[[], bool], diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py index dc1bd65e95fc0..94e1157d1c0c0 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py @@ -50,11 +50,11 @@ def handle_request(self) -> BulkResponse: results[action.action.value] = BulkActionResponse() if action.action == BulkAction.CREATE: - self.handle_bulk_create(action, results[action.action.value]) # type: ignore + self.handle_bulk_create(action, results[action.action.value]) elif action.action == BulkAction.UPDATE: - self.handle_bulk_update(action, results[action.action.value]) # type: ignore + self.handle_bulk_update(action, results[action.action.value]) elif action.action == BulkAction.DELETE: - self.handle_bulk_delete(action, results[action.action.value]) # type: ignore + self.handle_bulk_delete(action, results[action.action.value]) return BulkResponse(**results) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/config.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/config.py new file mode 100644 index 0000000000000..a23c957074bbd --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/config.py @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from fastapi import HTTPException, status +from fastapi.responses import Response + +from airflow.api_fastapi.common.types import Mimetype +from airflow.api_fastapi.core_api.datamodels.config import Config +from airflow.configuration import conf + + +def _check_expose_config() -> bool: + display_sensitive: bool | None = None + if conf.get("api", "expose_config").lower() == "non-sensitive-only": + expose_config = True + display_sensitive = False + else: + expose_config = conf.getboolean("api", "expose_config") + display_sensitive = True + + if not expose_config: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Your Airflow administrator chose not to expose the configuration, most likely for security reasons.", + ) + return display_sensitive + + +def _response_based_on_accept(accept: Mimetype, config: Config): + if accept == Mimetype.TEXT: + return Response(content=config.text_format, media_type=Mimetype.TEXT) + return config diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/connections.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/connections.py index 4c236cf9756be..05ab88ab90c1a 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/services/public/connections.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/connections.py @@ -17,6 +17,8 @@ from __future__ import annotations +import json + from fastapi import HTTPException, status from pydantic import ValidationError from sqlalchemy import select @@ -32,6 +34,7 @@ from airflow.api_fastapi.core_api.datamodels.connections import ConnectionBody from airflow.api_fastapi.core_api.services.public.common import BulkService from airflow.models.connection import Connection +from airflow.sdk.execution_time.secrets_masker import merge def update_orm_from_pydantic( @@ -56,11 +59,23 @@ def update_orm_from_pydantic( if (not update_mask and "password" in pydantic_conn.model_fields_set) or ( update_mask and "password" in update_mask ): - orm_conn.set_password(pydantic_conn.password) + if pydantic_conn.password is None: + orm_conn.set_password(pydantic_conn.password) + else: + merged_password = merge(pydantic_conn.password, orm_conn.password, "password") + orm_conn.set_password(merged_password) if (not update_mask and "extra" in pydantic_conn.model_fields_set) or ( update_mask and "extra" in update_mask ): - orm_conn.set_extra(pydantic_conn.extra) + if pydantic_conn.extra is None or orm_conn.extra is None: + orm_conn.set_extra(pydantic_conn.extra) + return + try: + merged_extra = merge(json.loads(pydantic_conn.extra), json.loads(orm_conn.extra)) + orm_conn.set_extra(json.dumps(merged_extra)) + except json.JSONDecodeError: + # We can't merge fields in an unstructured `extra` + orm_conn.set_extra(pydantic_conn.extra) class BulkConnectionService(BulkService[ConnectionBody]): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py new file mode 100644 index 0000000000000..259389e799494 --- /dev/null +++ 
b/airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py @@ -0,0 +1,85 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import asyncio +import itertools +import json +import operator +from typing import TYPE_CHECKING, Any + +import attrs +from sqlalchemy import select + +from airflow.models.dagrun import DagRun +from airflow.models.xcom import XCOM_RETURN_KEY, XComModel +from airflow.utils.session import create_session_async +from airflow.utils.state import State + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Iterator + + +@attrs.define +class DagRunWaiter: + """Wait for the specified dag run to finish, and collect info from it.""" + + dag_id: str + run_id: str + interval: float + result_task_ids: list[str] | None + + async def _get_dag_run(self) -> DagRun: + async with create_session_async() as session: + return await session.scalar(select(DagRun).filter_by(dag_id=self.dag_id, run_id=self.run_id)) + + def _serialize_xcoms(self) -> dict[str, Any]: + xcom_query = XComModel.get_many( + run_id=self.run_id, + key=XCOM_RETURN_KEY, + task_ids=self.result_task_ids, + dag_ids=self.dag_id, + ) + xcom_query = xcom_query.order_by(XComModel.task_id, XComModel.map_index) + + def _group_xcoms(g: Iterator[XComModel]) -> Any: + entries = list(g) + if len(entries) == 1 and entries[0].map_index < 0: # Unpack non-mapped task xcom. + return entries[0].value + return [entry.value for entry in entries] # Task is mapped; return all xcoms in a list. + + return { + task_id: _group_xcoms(g) + for task_id, g in itertools.groupby(xcom_query, key=operator.attrgetter("task_id")) + } + + def _serialize_response(self, dag_run: DagRun) -> str: + resp = {"state": dag_run.state} + if dag_run.state not in State.finished_dr_states: + return json.dumps(resp) + if self.result_task_ids: + resp["results"] = self._serialize_xcoms() + return json.dumps(resp) + + async def wait(self) -> AsyncGenerator[str, None]: + yield self._serialize_response(dag_run := await self._get_dag_run()) + yield "\n" + while dag_run.state not in State.finished_dr_states: + await asyncio.sleep(self.interval) + yield self._serialize_response(dag_run := await self._get_dag_run()) + yield "\n" diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/providers.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/providers.py new file mode 100644 index 0000000000000..2f9fa3a1b86d4 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/providers.py @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import re + +from airflow.api_fastapi.core_api.datamodels.providers import ProviderResponse +from airflow.providers_manager import ProviderInfo + + +def _remove_rst_syntax(value: str) -> str: + return re.sub("[`_<>]", "", value.strip(" \n.")) + + +def _provider_mapper(provider: ProviderInfo) -> ProviderResponse: + return ProviderResponse( + package_name=provider.data["package-name"], + description=_remove_rst_syntax(provider.data["description"]), + version=provider.version, + ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/task_instances.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/task_instances.py new file mode 100644 index 0000000000000..ea81ea52916e2 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/task_instances.py @@ -0,0 +1,283 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
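The bulk task-instance service defined in the new module below consumes the same bulk envelope as the other bulk endpoints. As a rough sketch of a payload it could handle — field names are inferred from the `BulkBody`/`BulkUpdateAction`/`BulkTaskInstanceBody` datamodels imported below, and the serialized enum values ("update", "delete", "fail", "skip") are assumptions not confirmed by this hunk:

# Hypothetical bulk request body (shape inferred from the datamodels used in this module).
bulk_body = {
    "actions": [
        {
            "action": "update",
            "entities": [
                # new_state is routed through _patch_task_instance_state, note through
                # _patch_task_instance_note; a map_index of None means "all map indexes".
                {"task_id": "extract", "map_index": -1, "new_state": "success"},
                {"task_id": "load", "note": "manually verified"},
            ],
            "action_on_non_existence": "fail",  # or "skip"
        },
        # Deletes take bare task_ids; map_index is pinned to -1 in handle_bulk_delete.
        {"action": "delete", "entities": ["obsolete_task"]},
    ]
}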
+
+from __future__ import annotations
+
+import structlog
+from fastapi import HTTPException, Query, status
+from fastapi.exceptions import RequestValidationError
+from pydantic import ValidationError
+from sqlalchemy import select
+from sqlalchemy.orm import joinedload
+from sqlalchemy.orm.session import Session
+
+from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag
+from airflow.api_fastapi.common.db.common import SessionDep
+from airflow.api_fastapi.core_api.datamodels.common import (
+    BulkActionNotOnExistence,
+    BulkActionResponse,
+    BulkBody,
+    BulkCreateAction,
+    BulkDeleteAction,
+    BulkUpdateAction,
+)
+from airflow.api_fastapi.core_api.datamodels.task_instances import BulkTaskInstanceBody, PatchTaskInstanceBody
+from airflow.api_fastapi.core_api.security import GetUserDep
+from airflow.api_fastapi.core_api.services.public.common import BulkService
+from airflow.listeners.listener import get_listener_manager
+from airflow.models.dag import DAG
+from airflow.models.taskinstance import TaskInstance as TI
+from airflow.utils.state import TaskInstanceState
+
+log = structlog.get_logger(__name__)
+
+
+def _patch_ti_validate_request(
+    dag_id: str,
+    dag_run_id: str,
+    task_id: str,
+    dag_bag: DagBagDep,
+    body: PatchTaskInstanceBody,
+    session: SessionDep,
+    map_index: int | None = -1,
+    update_mask: list[str] | None = Query(None),
+) -> tuple[DAG, list[TI], dict]:
+    dag = get_latest_version_of_dag(dag_bag, dag_id, session)
+
+    if not dag.has_task(task_id):
+        raise HTTPException(status.HTTP_404_NOT_FOUND, f"Task '{task_id}' not found in DAG '{dag_id}'")
+
+    query = (
+        select(TI)
+        .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id)
+        .join(TI.dag_run)
+        .options(joinedload(TI.rendered_task_instance_fields))
+    )
+    if map_index is not None:
+        query = query.where(TI.map_index == map_index)
+    else:
+        query = query.order_by(TI.map_index)
+
+    tis = session.scalars(query).all()
+
+    err_msg_404 = f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found"
+    if len(tis) == 0:
+        raise HTTPException(status.HTTP_404_NOT_FOUND, err_msg_404)
+
+    fields_to_update = body.model_fields_set
+    if update_mask:
+        fields_to_update = fields_to_update.intersection(update_mask)
+    else:
+        try:
+            PatchTaskInstanceBody.model_validate(body)
+        except ValidationError as e:
+            raise RequestValidationError(errors=e.errors())
+
+    return dag, list(tis), body.model_dump(include=fields_to_update, by_alias=True)
+
+
+def _patch_task_instance_state(
+    task_id: str,
+    dag_run_id: str,
+    dag: DAG,
+    task_instance_body: BulkTaskInstanceBody | PatchTaskInstanceBody,
+    data: dict,
+    session: Session,
+) -> None:
+    map_index = getattr(task_instance_body, "map_index", None)
+    map_indexes = None if map_index is None else [map_index]
+
+    updated_tis = dag.set_task_instance_state(
+        task_id=task_id,
+        run_id=dag_run_id,
+        map_indexes=map_indexes,
+        state=data["new_state"],
+        upstream=task_instance_body.include_upstream,
+        downstream=task_instance_body.include_downstream,
+        future=task_instance_body.include_future,
+        past=task_instance_body.include_past,
+        commit=True,
+        session=session,
+    )
+    if not updated_tis:
+        raise HTTPException(
+            status.HTTP_409_CONFLICT,
+            f"Task id {task_id} is already in {data['new_state']} state",
+        )
+
+    for ti in updated_tis:
+        try:
+            if data["new_state"] == TaskInstanceState.SUCCESS:
+                get_listener_manager().hook.on_task_instance_success(previous_state=None,
task_instance=ti) + elif data["new_state"] == TaskInstanceState.FAILED: + get_listener_manager().hook.on_task_instance_failed( + previous_state=None, + task_instance=ti, + error=f"TaskInstance's state was manually set to `{TaskInstanceState.FAILED}`.", + ) + except Exception: + log.exception("error calling listener") + + +def _patch_task_instance_note( + task_instance_body: BulkTaskInstanceBody | PatchTaskInstanceBody, + tis: list[TI], + user: GetUserDep, + update_mask: list[str] | None = Query(None), +) -> None: + for ti in tis: + if update_mask or task_instance_body.note is not None: + if ti.task_instance_note is None: + ti.note = (task_instance_body.note, user.get_id()) + else: + ti.task_instance_note.content = task_instance_body.note + ti.task_instance_note.user_id = user.get_id() + + +class BulkTaskInstanceService(BulkService[BulkTaskInstanceBody]): + """Service for handling bulk operations on task instances.""" + + def __init__( + self, + session: Session, + request: BulkBody[BulkTaskInstanceBody], + dag_id: str, + dag_run_id: str, + dag_bag: DagBagDep, + user: GetUserDep, + ): + super().__init__(session, request) + self.dag_id = dag_id + self.dag_run_id = dag_run_id + self.dag_bag = dag_bag + self.user = user + + def categorize_task_instances( + self, task_ids: set[tuple[str, int]] + ) -> tuple[dict[tuple[str, int], TI], set[tuple[str, int]], set[tuple[str, int]]]: + """ + Categorize the given task_ids into matched_task_keys and not_found_task_keys based on existing task_ids. + + :param task_ids: set of task_ids + :return: tuple of (task_instances_map, matched_task_keys, not_found_task_keys) + """ + query = select(TI).where( + TI.dag_id == self.dag_id, + TI.run_id == self.dag_run_id, + TI.task_id.in_([task_id for task_id, _ in task_ids]), + ) + task_instances = self.session.scalars(query).all() + task_instances_map = { + (ti.task_id, ti.map_index if ti.map_index is not None else -1): ti for ti in task_instances + } + matched_task_keys = {(task_id, map_index) for (task_id, map_index) in task_instances_map.keys()} + not_found_task_keys = {(task_id, map_index) for task_id, map_index in task_ids} - matched_task_keys + return task_instances_map, matched_task_keys, not_found_task_keys + + def handle_bulk_create( + self, action: BulkCreateAction[BulkTaskInstanceBody], results: BulkActionResponse + ) -> None: + results.errors.append( + { + "error": "Task instances bulk create is not supported", + "status_code": status.HTTP_405_METHOD_NOT_ALLOWED, + } + ) + + def handle_bulk_update( + self, action: BulkUpdateAction[BulkTaskInstanceBody], results: BulkActionResponse + ) -> None: + """Bulk Update Task Instances.""" + to_update_task_keys = { + (task_instance.task_id, task_instance.map_index if task_instance.map_index is not None else -1) + for task_instance in action.entities + } + _, _, not_found_task_keys = self.categorize_task_instances(to_update_task_keys) + + try: + for task_instance_body in action.entities: + task_key = ( + task_instance_body.task_id, + task_instance_body.map_index if task_instance_body.map_index is not None else -1, + ) + + if task_key in not_found_task_keys: + if action.action_on_non_existence == BulkActionNotOnExistence.FAIL: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"The Task Instance with dag_id: `{self.dag_id}`, run_id: `{self.dag_run_id}`, task_id: `{task_instance_body.task_id}` and map_index: `{task_instance_body.map_index}` was not found", + ) + if action.action_on_non_existence == BulkActionNotOnExistence.SKIP: + continue + + dag, 
tis, data = _patch_ti_validate_request(
+                    dag_id=self.dag_id,
+                    dag_run_id=self.dag_run_id,
+                    task_id=task_instance_body.task_id,
+                    dag_bag=self.dag_bag,
+                    body=task_instance_body,
+                    session=self.session,
+                    map_index=task_instance_body.map_index,
+                    update_mask=None,
+                )
+
+                for key, _ in data.items():
+                    if key == "new_state":
+                        _patch_task_instance_state(
+                            task_id=task_instance_body.task_id,
+                            dag_run_id=self.dag_run_id,
+                            dag=dag,
+                            task_instance_body=task_instance_body,
+                            session=self.session,
+                            data=data,
+                        )
+                    elif key == "note":
+                        _patch_task_instance_note(
+                            task_instance_body=task_instance_body, tis=tis, user=self.user
+                        )
+
+                results.success.append(task_instance_body.task_id)
+        except ValidationError as e:
+            results.errors.append({"error": f"{e.errors()}"})
+        except HTTPException as e:
+            results.errors.append({"error": f"{e.detail}", "status_code": e.status_code})
+
+    def handle_bulk_delete(
+        self, action: BulkDeleteAction[BulkTaskInstanceBody], results: BulkActionResponse
+    ) -> None:
+        """Bulk delete task instances."""
+        to_delete_task_keys = set((task_id, -1) for task_id in action.entities)
+        _, matched_task_keys, not_found_task_keys = self.categorize_task_instances(to_delete_task_keys)
+        not_found_task_ids = [task_id for task_id, _ in not_found_task_keys]
+
+        try:
+            if action.action_on_non_existence == BulkActionNotOnExistence.FAIL and not_found_task_keys:
+                raise HTTPException(
+                    status_code=status.HTTP_404_NOT_FOUND,
+                    detail=f"The task instances with these task_ids: {not_found_task_ids} were not found",
+                )
+
+            for task_id, _ in matched_task_keys:
+                # Scope the lookup to this DAG run; matching on task_id alone could
+                # delete a task instance from an unrelated DAG or run.
+                existing_task_instance = self.session.scalar(
+                    select(TI)
+                    .where(TI.dag_id == self.dag_id, TI.run_id == self.dag_run_id, TI.task_id == task_id)
+                    .limit(1)
+                )
+                if existing_task_instance:
+                    self.session.delete(existing_task_instance)
+                    results.success.append(task_id)
+
+        except HTTPException as e:
+            results.errors.append({"error": f"{e.detail}", "status_code": e.status_code})
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py
index 77ab8568cbc53..0208ea1a0a5f7 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/variables.py
@@ -64,11 +64,13 @@ def handle_bulk_create(self, action: BulkCreateAction, results: BulkActionRespon

         for variable in action.entities:
             if variable.key in create_keys:
+                should_serialize_json = isinstance(variable.value, (dict, list))
                 Variable.set(
                     key=variable.key,
                     value=variable.value,
                     description=variable.description,
                     session=self.session,
+                    serialize_json=should_serialize_json,
                 )
                 results.success.append(variable.key)

diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py
new file mode 100644
index 0000000000000..28b7f32ead918
--- /dev/null
+++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py
@@ -0,0 +1,325 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import collections +import datetime +from collections.abc import Iterator +from typing import Literal, cast + +import sqlalchemy as sa +import structlog +from croniter.croniter import croniter +from pendulum import DateTime +from sqlalchemy.engine import Row +from sqlalchemy.orm import Session + +from airflow._shared.timezones import timezone +from airflow.api_fastapi.common.parameters import RangeFilter +from airflow.api_fastapi.core_api.datamodels.ui.calendar import ( + CalendarTimeRangeCollectionResponse, + CalendarTimeRangeResponse, +) +from airflow.models.dag import DAG +from airflow.models.dagrun import DagRun +from airflow.timetables._cron import CronMixin +from airflow.timetables.base import DataInterval, TimeRestriction +from airflow.timetables.simple import ContinuousTimetable + +log = structlog.get_logger(logger_name=__name__) + + +class CalendarService: + """Service class for calendar-related operations.""" + + MAX_PLANNED_RUNS: int = 2000 + + def get_calendar_data( + self, + dag_id: str, + session: Session, + dag: DAG, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"] = "daily", + ) -> CalendarTimeRangeCollectionResponse: + """ + Get calendar data for a DAG including historical and planned runs. + + Args: + dag_id: The DAG ID + session: Database session + dag: The DAG object + logical_date: Date range filter + granularity: Time granularity ("hourly" or "daily") + + Returns: + List of calendar time range results + """ + historical_data, raw_dag_states = self._get_historical_dag_runs( + dag_id, + session, + logical_date, + granularity, + ) + + planned_data = self._get_planned_dag_runs(dag, raw_dag_states, logical_date, granularity) + + all_data = historical_data + planned_data + return CalendarTimeRangeCollectionResponse( + total_entries=len(all_data), + dag_runs=all_data, + ) + + def _get_historical_dag_runs( + self, + dag_id: str, + session: Session, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"], + ) -> tuple[list[CalendarTimeRangeResponse], list[Row]]: + """Get historical DAG runs from the database.""" + dialect = session.bind.dialect.name + + time_expression = self._get_time_truncation_expression(DagRun.logical_date, granularity, dialect) + + select_stmt = ( + sa.select( + time_expression.label("datetime"), + DagRun.state, + sa.func.max(DagRun.data_interval_start).label("data_interval_start"), + sa.func.max(DagRun.data_interval_end).label("data_interval_end"), + sa.func.count("*").label("count"), + ) + .where(DagRun.dag_id == dag_id) + .group_by(time_expression, DagRun.state) + .order_by(time_expression.asc()) + ) + + select_stmt = logical_date.to_orm(select_stmt) + dag_states = session.execute(select_stmt).all() + + calendar_results = [ + CalendarTimeRangeResponse( + # ds.datetime in sqlite and mysql is a string, in postgresql it is a datetime + date=ds.datetime, + state=ds.state, + count=ds.count, + ) + for ds in dag_states + ] + + return calendar_results, dag_states + + def _get_planned_dag_runs( + self, + dag: DAG, + raw_dag_states: list[Row], + logical_date: RangeFilter, + 
granularity: Literal["hourly", "daily"], + ) -> list[CalendarTimeRangeResponse]: + """Get planned DAG runs based on the DAG's timetable.""" + if not self._should_calculate_planned_runs(dag, raw_dag_states): + return [] + + last_data_interval = self._get_last_data_interval(raw_dag_states) + if not last_data_interval: + return [] + + year = last_data_interval.end.year + restriction = TimeRestriction( + timezone.coerce_datetime(dag.start_date) if dag.start_date else None, + timezone.coerce_datetime(dag.end_date) if dag.end_date else None, + False, + ) + + if isinstance(dag.timetable, CronMixin): + return self._calculate_cron_planned_runs(dag, last_data_interval, year, logical_date, granularity) + return self._calculate_timetable_planned_runs( + dag, last_data_interval, year, restriction, logical_date, granularity + ) + + def _should_calculate_planned_runs(self, dag: DAG, raw_dag_states: list[Row]) -> bool: + """Check if we should calculate planned runs.""" + return ( + bool(raw_dag_states) + and bool(raw_dag_states[-1].data_interval_start) + and bool(raw_dag_states[-1].data_interval_end) + and not isinstance(dag.timetable, ContinuousTimetable) + ) + + def _get_last_data_interval(self, raw_dag_states: list[Row]) -> DataInterval | None: + """Extract the last data interval from raw database results.""" + if not raw_dag_states: + return None + + last_state = raw_dag_states[-1] + if not (last_state.data_interval_start and last_state.data_interval_end): + return None + + return DataInterval( + timezone.coerce_datetime(last_state.data_interval_start), + timezone.coerce_datetime(last_state.data_interval_end), + ) + + def _calculate_cron_planned_runs( + self, + dag: DAG, + last_data_interval: DataInterval, + year: int, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"], + ) -> list[CalendarTimeRangeResponse]: + """Calculate planned runs for cron-based timetables.""" + dates: dict[datetime.datetime, int] = collections.Counter() + + dates_iter: Iterator[datetime.datetime | None] = croniter( + cast("CronMixin", dag.timetable)._expression, + start_time=last_data_interval.end, + ret_type=datetime.datetime, + ) + + for dt in dates_iter: + if dt is None or dt.year != year: + break + if dag.end_date and dt > dag.end_date: + break + if not self._is_date_in_range(dt, logical_date): + continue + + dates[self._truncate_datetime_for_granularity(dt, granularity)] += 1 + + return [ + CalendarTimeRangeResponse(date=dt, state="planned", count=count) for dt, count in dates.items() + ] + + def _calculate_timetable_planned_runs( + self, + dag: DAG, + last_data_interval: DataInterval, + year: int, + restriction: TimeRestriction, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"], + ) -> list[CalendarTimeRangeResponse]: + """Calculate planned runs for generic timetables.""" + dates: dict[datetime.datetime, int] = collections.Counter() + prev_logical_date = DateTime.min + total_planned = 0 + + while total_planned < self.MAX_PLANNED_RUNS: + curr_info = dag.timetable.next_dagrun_info( + last_automated_data_interval=last_data_interval, + restriction=restriction, + ) + + if curr_info is None: # No more DAG runs to schedule + break + if curr_info.logical_date <= prev_logical_date: # Timetable not progressing, stopping + break + if curr_info.logical_date.year != year: # Crossed year boundary + break + + if not self._is_date_in_range(curr_info.logical_date, logical_date): + last_data_interval = curr_info.data_interval + prev_logical_date = curr_info.logical_date + total_planned += 1 + 
continue
+
+            last_data_interval = curr_info.data_interval
+            dt = self._truncate_datetime_for_granularity(curr_info.logical_date, granularity)
+            dates[dt] += 1
+            prev_logical_date = curr_info.logical_date
+            total_planned += 1
+
+        return [
+            CalendarTimeRangeResponse(date=dt, state="planned", count=count) for dt, count in dates.items()
+        ]
+
+    def _get_time_truncation_expression(
+        self,
+        column: sa.Column,
+        granularity: Literal["hourly", "daily"],
+        dialect: str,
+    ) -> sa.Column:
+        """
+        Get database-specific time truncation expression for SQLAlchemy.
+
+        We always want to return a timestamp, for both hourly and daily truncation.
+        Unfortunately, different databases have different functions for truncating datetimes, so we need
+        to handle them separately.
+
+        Args:
+            column: The datetime column to truncate
+            granularity: Either "hourly" or "daily"
+            dialect: Database dialect ("postgresql", "mysql", "sqlite")
+
+        Returns:
+            SQLAlchemy expression for time truncation
+
+        Raises:
+            ValueError: If the dialect is not supported
+        """
+        if granularity == "hourly":
+            if dialect == "postgresql":
+                expression = sa.func.date_trunc("hour", column)
+            elif dialect == "mysql":
+                expression = sa.func.date_format(column, "%Y-%m-%dT%H:00:00Z")
+            elif dialect == "sqlite":
+                expression = sa.func.strftime("%Y-%m-%dT%H:00:00Z", column)
+            else:
+                raise ValueError(f"Unsupported dialect: {dialect}")
+        else:
+            if dialect == "postgresql":
+                expression = sa.func.timezone("UTC", sa.func.cast(sa.func.cast(column, sa.Date), sa.DateTime))
+            elif dialect == "mysql":
+                expression = sa.func.date_format(column, "%Y-%m-%dT%00:00:00Z")
+            elif dialect == "sqlite":
+                expression = sa.func.strftime("%Y-%m-%dT00:00:00Z", column)
+            else:
+                raise ValueError(f"Unsupported dialect: {dialect}")
+        return expression
+
+    def _truncate_datetime_for_granularity(
+        self,
+        dt: datetime.datetime,
+        granularity: Literal["hourly", "daily"],
+    ) -> datetime.datetime:
+        """
+        Truncate datetime based on granularity for planned tasks grouping.
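+        (Hourly truncation zeroes minutes and seconds; daily truncation also zeroes the hour.)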
+ + Args: + dt: The datetime to truncate + granularity: Either "hourly" or "daily" + + Returns: + Truncated datetime + """ + if granularity == "hourly": + return dt.replace(minute=0, second=0, microsecond=0) + return dt.replace(hour=0, minute=0, second=0, microsecond=0) + + def _is_date_in_range(self, dt: datetime.datetime, logical_date: RangeFilter) -> bool: + """Check if a date is within the specified range filter.""" + if not logical_date.value: + return True + + if logical_date.value.lower_bound and dt < logical_date.value.lower_bound: + return False + if logical_date.value.upper_bound and dt > logical_date.value.upper_bound: + return False + + return True diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py index 516ff9fda153d..68881ac0f108e 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py @@ -17,6 +17,8 @@ from __future__ import annotations +import contextlib +import importlib import logging from collections.abc import MutableMapping from functools import cache @@ -130,25 +132,68 @@ def mock_any_of(allowed_values: list) -> HookMetaService.MockEnum: """Mock for wtforms.validators.any_of.""" return HookMetaService.MockEnum(allowed_values) - with ( - mock.patch("wtforms.StringField", HookMetaService.MockStringField), - mock.patch("wtforms.fields.StringField", HookMetaService.MockStringField), - mock.patch("wtforms.fields.simple.StringField", HookMetaService.MockStringField), - mock.patch("wtforms.IntegerField", HookMetaService.MockIntegerField), - mock.patch("wtforms.fields.IntegerField", HookMetaService.MockIntegerField), - mock.patch("wtforms.PasswordField", HookMetaService.MockPasswordField), - mock.patch("wtforms.BooleanField", HookMetaService.MockBooleanField), - mock.patch("wtforms.fields.BooleanField", HookMetaService.MockBooleanField), - mock.patch("wtforms.fields.simple.BooleanField", HookMetaService.MockBooleanField), - mock.patch("flask_babel.lazy_gettext", mock_lazy_gettext), - mock.patch("flask_appbuilder.fieldwidgets.BS3TextFieldWidget", HookMetaService.MockAnyWidget), - mock.patch("flask_appbuilder.fieldwidgets.BS3TextAreaFieldWidget", HookMetaService.MockAnyWidget), - mock.patch("flask_appbuilder.fieldwidgets.BS3PasswordFieldWidget", HookMetaService.MockAnyWidget), - mock.patch("wtforms.validators.Optional", HookMetaService.MockOptional), - mock.patch("wtforms.validators.any_of", mock_any_of), - ): + with contextlib.ExitStack() as stack: + try: + importlib.import_module("wtforms") + stack.enter_context(mock.patch("wtforms.StringField", HookMetaService.MockStringField)) + stack.enter_context(mock.patch("wtforms.fields.StringField", HookMetaService.MockStringField)) + stack.enter_context( + mock.patch("wtforms.fields.simple.StringField", HookMetaService.MockStringField) + ) + + stack.enter_context(mock.patch("wtforms.IntegerField", HookMetaService.MockIntegerField)) + stack.enter_context( + mock.patch("wtforms.fields.IntegerField", HookMetaService.MockIntegerField) + ) + stack.enter_context(mock.patch("wtforms.PasswordField", HookMetaService.MockPasswordField)) + stack.enter_context(mock.patch("wtforms.BooleanField", HookMetaService.MockBooleanField)) + stack.enter_context( + mock.patch("wtforms.fields.BooleanField", HookMetaService.MockBooleanField) + ) + stack.enter_context( + mock.patch("wtforms.fields.simple.BooleanField", 
HookMetaService.MockBooleanField)
+                )
+                stack.enter_context(mock.patch("wtforms.validators.Optional", HookMetaService.MockOptional))
+                stack.enter_context(mock.patch("wtforms.validators.any_of", mock_any_of))
+            except ImportError:
+                pass
+
+            try:
+                importlib.import_module("flask_babel")
+                stack.enter_context(mock.patch("flask_babel.lazy_gettext", mock_lazy_gettext))
+            except ImportError:
+                pass
+
+            try:
+                importlib.import_module("flask_appbuilder")
+                stack.enter_context(
+                    mock.patch(
+                        "flask_appbuilder.fieldwidgets.BS3TextFieldWidget", HookMetaService.MockAnyWidget
+                    )
+                )
+                stack.enter_context(
+                    mock.patch(
+                        "flask_appbuilder.fieldwidgets.BS3TextAreaFieldWidget", HookMetaService.MockAnyWidget
+                    )
+                )
+                stack.enter_context(
+                    mock.patch(
+                        "flask_appbuilder.fieldwidgets.BS3PasswordFieldWidget", HookMetaService.MockAnyWidget
+                    )
+                )
+            except ImportError:
+                pass
+
             pm = ProvidersManager()
-            return pm.hooks, pm.connection_form_widgets, pm.field_behaviours
+            pm._cleanup()  # Remove any cached hooks with non-mocked FAB
+            pm._init_airflow_core_hooks()  # Initialize core hooks
+            return pm.hooks, pm.connection_form_widgets, pm.field_behaviours  # Will init providers hooks
+
+        return (
+            {},
+            {},
+            {},
+        )  # Make mypy happy, should never be reached https://github.com/python/mypy/issues/7726

     @staticmethod
     def _make_standard_fields(field_behaviour: dict | None) -> StandardHookFields | None:
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py
index 45ba4e42837d0..b8e569f877e70 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py
@@ -17,303 +17,27 @@

 from __future__ import annotations

-from functools import cache
-from operator import methodcaller
-from typing import Callable
-from uuid import UUID
+from collections import Counter
+from collections.abc import Iterable

 import structlog
-from sqlalchemy import select
-from typing_extensions import Any

-from airflow import DAG
-from airflow.api_fastapi.common.db.common import SessionDep
-from airflow.api_fastapi.common.parameters import (
-    state_priority,
-)
-from airflow.api_fastapi.core_api.datamodels.ui.grid import (
-    GridTaskInstanceSummary,
-)
-from airflow.api_fastapi.core_api.datamodels.ui.structure import (
-    StructureDataResponse,
-)
-from airflow.configuration import conf
-from airflow.models.baseoperator import BaseOperator as DBBaseOperator
-from airflow.models.dag_version import DagVersion
+from airflow.api_fastapi.common.parameters import state_priority
 from airflow.models.taskmap import TaskMap
-from airflow.sdk import BaseOperator
 from airflow.sdk.definitions.mappedoperator import MappedOperator
-from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup
-from airflow.serialization.serialized_objects import SerializedDAG
-from airflow.utils.state import TaskInstanceState
-from airflow.utils.task_group import task_group_to_dict
+from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup, get_task_group_children_getter
+from airflow.serialization.serialized_objects import SerializedBaseOperator

 log = structlog.get_logger(logger_name=__name__)


-@cache
-def get_task_group_children_getter() -> Callable:
-    """Get the Task Group Children Getter for the DAG."""
-    sort_order = conf.get("webserver", "grid_view_sorting_order")
-    if sort_order == "topological":
-        return methodcaller("topological_sort")
-    return
methodcaller("hierarchical_alphabetical_sort") - - -def get_task_group_map(dag: DAG) -> dict[str, dict[str, Any]]: - """ - Get the Task Group Map for the DAG. - - :param dag: DAG - - :return: Task Group Map - """ - task_nodes: dict[str, dict[str, Any]] = {} - - def _is_task_node_mapped_task_group(task_node: BaseOperator | MappedTaskGroup | TaskMap | None) -> bool: - """Check if the Task Node is a Mapped Task Group.""" - return type(task_node) is MappedTaskGroup - - def _append_child_task_count_to_parent( - child_task_count: int | MappedTaskGroup | TaskMap | MappedOperator | None, - parent_node: BaseOperator | MappedTaskGroup | TaskMap | None, - ): - """ - Append the Child Task Count to the Parent. - - This method should only be used for Mapped Models. - """ - if isinstance(parent_node, TaskGroup): - # Remove the regular task counted in parent_node - task_nodes[parent_node.node_id]["task_count"].append(-1) - # Add the mapped task to the parent_node - task_nodes[parent_node.node_id]["task_count"].append(child_task_count) - - def _fill_task_group_map( - task_node: BaseOperator | MappedTaskGroup | TaskMap | None, - parent_node: BaseOperator | MappedTaskGroup | TaskMap | None, - ) -> None: - """Recursively fill the Task Group Map.""" - if task_node is None: - return - - if isinstance(task_node, MappedOperator): - task_nodes[task_node.node_id] = { - "is_group": False, - "parent_id": parent_node.node_id if parent_node else None, - "task_count": [task_node], - } - # Add the Task Count to the Parent Node because parent node is a Task Group - _append_child_task_count_to_parent(child_task_count=task_node, parent_node=parent_node) - return - - if isinstance(task_node, TaskGroup): - task_count = task_node if _is_task_node_mapped_task_group(task_node) else len(task_node.children) - task_nodes[task_node.node_id] = { - "is_group": True, - "parent_id": parent_node.node_id if parent_node else None, - "task_count": [task_count], - } - for child in get_task_group_children_getter()(task_node): - _fill_task_group_map(task_node=child, parent_node=task_node) - return - - if isinstance(task_node, BaseOperator): - task_nodes[task_node.task_id] = { - "is_group": False, - "parent_id": parent_node.node_id if parent_node else None, - "task_count": task_nodes[parent_node.node_id]["task_count"] - if _is_task_node_mapped_task_group(parent_node) and parent_node - else [1], - } - # No Need to Add the Task Count to the Parent Node, these are already counted in Add the Parent - return - - for node in [child for child in get_task_group_children_getter()(dag.task_group)]: - _fill_task_group_map(task_node=node, parent_node=None) - - return task_nodes - - -def get_child_task_map(parent_task_id: str, task_node_map: dict[str, dict[str, Any]]): - """Get the Child Task Map for the Parent Task ID.""" - return [task_id for task_id, task_map in task_node_map.items() if task_map["parent_id"] == parent_task_id] - - -def _get_total_task_count( - run_id: str, task_count: list[int | MappedTaskGroup | MappedOperator], session: SessionDep -) -> int: - return sum( - node - if isinstance(node, int) - else ( - DBBaseOperator.get_mapped_ti_count(node, run_id=run_id, session=session) or 0 - if isinstance(node, (MappedTaskGroup, MappedOperator)) - else node - ) - for node in task_count - ) - - -def fill_task_instance_summaries( - grouped_task_instances: dict[tuple[str, str], list], - task_instance_summaries_to_fill: dict[str, list], - session: SessionDep, -) -> None: - """ - Fill the Task Instance Summaries for the Grouped Task Instances. 
- - :param grouped_task_instances: Grouped Task Instances - :param task_instance_summaries_to_fill: Task Instance Summaries to fill - :param task_node_map: Task Node Map - :param session: Session - - :return: None - """ - # Additional logic to calculate the overall states to cascade recursive task states - overall_states: dict[tuple[str, str], str] = { - (task_id, run_id): next( - ( - str(state.value) - for state in state_priority - for ti in tis - if state is not None and ti.state == state - ), - "no_status", - ) - for (task_id, run_id), tis in grouped_task_instances.items() - } - - serdag_cache: dict[UUID, SerializedDAG] = {} - task_group_map_cache: dict[UUID, dict[str, dict[str, Any]]] = {} - - for (task_id, run_id), tis in grouped_task_instances.items(): - if not tis: - continue - - sdm = _get_serdag(tis[0], session) - serdag_cache[sdm.id] = serdag_cache.get(sdm.id) or sdm.dag - dag = serdag_cache[sdm.id] - task_group_map_cache[sdm.id] = task_group_map_cache.get(sdm.id) or get_task_group_map(dag=dag) - task_node_map = task_group_map_cache[sdm.id] - ti_try_number = max([ti.try_number for ti in tis]) - ti_start_date = min([ti.start_date for ti in tis if ti.start_date], default=None) - ti_end_date = max([ti.end_date for ti in tis if ti.end_date], default=None) - ti_queued_dttm = min([ti.queued_dttm for ti in tis if ti.queued_dttm], default=None) - ti_note = min([ti.note for ti in tis if ti.note], default=None) - - # Calculate the child states for the task - # Initialize the child states with 0 - child_states = {"no_status" if state is None else state.name.lower(): 0 for state in state_priority} - # Update Task States for non-grouped tasks - child_states.update( - { - "no_status" if state is None else state.name.lower(): len( - [ti for ti in tis if ti.state == state] - if not task_node_map[task_id]["is_group"] - else [ - ti - for ti in tis - if ti.state == state and ti.task_id in get_child_task_map(task_id, task_node_map) - ] - ) - for state in state_priority - } - ) - - # Update Nested Task Group States by aggregating the child states - child_states.update( - { - overall_states[(task_node_id, run_id)].lower(): child_states.get( - overall_states[(task_node_id, run_id)].lower(), 0 - ) - + 1 - for task_node_id in get_child_task_map(task_id, task_node_map) - if task_node_map[task_node_id]["is_group"] and (task_node_id, run_id) in overall_states - } - ) - - # Get the overall state for the task - overall_ti_state = next( - ( - state - for state in state_priority - for state_name, state_count in child_states.items() - if state_count > 0 and state_name == state - ), - "no_status", - ) - - # Task Count is either integer or a TaskGroup to get the task count - task_instance_summaries_to_fill[run_id].append( - GridTaskInstanceSummary( - task_id=task_id, - try_number=ti_try_number, - start_date=ti_start_date, - end_date=ti_end_date, - queued_dttm=ti_queued_dttm, - child_states=child_states, - task_count=_get_total_task_count(run_id, task_node_map[task_id]["task_count"], session), - state=TaskInstanceState[overall_ti_state.upper()] - if overall_ti_state != "no_status" - else None, - note=ti_note, - ) - ) - - -def get_structure_from_dag(dag: DAG) -> StructureDataResponse: - """If we do not have TIs, we just get the structure from the DAG.""" - nodes = [task_group_to_dict(child) for child in dag.task_group.topological_sort()] - return StructureDataResponse(nodes=nodes, edges=[]) - - -def _get_serdag(ti, session): - dag_version = ti.dag_version - if not dag_version: - dag_version = session.scalar( - 
select(DagVersion)
-            .where(
-                DagVersion.dag_id == ti.dag_id,
-            )
-            .order_by(DagVersion.id)  # ascending cus this is mostly for pre-3.0 upgrade
-            .limit(1)
-        )
-    if not dag_version:
-        raise RuntimeError("No dag_version object could be found.")
-    if not dag_version.serialized_dag:
-        log.error(
-            "No serialized dag found",
-            dag_id=dag_version.dag_id,
-            version_id=dag_version.id,
-            version_number=dag_version.version_number,
-        )
-    return dag_version.serialized_dag
-
-
-def get_combined_structure(task_instances, session):
-    """Given task instances with varying DAG versions, get a combined structure."""
-    merged_nodes = []
-    # we dedup with serdag, as serdag.dag varies somehow?
-    serdags = {_get_serdag(ti, session) for ti in task_instances}
-    dags = []
-    for serdag in serdags:
-        if serdag:
-            dags.append(serdag.dag)
-    for dag in dags:
-        nodes = [task_group_to_dict(child) for child in dag.task_group.topological_sort()]
-        _merge_node_dicts(merged_nodes, nodes)
-
-    return StructureDataResponse(nodes=merged_nodes, edges=[])
-
-
 def _merge_node_dicts(current, new) -> None:
     current_ids = {node["id"] for node in current}
     for node in new:
         if node["id"] in current_ids:
             current_node = _get_node_by_id(current, node["id"])
             # if we have children, merge those as well
-            if "children" in current_node:
+            if current_node.get("children"):
                 _merge_node_dicts(current_node["children"], node["children"])
         else:
             current.append(node)
@@ -324,3 +48,81 @@ def _get_node_by_id(nodes, node_id):
         if node["id"] == node_id:
             return node
     return {}
+
+
+def agg_state(states):
+    states = Counter(states)
+    for state in state_priority:
+        if state in states:
+            return state
+    return None
+
+
+def _get_aggs_for_node(detail):
+    states = [x["state"] for x in detail]
+    try:
+        min_start_date = min(x["start_date"] for x in detail if x["start_date"])
+    except ValueError:
+        min_start_date = None
+    try:
+        max_end_date = max(x["end_date"] for x in detail if x["end_date"])
+    except ValueError:
+        max_end_date = None
+    return {
+        "state": agg_state(states),
+        "min_start_date": min_start_date,
+        "max_end_date": max_end_date,
+        "child_states": dict(Counter(states)),
+    }
+
+
+def _find_aggregates(
+    node: TaskGroup | MappedTaskGroup | SerializedBaseOperator | TaskMap,
+    parent_node: TaskGroup | MappedTaskGroup | SerializedBaseOperator | TaskMap | None,
+    ti_details: dict[str, list],
+) -> Iterable[dict]:
+    """Recursively yield state and date aggregates for each node of the task group tree."""
+    if node is None:
+        return
+    node_id = node.node_id
+    parent_id = parent_node.node_id if parent_node else None
+    details = ti_details[node_id]
+
+    if isinstance(node, MappedOperator):
+        yield {
+            "task_id": node_id,
+            "type": "mapped_task",
+            "parent_id": parent_id,
+            **_get_aggs_for_node(details),
+        }
+
+        return
+    if isinstance(node, TaskGroup):
+        children = []
+        for child in get_task_group_children_getter()(node):
+            for child_node in _find_aggregates(node=child, parent_node=node, ti_details=ti_details):
+                if child_node["parent_id"] == node_id:
+                    children.append(
+                        {
+                            "state": child_node["state"],
+                            "start_date": child_node["min_start_date"],
+                            "end_date": child_node["max_end_date"],
+                        }
+                    )
+                yield child_node
+        if node_id:
+            yield {
+                "task_id": node_id,
+                "type": "group",
+                "parent_id": parent_id,
+                **_get_aggs_for_node(children),
+            }
+        return
+    if isinstance(node, SerializedBaseOperator):
+        yield {
+            "task_id": node_id,
+            "type": "task",
+            "parent_id": parent_id,
+            **_get_aggs_for_node(details),
+        }
+        return
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/structure.py
b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/structure.py
index 128dc93b7706d..db3d1ba6deac4 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/structure.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/structure.py
@@ -23,6 +23,16 @@

 from __future__ import annotations

+from collections import defaultdict
+
+from sqlalchemy import select
+from sqlalchemy.orm import Session
+
+from airflow.models.asset import AssetAliasModel, AssetEvent
+from airflow.models.dag_version import DagVersion
+from airflow.models.dagrun import DagRun
+from airflow.models.serialized_dag import SerializedDagModel
+

 def get_upstream_assets(
     asset_expression: dict, entry_node_ref: str, level: int = 0
@@ -112,3 +122,64 @@ def get_upstream_assets(
         edges = edges + e

     return nodes, edges
+
+
+def bind_output_assets_to_tasks(
+    edges: list[dict], serialized_dag: SerializedDagModel, version_number: int, session: Session
+) -> None:
+    """
+    Try to bind the downstream assets to the relevant tasks that produce them.
+
+    This function will mutate the `edges` in place.
+    """
+    # bind normal assets present in the `task_outlet_asset_references`
+    outlet_asset_references = serialized_dag.dag_model.task_outlet_asset_references
+
+    downstream_asset_edges = [
+        edge
+        for edge in edges
+        if edge["target_id"].startswith("asset:") and not edge.get("resolved_from_alias")
+    ]
+
+    for edge in downstream_asset_edges:
+        # Try to attach the outlet assets to the relevant tasks
+        asset_id = int(edge["target_id"].replace("asset:", "", 1))
+        outlet_asset_reference = next(
+            outlet_asset_reference
+            for outlet_asset_reference in outlet_asset_references
+            if outlet_asset_reference.asset_id == asset_id
+        )
+        edge["source_id"] = outlet_asset_reference.task_id
+
+    # bind assets resolved from aliases; they do not populate the `outlet_asset_references`
+    downstream_alias_resolved_edges = [
+        edge for edge in edges if edge["target_id"].startswith("asset:") and edge.get("resolved_from_alias")
+    ]
+
+    aliases_names = {edge["resolved_from_alias"] for edge in downstream_alias_resolved_edges}
+
+    result = session.scalars(
+        select(AssetEvent)
+        .join(AssetEvent.source_aliases)
+        .join(AssetEvent.source_dag_run)
+        # That's a simplification, instead of checking `version_number` against `DagRun.dag_versions`.
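+        # Joining via the run's created_dag_version restricts matches to asset events
+        # emitted by runs that were created from the displayed DAG version.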
+ .join(DagRun.created_dag_version) + .where(AssetEvent.source_aliases.any(AssetAliasModel.name.in_(aliases_names))) + .where(AssetEvent.source_dag_run.has(DagRun.dag_id == serialized_dag.dag_model.dag_id)) + .where(DagVersion.version_number == version_number) + ).unique() + + asset_id_to_task_ids = defaultdict(set) + for asset_event in result: + asset_id_to_task_ids[asset_event.asset_id].add(asset_event.source_task_id) + + for edge in downstream_alias_resolved_edges: + asset_id = int(edge["target_id"].replace("asset:", "", 1)) + task_ids = asset_id_to_task_ids.get(asset_id, set()) + + for index, task_id in enumerate(task_ids): + if index == 0: + edge["source_id"] = task_id + continue + edge_copy = {**edge, "source_id": task_id} + edges.append(edge_copy) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/app.py b/airflow-core/src/airflow/api_fastapi/execution_api/app.py index c4c924180bb20..1f530fbc34cdf 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/app.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/app.py @@ -39,6 +39,7 @@ if TYPE_CHECKING: import httpx + from fastapi.routing import APIRoute import structlog @@ -99,7 +100,7 @@ class CadwynWithOpenAPICustomization(Cadwyn): # Workaround lack of customzation https://github.com/zmievsa/cadwyn/issues/255 async def openapi_jsons(self, req: Request) -> JSONResponse: resp = await super().openapi_jsons(req) - open_apischema = json.loads(resp.body) # type: ignore[arg-type] + open_apischema = json.loads(resp.body) open_apischema = self.customize_openapi(open_apischema) resp.body = resp.render(open_apischema) @@ -113,6 +114,10 @@ def customize_openapi(self, openapi_schema: dict[str, Any]) -> dict[str, Any]: This is particularly useful for client SDKs that require models for types not directly exposed in any endpoint's request or response schema. + We also replace ``anyOf`` with ``oneOf`` in the API spec as this produces better results for the code + generators. This is because anyOf can technically be more than of the given schemas, but 99.9% of the + time (perhaps 100% in this API) the types are mutually exclusive, so oneOf is more correct + References: - https://fastapi.tiangolo.com/how-to/extending-openapi/#modify-the-openapi-schema """ @@ -124,11 +129,23 @@ def customize_openapi(self, openapi_schema: dict[str, Any]) -> dict[str, Any]: # The `JsonValue` component is missing any info. 
causes issues when generating models openapi_schema["components"]["schemas"]["JsonValue"] = { "title": "Any valid JSON value", - "anyOf": [ + "oneOf": [ {"type": t} for t in ("string", "number", "integer", "object", "array", "boolean", "null") ], } + def replace_any_of_with_one_of(spec): + if isinstance(spec, dict): + return { + ("oneOf" if key == "anyOf" else key): replace_any_of_with_one_of(value) + for key, value in spec.items() + } + if isinstance(spec, list): + return [replace_any_of_with_one_of(item) for item in spec] + return spec + + openapi_schema = replace_any_of_with_one_of(openapi_schema) + for comp in openapi_schema["components"]["schemas"].values(): for prop in comp.get("properties", {}).values(): # {"type": "string", "const": "deferred"} @@ -147,11 +164,16 @@ def create_task_execution_api_app() -> FastAPI: from airflow.api_fastapi.execution_api.routes import execution_api_router from airflow.api_fastapi.execution_api.versions import bundle + def custom_generate_unique_id(route: APIRoute): + # This is called only if the route doesn't provide an explicit operation ID + return route.name + # See https://docs.cadwyn.dev/concepts/version_changes/ for info about API versions app = CadwynWithOpenAPICustomization( title="Airflow Task Execution API", description="The private Airflow Task Execution API.", lifespan=lifespan, + generate_unique_id_function=custom_generate_unique_id, api_version_parameter_name="Airflow-API-Version", api_version_default_value=bundle.versions[0].value, versions=bundle, @@ -175,7 +197,7 @@ def get_extra_schemas() -> dict[str, dict]: """Get all the extra schemas that are not part of the main FastAPI app.""" from airflow.api_fastapi.execution_api.datamodels.taskinstance import TaskInstance from airflow.executors.workloads import BundleInfo - from airflow.utils.state import TerminalTIState + from airflow.utils.state import TaskInstanceState, TerminalTIState return { "TaskInstance": TaskInstance.model_json_schema(), @@ -183,6 +205,7 @@ def get_extra_schemas() -> dict[str, dict]: # Include the combined state enum too. In the datamodels we separate out SUCCESS from the other states # as that has different payload requirements "TerminalTIState": {"type": "string", "enum": list(TerminalTIState)}, + "TaskInstanceState": {"type": "string", "enum": list(TaskInstanceState)}, } @@ -201,17 +224,26 @@ class InProcessExecutionAPI: @cached_property def app(self): if not self._app: + from airflow.api_fastapi.common.dagbag import create_dag_bag from airflow.api_fastapi.execution_api.app import create_task_execution_api_app - from airflow.api_fastapi.execution_api.deps import JWTBearerDep, JWTRefresherDep + from airflow.api_fastapi.execution_api.deps import ( + JWTBearerDep, + JWTBearerTIPathDep, + JWTRefresherDep, + ) from airflow.api_fastapi.execution_api.routes.connections import has_connection_access from airflow.api_fastapi.execution_api.routes.variables import has_variable_access from airflow.api_fastapi.execution_api.routes.xcoms import has_xcom_access self._app = create_task_execution_api_app() + # Set up dag_bag in app state for dependency injection + self._app.state.dag_bag = create_dag_bag() + async def always_allow(): ... 
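# Editor's note: a minimal, self-contained sketch (not part of the patch) of how the
# ``anyOf`` -> ``oneOf`` rewrite above behaves. ``_demo_replace_any_of_with_one_of`` is a
# standalone copy for illustration; only dict keys named "anyOf" are renamed, and the walk
# recurses through nested dicts and lists while leaving all other values untouched.
def _demo_replace_any_of_with_one_of(spec):
    if isinstance(spec, dict):
        return {
            ("oneOf" if key == "anyOf" else key): _demo_replace_any_of_with_one_of(value)
            for key, value in spec.items()
        }
    if isinstance(spec, list):
        return [_demo_replace_any_of_with_one_of(item) for item in spec]
    return spec

_schema = {"properties": {"value": {"anyOf": [{"type": "string"}, {"type": "null"}]}}}
assert _demo_replace_any_of_with_one_of(_schema) == {
    "properties": {"value": {"oneOf": [{"type": "string"}, {"type": "null"}]}}
}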
self._app.dependency_overrides[JWTBearerDep.dependency] = always_allow + self._app.dependency_overrides[JWTBearerTIPathDep.dependency] = always_allow self._app.dependency_overrides[JWTRefresherDep.dependency] = always_allow self._app.dependency_overrides[has_connection_access] = always_allow self._app.dependency_overrides[has_variable_access] = always_allow diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py new file mode 100644 index 0000000000000..adc37f00b7095 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py @@ -0,0 +1,67 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime +from typing import Any +from uuid import UUID + +from pydantic import Field + +from airflow.api_fastapi.core_api.base import BaseModel +from airflow.models.hitl import HITLDetail + + +class HITLDetailRequest(BaseModel): + """Schema for the request part of a Human-in-the-loop detail for a specific task instance.""" + + ti_id: UUID + options: list[str] = Field(min_length=1) + subject: str + body: str | None = None + defaults: list[str] | None = None + multiple: bool = False + params: dict[str, Any] = Field(default_factory=dict) + + +class UpdateHITLDetailPayload(BaseModel): + """Schema for writing the response part of a Human-in-the-loop detail for a specific task instance.""" + + ti_id: UUID + chosen_options: list[str] = Field(min_length=1) + params_input: dict[str, Any] = Field(default_factory=dict) + + +class HITLDetailResponse(BaseModel): + """Schema for the response part of a Human-in-the-loop detail for a specific task instance.""" + + response_received: bool + user_id: str | None + response_at: datetime | None + # It's empty if the user has not yet responded. 
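# Editor's note: an illustrative, self-contained sketch (not part of the patch). The field below
# (``chosen_options``) stays ``None`` until a response arrives, while the write-side payload
# enforces at least one chosen option via ``Field(min_length=1)``. ``_DemoUpdatePayload`` is a
# stripped-down, hypothetical stand-in for ``UpdateHITLDetailPayload``.
from pydantic import BaseModel as _BaseModel, Field as _Field, ValidationError as _ValidationError

class _DemoUpdatePayload(_BaseModel):
    chosen_options: list[str] = _Field(min_length=1)

_DemoUpdatePayload(chosen_options=["approve"])  # accepted
try:
    _DemoUpdatePayload(chosen_options=[])  # rejected: an empty response cannot be written back
except _ValidationError:
    pass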
+ chosen_options: list[str] | None + params_input: dict[str, Any] = Field(default_factory=dict) + + @classmethod + def from_hitl_detail_orm(cls, hitl_detail: HITLDetail) -> HITLDetailResponse: + return HITLDetailResponse( + response_received=hitl_detail.response_received, + response_at=hitl_detail.response_at, + user_id=hitl_detail.user_id, + chosen_options=hitl_detail.chosen_options, + params_input=hitl_detail.params_input or {}, + ) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py index cd8287be97b73..7fa329e0f32b5 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py @@ -19,7 +19,7 @@ import uuid from datetime import timedelta from enum import Enum -from typing import Annotated, Any, Literal, Union +from typing import Annotated, Any, Literal from pydantic import ( AwareDatetime, @@ -35,7 +35,12 @@ from airflow.api_fastapi.execution_api.datamodels.asset import AssetProfile from airflow.api_fastapi.execution_api.datamodels.connection import ConnectionResponse from airflow.api_fastapi.execution_api.datamodels.variable import VariableResponse -from airflow.utils.state import IntermediateTIState, TaskInstanceState as TIState, TerminalTIState +from airflow.utils.state import ( + DagRunState, + IntermediateTIState, + TaskInstanceState as TIState, + TerminalTIState, +) from airflow.utils.types import DagRunType AwareDatetimeAdapter = TypeAdapter(AwareDatetime) @@ -66,6 +71,7 @@ class TerminalStateNonSuccess(str, Enum): FAILED = TerminalTIState.FAILED SKIPPED = TerminalTIState.SKIPPED REMOVED = TerminalTIState.REMOVED + UPSTREAM_FAILED = TerminalTIState.UPSTREAM_FAILED class TITerminalStatePayload(StrictBaseModel): @@ -75,6 +81,7 @@ class TITerminalStatePayload(StrictBaseModel): end_date: UtcDateTime """When the task completed executing""" + rendered_map_index: str | None = None class TISuccessStatePayload(StrictBaseModel): @@ -97,6 +104,7 @@ class TISuccessStatePayload(StrictBaseModel): task_outlets: Annotated[list[AssetProfile], Field(default_factory=list)] outlet_events: Annotated[list[dict[str, Any]], Field(default_factory=list)] + rendered_map_index: str | None = None class TITargetStatePayload(StrictBaseModel): @@ -136,6 +144,7 @@ class TIDeferredStatePayload(StrictBaseModel): Both forms will be passed along to the TaskSDK upon resume, the server will not handle either. """ + rendered_map_index: str | None = None class TIRescheduleStatePayload(StrictBaseModel): @@ -171,6 +180,7 @@ class TIRetryStatePayload(StrictBaseModel): ), ] end_date: UtcDateTime + rendered_map_index: str | None = None class TISkippedDownstreamTasksStatePayload(StrictBaseModel): @@ -209,14 +219,12 @@ def ti_state_discriminator(v: dict[str, str] | StrictBaseModel) -> str: # It is called "_terminal_" to avoid future conflicts if we added an actual state named "terminal" # and "_other_" is a catch-all for all other states that are not covered by the other schemas. 
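# Editor's note: a minimal, self-contained sketch (not part of the patch) of the callable
# discriminator pattern the union below relies on. The two models are hypothetical stand-ins
# for the TI*StatePayload classes above; the callable returns a tag string and pydantic then
# validates against the matching ``Annotated`` member.
from typing import Annotated as _Annotated
from pydantic import BaseModel as _Base, Discriminator as _Discriminator, Tag as _Tag, TypeAdapter as _TypeAdapter

class _Terminal(_Base):
    state: str
    end_date: str

class _Deferred(_Base):
    state: str
    classpath: str

def _pick_tag(v):
    state = v.get("state") if isinstance(v, dict) else getattr(v, "state", None)
    return "deferred" if state == "deferred" else "_terminal_"

_Payload = _Annotated[
    _Annotated[_Terminal, _Tag("_terminal_")] | _Annotated[_Deferred, _Tag("deferred")],
    _Discriminator(_pick_tag),
]
assert isinstance(
    _TypeAdapter(_Payload).validate_python({"state": "deferred", "classpath": "x.Trigger"}), _Deferred
)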
TIStateUpdate = Annotated[ - Union[ - Annotated[TITerminalStatePayload, Tag("_terminal_")], - Annotated[TISuccessStatePayload, Tag("success")], - Annotated[TITargetStatePayload, Tag("_other_")], - Annotated[TIDeferredStatePayload, Tag("deferred")], - Annotated[TIRescheduleStatePayload, Tag("up_for_reschedule")], - Annotated[TIRetryStatePayload, Tag("up_for_retry")], - ], + Annotated[TITerminalStatePayload, Tag("_terminal_")] + | Annotated[TISuccessStatePayload, Tag("success")] + | Annotated[TITargetStatePayload, Tag("_other_")] + | Annotated[TIDeferredStatePayload, Tag("deferred")] + | Annotated[TIRescheduleStatePayload, Tag("up_for_reschedule")] + | Annotated[TIRetryStatePayload, Tag("up_for_retry")], Discriminator(ti_state_discriminator), ] @@ -239,6 +247,7 @@ class TaskInstance(BaseModel): dag_id: str run_id: str try_number: int + dag_version_id: uuid.UUID map_index: int = -1 hostname: str | None = None context_carrier: dict | None = None @@ -288,6 +297,7 @@ class DagRun(StrictBaseModel): end_date: UtcDateTime | None clear_number: int = 0 run_type: DagRunType + state: DagRunState conf: Annotated[dict[str, Any], Field(default_factory=dict)] consumed_asset_events: list[AssetEventDagRunReference] @@ -298,7 +308,7 @@ class TIRunContext(BaseModel): dag_run: DagRun """DAG run information for the task instance.""" - task_reschedule_count: Annotated[int, Field(default=0)] + task_reschedule_count: int = 0 """How many times the task has been rescheduled.""" max_tries: int @@ -310,7 +320,7 @@ class TIRunContext(BaseModel): connections: Annotated[list[ConnectionResponse], Field(default_factory=list)] """Connections that can be accessed by the task instance.""" - upstream_map_indexes: dict[str, int] | None = None + upstream_map_indexes: dict[str, int | list[int] | None] | None = None next_method: str | None = None """Method to call. 
Set when task resumes from a trigger.""" @@ -324,7 +334,7 @@ class TIRunContext(BaseModel): xcom_keys_to_clear: Annotated[list[str], Field(default_factory=list)] """List of Xcom keys that need to be cleared and purged on by the worker.""" - should_retry: bool + should_retry: bool = False """If the ti encounters an error, whether it should enter retry or failed state.""" @@ -341,3 +351,9 @@ class TaskStatesResponse(BaseModel): """Response for task states with run_id, task and state.""" task_states: dict[str, Any] + + +class InactiveAssetsResponse(BaseModel): + """Response for inactive assets.""" + + inactive_assets: Annotated[list[AssetProfile], Field(default_factory=list)] diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/xcom.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/xcom.py index ae7ddd26761cd..4df3e3f74f059 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/xcom.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/xcom.py @@ -20,7 +20,7 @@ import sys from typing import Any -from pydantic import JsonValue +from pydantic import JsonValue, RootModel from airflow.api_fastapi.core_api.base import BaseModel @@ -36,3 +36,15 @@ class XComResponse(BaseModel): key: str value: JsonValue """The returned XCom value in a JSON-compatible format.""" + + +class XComSequenceIndexResponse(RootModel): + """XCom schema with minimal structure for index-based access.""" + + root: JsonValue + + +class XComSequenceSliceResponse(RootModel): + """XCom schema with minimal structure for slice-based access.""" + + root: list[JsonValue] diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/deps.py b/airflow-core/src/airflow/api_fastapi/execution_api/deps.py index 8106a7e81e37b..2648a64ffad7a 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/deps.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/deps.py @@ -20,7 +20,7 @@ import sys import time -from typing import Any, Optional +from typing import Any import structlog import svcs @@ -55,8 +55,8 @@ class JWTBearer(HTTPBearer): def __init__( self, - path_param_name: Optional[str] = None, - required_claims: Optional[dict[str, Any]] = None, + path_param_name: str | None = None, + required_claims: dict[str, Any] | None = None, ): super().__init__(auto_error=False) self.path_param_name = path_param_name @@ -66,7 +66,7 @@ async def __call__( # type: ignore[override] self, request: Request, services=DepContainer, - ) -> Optional[TIToken]: + ) -> TIToken | None: creds = await super().__call__(request) if not creds: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing auth token") @@ -96,6 +96,9 @@ async def __call__( # type: ignore[override] JWTBearerDep: TIToken = Depends(JWTBearer()) +# This checks that the UUID in the url matches the one in the token for us. 
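# Editor's note: an illustrative sketch (not part of the patch) of what such a path-bound bearer
# check amounts to: after the token itself has been validated, the task instance id taken from
# the URL is compared against the id the token was issued for. The helper and its parameter
# names are hypothetical; the real comparison happens inside ``JWTBearer.__call__``.
from fastapi import HTTPException as _HTTPException, status as _status

def _demo_check_path_matches_token(path_task_instance_id: str, token_ti_id: str) -> None:
    # Reject requests whose URL refers to a different task instance than the token covers.
    if path_task_instance_id != token_ti_id:
        raise _HTTPException(
            status_code=_status.HTTP_403_FORBIDDEN,
            detail="Token does not grant access to this task instance",
        )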
+JWTBearerTIPathDep = Depends(JWTBearer(path_param_name="task_instance_id")) + class JWTReissuer: """Re-issue JWTs to requests when they are about to run out.""" diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py index 164c3f0942d1f..89d96083876db 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py @@ -26,6 +26,7 @@ connections, dag_runs, health, + hitl, task_instances, task_reschedules, variables, @@ -48,5 +49,6 @@ ) authenticated_router.include_router(variables.router, prefix="/variables", tags=["Variables"]) authenticated_router.include_router(xcoms.router, prefix="/xcoms", tags=["XComs"]) +authenticated_router.include_router(hitl.router, prefix="/hitlDetails", tags=["Human in the Loop"]) execution_api_router.include_router(authenticated_router) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/asset_events.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/asset_events.py index ec7a50705f215..09fb0fd5e679f 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/asset_events.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/asset_events.py @@ -19,11 +19,10 @@ from typing import Annotated -from fastapi import HTTPException, Query, status +from fastapi import APIRouter, HTTPException, Query, status from sqlalchemy import and_, select from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.execution_api.datamodels.asset import AssetResponse from airflow.api_fastapi.execution_api.datamodels.asset_event import ( AssetEventResponse, @@ -32,7 +31,7 @@ from airflow.models.asset import AssetAliasModel, AssetEvent, AssetModel # TODO: Add dependency on JWT token -router = AirflowRouter( +router = APIRouter( responses={ status.HTTP_404_NOT_FOUND: {"description": "Asset not found"}, status.HTTP_401_UNAUTHORIZED: {"description": "Unauthorized"}, diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/assets.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/assets.py index 213c599befb3e..316d4fab4770d 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/assets.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/assets.py @@ -19,16 +19,15 @@ from typing import Annotated -from fastapi import HTTPException, Query, status +from fastapi import APIRouter, HTTPException, Query, status from sqlalchemy import select from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.execution_api.datamodels.asset import AssetResponse from airflow.models.asset import AssetModel # TODO: Add dependency on JWT token -router = AirflowRouter( +router = APIRouter( responses={ status.HTTP_404_NOT_FOUND: {"description": "Asset not found"}, status.HTTP_401_UNAUTHORIZED: {"description": "Unauthorized"}, diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/dag_runs.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/dag_runs.py index 9044f9bb65d13..22237ee5e28cd 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/dag_runs.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/dag_runs.py @@ -20,21 +20,22 @@ import logging from typing import Annotated -from fastapi 
import HTTPException, Query, status +from fastapi import APIRouter, HTTPException, Query, status from sqlalchemy import func, select from airflow.api.common.trigger_dag import trigger_dag +from airflow.api_fastapi.common.dagbag import DagBagDep, get_dag_for_run from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.common.types import UtcDateTime from airflow.api_fastapi.execution_api.datamodels.dagrun import DagRunStateResponse, TriggerDAGRunPayload +from airflow.api_fastapi.execution_api.datamodels.taskinstance import DagRun from airflow.exceptions import DagRunAlreadyExists from airflow.models.dag import DagModel -from airflow.models.dagbag import DagBag -from airflow.models.dagrun import DagRun +from airflow.models.dagrun import DagRun as DagRunModel +from airflow.utils.state import DagRunState from airflow.utils.types import DagRunTriggeredByType -router = AirflowRouter() +router = APIRouter() log = logging.getLogger(__name__) @@ -106,6 +107,7 @@ def clear_dag_run( dag_id: str, run_id: str, session: SessionDep, + dag_bag: DagBagDep, ): """Clear a DAG Run.""" dm = session.scalar(select(DagModel).where(~DagModel.is_stale, DagModel.dag_id == dag_id).limit(1)) @@ -124,8 +126,11 @@ def clear_dag_run( }, ) - dag_bag = DagBag(dag_folder=dm.fileloc, read_dags_from_db=True) - dag = dag_bag.get_dag(dag_id) + dag_run = session.scalar( + select(DagRunModel).where(DagRunModel.dag_id == dag_id, DagRunModel.run_id == run_id) + ) + dag = get_dag_for_run(dag_bag, dag_run=dag_run, session=session) + dag.clear(run_id=run_id) @@ -141,7 +146,9 @@ def get_dagrun_state( session: SessionDep, ) -> DagRunStateResponse: """Get a DAG Run State.""" - dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, DagRun.run_id == run_id)) + dag_run = session.scalar( + select(DagRunModel).where(DagRunModel.dag_id == dag_id, DagRunModel.run_id == run_id) + ) if dag_run is None: raise HTTPException( status.HTTP_404_NOT_FOUND, @@ -163,16 +170,45 @@ def get_dr_count( states: Annotated[list[str] | None, Query()] = None, ) -> int: """Get the count of DAG runs matching the given criteria.""" - query = select(func.count()).select_from(DagRun).where(DagRun.dag_id == dag_id) + query = select(func.count()).select_from(DagRunModel).where(DagRunModel.dag_id == dag_id) if logical_dates: - query = query.where(DagRun.logical_date.in_(logical_dates)) + query = query.where(DagRunModel.logical_date.in_(logical_dates)) if run_ids: - query = query.where(DagRun.run_id.in_(run_ids)) + query = query.where(DagRunModel.run_id.in_(run_ids)) if states: - query = query.where(DagRun.state.in_(states)) + query = query.where(DagRunModel.state.in_(states)) count = session.scalar(query) return count or 0 + + +@router.get("/{dag_id}/previous", status_code=status.HTTP_200_OK) +def get_previous_dagrun( + dag_id: str, + logical_date: UtcDateTime, + session: SessionDep, + state: Annotated[DagRunState | None, Query()] = None, +) -> DagRun | None: + """Get the previous DAG run before the given logical date, optionally filtered by state.""" + query = ( + select(DagRunModel) + .where( + DagRunModel.dag_id == dag_id, + DagRunModel.logical_date < logical_date, + ) + .order_by(DagRunModel.logical_date.desc()) + .limit(1) + ) + + if state: + query = query.where(DagRunModel.state == state) + + dag_run = session.scalar(query) + + if not dag_run: + return None + + return DagRun.model_validate(dag_run) diff --git 
a/airflow-core/src/airflow/api_fastapi/execution_api/routes/health.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/health.py
index bed519fdb2d44..d808f51e1db6a 100644
--- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/health.py
+++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/health.py
@@ -17,12 +17,12 @@
 from __future__ import annotations

+from fastapi import APIRouter
 from fastapi.responses import JSONResponse

-from airflow.api_fastapi.common.router import AirflowRouter
 from airflow.api_fastapi.execution_api.deps import DepContainer

-router = AirflowRouter()
+router = APIRouter()

 @router.get("")
diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py
new file mode 100644
index 0000000000000..9a34392f3ad69
--- /dev/null
+++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py
@@ -0,0 +1,141 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from datetime import datetime, timezone
+from uuid import UUID
+
+import structlog
+from fastapi import APIRouter, HTTPException, status
+from sqlalchemy import select
+
+from airflow.api_fastapi.common.db.common import SessionDep
+from airflow.api_fastapi.execution_api.datamodels.hitl import (
+    HITLDetailRequest,
+    HITLDetailResponse,
+    UpdateHITLDetailPayload,
+)
+from airflow.models.hitl import HITLDetail
+
+router = APIRouter()
+
+log = structlog.get_logger(__name__)
+
+
+@router.post(
+    "/{task_instance_id}",
+    status_code=status.HTTP_201_CREATED,
+)
+def upsert_hitl_detail(
+    task_instance_id: UUID,
+    payload: HITLDetailRequest,
+    session: SessionDep,
+) -> HITLDetailRequest:
+    """
+    Create a Human-in-the-loop detail for a specific Task Instance.
+
+    There are three cases handled here.
+
+    1. If a HITLOperator task instance does not have a HITLDetail,
+       a new HITLDetail is created without a response section.
+    2. If a HITLOperator task instance has a HITLDetail but lacks a response,
+       the existing HITLDetail is returned.
+       This situation occurs when a task instance is cleared before a response is received.
+    3. If a HITLOperator task instance has both a HITLDetail and a response section,
+       the existing response is removed, and the HITLDetail is returned.
+       This happens when a task instance is cleared after a response has been received.
+    This design ensures that each task instance has only one HITLDetail.
+    """
+    ti_id_str = str(task_instance_id)
+    hitl_detail_model = session.scalar(select(HITLDetail).where(HITLDetail.ti_id == ti_id_str))
+    if not hitl_detail_model:
+        hitl_detail_model = HITLDetail(
+            ti_id=ti_id_str,
+            options=payload.options,
+            subject=payload.subject,
+            body=payload.body,
+            defaults=payload.defaults,
+            multiple=payload.multiple,
+            params=payload.params,
+        )
+        session.add(hitl_detail_model)
+    elif hitl_detail_model.response_received:
+        # Clean up the response part of HITLDetail, as we only store one response per task instance.
+        # This normally happens after a retry; we keep only the latest response.
+        hitl_detail_model.user_id = None
+        hitl_detail_model.response_at = None
+        hitl_detail_model.chosen_options = None
+        hitl_detail_model.params_input = {}
+        session.add(hitl_detail_model)
+
+    return HITLDetailRequest.model_validate(hitl_detail_model)
+
+
+def _check_hitl_detail_exists(hitl_detail_model: HITLDetail | None) -> None:
+    if not hitl_detail_model:
+        raise HTTPException(
+            status.HTTP_404_NOT_FOUND,
+            detail={
+                "reason": "not_found",
+                "message": (
+                    "HITLDetail not found. "
+                    "This most likely happens because the task instance was cleared before a response was received."
+                ),
+            },
+        )
+
+
+@router.patch("/{task_instance_id}")
+def update_hitl_detail(
+    task_instance_id: UUID,
+    payload: UpdateHITLDetailPayload,
+    session: SessionDep,
+) -> HITLDetailResponse:
+    """Update the response part of a Human-in-the-loop detail for a specific Task Instance."""
+    ti_id_str = str(task_instance_id)
+    hitl_detail_model = session.execute(select(HITLDetail).where(HITLDetail.ti_id == ti_id_str)).scalar()
+    _check_hitl_detail_exists(hitl_detail_model)
+    if hitl_detail_model.response_received:
+        raise HTTPException(
+            status.HTTP_409_CONFLICT,
+            f"Human-in-the-loop detail for Task Instance with id {ti_id_str} already exists.",
+        )
+
+    hitl_detail_model.user_id = "Fallback to defaults"
+    hitl_detail_model.response_at = datetime.now(timezone.utc)
+    hitl_detail_model.chosen_options = payload.chosen_options
+    hitl_detail_model.params_input = payload.params_input
+    session.add(hitl_detail_model)
+    session.commit()
+    return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model)
+
+
+@router.get(
+    "/{task_instance_id}",
+    status_code=status.HTTP_200_OK,
+)
+def get_hitl_detail(
+    task_instance_id: UUID,
+    session: SessionDep,
+) -> HITLDetailResponse:
+    """Get Human-in-the-loop detail for a specific Task Instance."""
+    ti_id_str = str(task_instance_id)
+    hitl_detail_model = session.execute(
+        select(HITLDetail).where(HITLDetail.ti_id == ti_id_str),
+    ).scalar()
+    _check_hitl_detail_exists(hitl_detail_model)
+    return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model)
diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py
index af2bf86c7cde0..3892cf8d42967 100644
--- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py
+++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py
@@ -17,23 +17,32 @@
 from __future__ import annotations

+import contextlib
+import itertools
 import json
-import logging
 from collections import defaultdict
-from typing import Annotated, Any
+from collections.abc import Iterator
+from datetime import datetime
+from typing import TYPE_CHECKING, Annotated, Any, cast
 from uuid import UUID

+import attrs
+import structlog
 from cadwyn import VersionedAPIRouter
-from fastapi
status +from fastapi import Body, HTTPException, Query, status from pydantic import JsonValue from sqlalchemy import func, or_, tuple_, update from sqlalchemy.exc import NoResultFound, SQLAlchemyError from sqlalchemy.orm import joinedload from sqlalchemy.sql import select +from structlog.contextvars import bind_contextvars +from airflow._shared.timezones import timezone +from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag from airflow.api_fastapi.common.db.common import SessionDep from airflow.api_fastapi.common.types import UtcDateTime from airflow.api_fastapi.execution_api.datamodels.taskinstance import ( + InactiveAssetsResponse, PrevSuccessfulDagRunResponse, TaskStatesResponse, TIDeferredStatePayload, @@ -47,27 +56,35 @@ TISuccessStatePayload, TITerminalStatePayload, ) -from airflow.api_fastapi.execution_api.deps import JWTBearer -from airflow.models.dagbag import DagBag +from airflow.api_fastapi.execution_api.deps import JWTBearerTIPathDep +from airflow.exceptions import TaskNotFound +from airflow.models.asset import AssetActive from airflow.models.dagrun import DagRun as DR from airflow.models.taskinstance import TaskInstance as TI, _stop_remaining_tasks from airflow.models.taskreschedule import TaskReschedule from airflow.models.trigger import Trigger from airflow.models.xcom import XComModel -from airflow.utils import timezone +from airflow.sdk.definitions._internal.expandinput import NotFullyPopulated +from airflow.sdk.definitions.asset import Asset, AssetUniqueKey from airflow.utils.state import DagRunState, TaskInstanceState, TerminalTIState +if TYPE_CHECKING: + from sqlalchemy.sql.dml import Update + + from airflow.models.expandinput import SchedulerExpandInput + from airflow.sdk.types import Operator + router = VersionedAPIRouter() ti_id_router = VersionedAPIRouter( dependencies=[ # This checks that the UUID in the url matches the one in the token for us. - Depends(JWTBearer(path_param_name="task_instance_id")), + JWTBearerTIPathDep ] ) -log = logging.getLogger(__name__) +log = structlog.get_logger(__name__) @ti_id_router.patch( @@ -81,7 +98,10 @@ response_model_exclude_unset=True, ) def ti_run( - task_instance_id: UUID, ti_run_payload: Annotated[TIEnterRunningPayload, Body()], session: SessionDep + task_instance_id: UUID, + ti_run_payload: Annotated[TIEnterRunningPayload, Body()], + session: SessionDep, + dag_bag: DagBagDep, ) -> TIRunContext: """ Run a TaskInstance. 
@@ -90,6 +110,13 @@ def ti_run( """ # We only use UUID above for validation purposes ti_id_str = str(task_instance_id) + bind_contextvars(ti_id=ti_id_str) + log.debug( + "Starting task instance run", + hostname=ti_run_payload.hostname, + unixname=ti_run_payload.unixname, + pid=ti_run_payload.pid, + ) from sqlalchemy.sql import column from sqlalchemy.types import JSON @@ -118,8 +145,9 @@ def ti_run( ) try: ti = session.execute(old).one() + log.debug("Retrieved task instance details", state=ti.state, dag_id=ti.dag_id, task_id=ti.task_id) except NoResultFound: - log.error("Task Instance %s not found", ti_id_str) + log.error("Task Instance not found") raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail={ @@ -131,23 +159,27 @@ def ti_run( # We exclude_unset to avoid updating fields that are not set in the payload data = ti_run_payload.model_dump(exclude_unset=True) + # don't update start date when resuming from deferral + if ti.next_kwargs: + data.pop("start_date") + log.debug("Removed start_date from update as task is resuming from deferral") + query = update(TI).where(TI.id == ti_id_str).values(data) previous_state = ti.state # If we are already running, but this is a duplicate request from the same client return the same OK # -- it's possible there was a network glitch and they never got the response - if previous_state == TaskInstanceState.RUNNING and (ti["hostname"], ti["unixname"], ti["pid"]) == ( + if previous_state == TaskInstanceState.RUNNING and (ti.hostname, ti.unixname, ti.pid) == ( ti_run_payload.hostname, ti_run_payload.unixname, ti_run_payload.pid, ): - log.info("Duplicate start request received from %s ", ti_run_payload.hostname) + log.info("Duplicate start request received", hostname=ti_run_payload.hostname) elif previous_state not in (TaskInstanceState.QUEUED, TaskInstanceState.RESTARTING): log.warning( - "Can not start Task Instance ('%s') in invalid state: %s", - ti_id_str, - previous_state, + "Cannot start Task Instance in invalid state", + previous_state=previous_state, ) # TODO: Pass a RFC 9457 compliant error message in "detail" field @@ -164,7 +196,7 @@ def ti_run( }, ) else: - log.info("Task with %s state started on %s ", previous_state, ti_run_payload.hostname) + log.info("Task started", previous_state=previous_state, hostname=ti_run_payload.hostname) # Ensure there is no end date set. query = query.values( end_date=None, @@ -177,7 +209,7 @@ def ti_run( try: result = session.execute(query) - log.info("TI %s state updated: %s row(s) affected", ti_id_str, result.rowcount) + log.info("Task instance state updated", rows_affected=result.rowcount) dr = ( session.scalars( @@ -190,6 +222,7 @@ def ti_run( ) if not dr: + log.error("DagRun not found", dag_id=ti.dag_id, run_id=ti.run_id) raise ValueError(f"DagRun with dag_id={ti.dag_id} and run_id={ti.run_id} not found.") # Send the keys to the SDK so that the client requests to clear those XComs from the server. 
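The hunk below computes `upstream_map_indexes` for the run context. The three-way rule is easier to see in isolation, so here is a simplified, self-contained sketch under assumed inputs (the real implementation works on task-group objects and may resolve XCom counts from the database; group names and the count parameter here are illustrative):

def resolve_upstream_map_indexes(
    upstream_group: str | None,
    current_group: str | None,
    ti_map_index: int,
    upstream_ti_count: int | None,
) -> int | list[int] | None:
    if upstream_group is None:
        # upstream is a regular task (or a non-mapped group): no index restriction
        return None
    if upstream_group == current_group:
        # same mapped group: each expanded instance reads its own upstream instance
        return ti_map_index
    # different mapped group: consume the upstream group's results as a whole list
    return list(range(upstream_ti_count)) if upstream_ti_count is not None else None

assert resolve_upstream_map_indexes(None, "g", 2, None) is None
assert resolve_upstream_map_indexes("g", "g", 2, 5) == 2
assert resolve_upstream_map_indexes("h", "g", 2, 3) == [0, 1, 2]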
@@ -219,6 +252,13 @@ def ti_run(
         or 0
     )

+    if dag := dag_bag.get_dag_for_run(dag_run=dr, session=session):
+        upstream_map_indexes = dict(
+            _get_upstream_map_indexes(dag.get_task(ti.task_id), ti.map_index, ti.run_id, session)
+        )
+    else:
+        upstream_map_indexes = None
+
     context = TIRunContext(
         dag_run=dr,
         task_reschedule_count=task_reschedule_count,
@@ -228,6 +268,7 @@
         connections=[],
         xcom_keys_to_clear=xcom_keys,
         should_retry=_is_eligible_to_retry(previous_state, ti.try_number, ti.max_tries),
+        upstream_map_indexes=upstream_map_indexes,
     )

     # Only set if they are non-null
@@ -243,6 +284,36 @@
     )


+def _get_upstream_map_indexes(
+    task: Operator, ti_map_index: int, run_id: str, session: SessionDep
+) -> Iterator[tuple[str, int | list[int] | None]]:
+    task_mapped_group = task.get_closest_mapped_task_group()
+    for upstream_task in task.upstream_list:
+        upstream_mapped_group = upstream_task.get_closest_mapped_task_group()
+        map_indexes: int | list[int] | None
+        if upstream_mapped_group is None:
+            # regular tasks or non-mapped task groups
+            map_indexes = None
+        elif task_mapped_group == upstream_mapped_group:
+            # tasks in the same mapped task group hierarchy
+            map_indexes = ti_map_index
+        else:
+            # tasks not in the same mapped task group;
+            # the upstream mapped task group should combine the returned XComs into a list and return it
+            mapped_ti_count: int
+            try:
+                # for cases that do not need to resolve XComs
+                mapped_ti_count = upstream_mapped_group.get_parse_time_mapped_ti_count()
+            except NotFullyPopulated:
+                # for cases that need to resolve XComs to get the correct count
+                mapped_ti_count = cast(
+                    "SchedulerExpandInput", upstream_mapped_group._expand_input
+                ).get_total_map_length(run_id, session=session)
+            map_indexes = list(range(mapped_ti_count)) if mapped_ti_count is not None else None
+
+        yield upstream_task.task_id, map_indexes
+
+
 @ti_id_router.patch(
     "/{task_instance_id}/state",
     status_code=status.HTTP_204_NO_CONTENT,
@@ -256,7 +327,7 @@
 def ti_update_state(
     task_instance_id: UUID,
     ti_patch_payload: Annotated[TIStateUpdate, Body()],
     session: SessionDep,
-    request: Request,
+    dag_bag: DagBagDep,
 ):
     """
     Update the state of a TaskInstance.
@@ -264,10 +335,12 @@
     Not all state transitions are valid, and transitioning to some states requires extra information to be passed along.
(Check out the datamodels for details, the rendered docs might not reflect this accurately) """ - updated_state: str = "" - # We only use UUID above for validation purposes ti_id_str = str(task_instance_id) + bind_contextvars(ti_id=ti_id_str) + log.debug("Updating task instance state", new_state=ti_patch_payload.state) + + updated_state: str = "" old = select(TI.state, TI.try_number, TI.max_tries, TI.dag_id).where(TI.id == ti_id_str).with_for_update() try: @@ -277,8 +350,14 @@ def ti_update_state( max_tries, dag_id, ) = session.execute(old).one() + log.debug( + "Retrieved current task instance state", + previous_state=previous_state, + try_number=try_number, + max_tries=max_tries, + ) except NoResultFound: - log.error("Task Instance %s not found", ti_id_str) + log.error("Task Instance not found") raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail={ @@ -289,9 +368,8 @@ def ti_update_state( if previous_state != TaskInstanceState.RUNNING: log.warning( - "Cannot update Task Instance ('%s') because it is in an invalid state: %s for an update", - ti_id_str, - previous_state, + "Cannot update Task Instance in invalid state", + previous_state=previous_state, ) raise HTTPException( status_code=status.HTTP_409_CONFLICT, @@ -304,39 +382,75 @@ def ti_update_state( # We exclude_unset to avoid updating fields that are not set in the payload data = ti_patch_payload.model_dump(exclude={"task_outlets", "outlet_events"}, exclude_unset=True) - query = update(TI).where(TI.id == ti_id_str).values(data) - if isinstance(ti_patch_payload, TITerminalStatePayload): - updated_state = ti_patch_payload.state - query = TI.duration_expression_update(ti_patch_payload.end_date, query, session.bind) - query = query.values(state=updated_state) + try: + query, updated_state = _create_ti_state_update_query_and_update_state( + ti_patch_payload=ti_patch_payload, + ti_id_str=ti_id_str, + session=session, + query=query, + updated_state=updated_state, + dag_id=dag_id, + dag_bag=dag_bag, + ) + except Exception: + # Set a task to failed in case any unexpected exception happened during task state update + log.exception("Error updating Task Instance state to %s. 
Set the task to failed", updated_state) + ti = session.get(TI, ti_id_str) + query = TI.duration_expression_update(datetime.now(tz=timezone.utc), query, session.bind) + query = query.values(state=TaskInstanceState.FAILED) + _handle_fail_fast_for_dag(ti=ti, dag_id=dag_id, session=session, dag_bag=dag_bag) - if updated_state == TerminalTIState.FAILED: - ti = session.get(TI, ti_id_str) - ser_dag = request.app.state.dag_bag.get_dag(dag_id) - if ser_dag and getattr(ser_dag, "fail_fast", False): - task_dict = getattr(ser_dag, "task_dict") - task_teardown_map = {k: v.is_teardown for k, v in task_dict.items()} - _stop_remaining_tasks(task_instance=ti, task_teardown_map=task_teardown_map, session=session) - - elif isinstance(ti_patch_payload, TIRetryStatePayload): + # TODO: Replace this with FastAPI's Custom Exception handling: + # https://fastapi.tiangolo.com/tutorial/handling-errors/#install-custom-exception-handlers + try: + result = session.execute(query) + log.info("Task instance state updated", new_state=updated_state, rows_affected=result.rowcount) + except SQLAlchemyError as e: + log.error("Error updating Task Instance state", error=str(e)) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Database error occurred" + ) + + +def _handle_fail_fast_for_dag(ti: TI, dag_id: str, session: SessionDep, dag_bag: DagBagDep) -> None: + dr = ti.dag_run + ser_dag = dag_bag.get_dag_for_run(dag_run=dr, session=session) + if ser_dag and getattr(ser_dag, "fail_fast", False): + task_dict = getattr(ser_dag, "task_dict") + task_teardown_map = {k: v.is_teardown for k, v in task_dict.items()} + _stop_remaining_tasks(task_instance=ti, task_teardown_map=task_teardown_map, session=session) + + +def _create_ti_state_update_query_and_update_state( + *, + ti_patch_payload: TIStateUpdate, + ti_id_str: str, + query: Update, + updated_state, + session: SessionDep, + dag_bag: DagBagDep, + dag_id: str, +) -> tuple[Update, TaskInstanceState]: + if isinstance(ti_patch_payload, (TITerminalStatePayload, TIRetryStatePayload, TISuccessStatePayload)): ti = session.get(TI, ti_id_str) updated_state = ti_patch_payload.state - ti.prepare_db_for_next_try(session) - query = TI.duration_expression_update(ti_patch_payload.end_date, query, session.bind) - query = query.values(state=updated_state) - elif isinstance(ti_patch_payload, TISuccessStatePayload): query = TI.duration_expression_update(ti_patch_payload.end_date, query, session.bind) - updated_state = ti_patch_payload.state - task_instance = session.get(TI, ti_id_str) - TI.register_asset_changes_in_db( - task_instance, - ti_patch_payload.task_outlets, # type: ignore - ti_patch_payload.outlet_events, - session, - ) query = query.values(state=updated_state) + + if updated_state == TerminalTIState.FAILED: + # This is the only case needs extra handling for TITerminalStatePayload + _handle_fail_fast_for_dag(ti=ti, dag_id=dag_id, session=session, dag_bag=dag_bag) + elif isinstance(ti_patch_payload, TIRetryStatePayload): + ti.prepare_db_for_next_try(session) + elif isinstance(ti_patch_payload, TISuccessStatePayload): + TI.register_asset_changes_in_db( + ti, + ti_patch_payload.task_outlets, # type: ignore + ti_patch_payload.outlet_events, + session, + ) elif isinstance(ti_patch_payload, TIDeferredStatePayload): # Calculate timeout if it was passed timeout = None @@ -387,14 +501,17 @@ def ti_update_state( # As documented in https://dev.mysql.com/doc/refman/5.7/en/datetime.html. 
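# Editor's note: a quick, standalone check (not part of the patch) of the arithmetic behind the
# constant below. MySQL's TIMESTAMP stores seconds as a signed 32-bit offset from the Unix
# epoch, so the last representable instant is 2**31 - 1 seconds after 1970-01-01T00:00:00Z.
from datetime import datetime as _dt, timezone as _tz

assert _dt.fromtimestamp(2**31 - 1, tz=_tz.utc) == _dt(2038, 1, 19, 3, 14, 7, tzinfo=_tz.utc)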
_MYSQL_TIMESTAMP_MAX = timezone.datetime(2038, 1, 19, 3, 14, 7) if ti_patch_payload.reschedule_date > _MYSQL_TIMESTAMP_MAX: - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail={ - "reason": "invalid_reschedule_date", - "message": f"Cannot reschedule to {ti_patch_payload.reschedule_date.isoformat()} " - f"since it is over MySQL's TIMESTAMP storage limit.", - }, + # Set a task to failed in case any unexpected exception happened during task state update + log.exception( + "Error updating Task Instance state to %s. Set the task to failed", updated_state ) + data = ti_patch_payload.model_dump(exclude={"reschedule_date"}, exclude_unset=True) + query = update(TI).where(TI.id == ti_id_str).values(data) + query = TI.duration_expression_update(datetime.now(tz=timezone.utc), query, session.bind) + query = query.values(state=TaskInstanceState.FAILED) + ti = session.get(TI, ti_id_str) + _handle_fail_fast_for_dag(ti=ti, dag_id=dag_id, session=session, dag_bag=dag_bag) + return query, updated_state task_instance = session.get(TI, ti_id_str) actual_start_date = timezone.utcnow() @@ -413,16 +530,10 @@ def ti_update_state( # clear the next_method and next_kwargs so that none of the retries pick them up query = query.values(state=TaskInstanceState.UP_FOR_RESCHEDULE, next_method=None, next_kwargs=None) updated_state = TaskInstanceState.UP_FOR_RESCHEDULE - # TODO: Replace this with FastAPI's Custom Exception handling: - # https://fastapi.tiangolo.com/tutorial/handling-errors/#install-custom-exception-handlers - try: - result = session.execute(query) - log.info("TI %s state updated to %s: %s row(s) affected", ti_id_str, updated_state, result.rowcount) - except SQLAlchemyError as e: - log.error("Error updating Task Instance state: %s", e) - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Database error occurred" - ) + else: + raise ValueError(f"Unexpected Payload Type {type(ti_patch_payload)}") + + return query, updated_state @ti_id_router.patch( @@ -439,12 +550,17 @@ def ti_skip_downstream( session: SessionDep, ): ti_id_str = str(task_instance_id) + bind_contextvars(ti_id=ti_id_str) + log.info("Skipping downstream tasks", task_count=len(ti_patch_payload.tasks)) + now = timezone.utcnow() tasks = ti_patch_payload.tasks dag_id, run_id = session.execute(select(TI.dag_id, TI.run_id).where(TI.id == ti_id_str)).fetchone() + log.debug("Retrieved DAG and run info", dag_id=dag_id, run_id=run_id) task_ids = [task if isinstance(task, tuple) else (task, -1) for task in tasks] + log.debug("Prepared task IDs for skipping", task_ids=task_ids) query = ( update(TI) @@ -454,7 +570,7 @@ def ti_skip_downstream( ) result = session.execute(query) - log.info("TI %s updated the state of %s task(s) to skipped", ti_id_str, result.rowcount) + log.info("Downstream tasks skipped", tasks_skipped=result.rowcount) @ti_id_router.put( @@ -475,6 +591,8 @@ def ti_heartbeat( ): """Update the heartbeat of a TaskInstance to mark it as alive & still running.""" ti_id_str = str(task_instance_id) + bind_contextvars(ti_id=ti_id_str) + log.debug("Processing heartbeat", hostname=ti_payload.hostname, pid=ti_payload.pid) # Hot path: since heartbeating a task is a very common operation, we try to do minimize the number of queries # and DB round trips as much as possible. 
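The handlers in this file lean on structlog's context-variable support: once `bind_contextvars(ti_id=...)` runs at the top of a request, every later log call in that request automatically carries the `ti_id` field, which is why the individual messages no longer repeat it. A minimal, self-contained sketch of the pattern (the configuration and names here are illustrative, not Airflow's actual logging setup):

import structlog
from structlog.contextvars import bind_contextvars, clear_contextvars

structlog.configure(
    processors=[
        structlog.contextvars.merge_contextvars,  # injects bound context into every event dict
        structlog.processors.KeyValueRenderer(),
    ]
)
demo_log = structlog.get_logger()

clear_contextvars()
bind_contextvars(ti_id="01900000-example")  # hypothetical task instance id
demo_log.info("Task started", hostname="worker-1")  # rendered with ti_id=... attached automatically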
@@ -483,8 +601,11 @@ def ti_heartbeat( try: (previous_state, hostname, pid) = session.execute(old).one() + log.debug( + "Retrieved current task state", state=previous_state, current_hostname=hostname, current_pid=pid + ) except NoResultFound: - log.error("Task Instance %s not found", ti_id_str) + log.error("Task Instance not found") raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail={ @@ -494,6 +615,13 @@ def ti_heartbeat( ) if hostname != ti_payload.hostname or pid != ti_payload.pid: + log.warning( + "Task running elsewhere", + current_hostname=hostname, + current_pid=pid, + requested_hostname=ti_payload.hostname, + requested_pid=ti_payload.pid, + ) raise HTTPException( status_code=status.HTTP_409_CONFLICT, detail={ @@ -505,6 +633,7 @@ def ti_heartbeat( ) if previous_state != TaskInstanceState.RUNNING: + log.warning("Task not in running state", current_state=previous_state) raise HTTPException( status_code=status.HTTP_409_CONFLICT, detail={ @@ -516,7 +645,7 @@ def ti_heartbeat( # Update the last heartbeat time! session.execute(update(TI).where(TI.id == ti_id_str).values(last_heartbeat_at=timezone.utcnow())) - log.debug("Task with %s state heartbeated", previous_state) + log.debug("Heartbeat updated", state=previous_state) @ti_id_router.put( @@ -539,12 +668,17 @@ def ti_put_rtif( ): """Add an RTIF entry for a task instance, sent by the worker.""" ti_id_str = str(task_instance_id) + bind_contextvars(ti_id=ti_id_str) + log.info("Updating RenderedTaskInstanceFields", field_count=len(put_rtif_payload)) + task_instance = session.scalar(select(TI).where(TI.id == ti_id_str)) if not task_instance: + log.error("Task Instance not found") raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, ) task_instance.update_rtif(put_rtif_payload, session) + log.debug("RenderedTaskInstanceFields updated successfully") return {"message": "Rendered task instance fields successfully set"} @@ -565,8 +699,12 @@ def get_previous_successful_dagrun( The data from this endpoint is used to get values for Task Context. 
""" ti_id_str = str(task_instance_id) + bind_contextvars(ti_id=ti_id_str) + log.debug("Retrieving previous successful DAG run") + task_instance = session.scalar(select(TI).where(TI.id == ti_id_str)) if not task_instance or not task_instance.logical_date: + log.debug("No task instance or logical date found") return PrevSuccessfulDagRunResponse() dag_run = session.scalar( @@ -580,15 +718,24 @@ def get_previous_successful_dagrun( .limit(1) ) if not dag_run: + log.debug("No previous successful DAG run found") return PrevSuccessfulDagRunResponse() + log.debug( + "Found previous successful DAG run", + dag_id=dag_run.dag_id, + run_id=dag_run.run_id, + logical_date=dag_run.logical_date, + ) return PrevSuccessfulDagRunResponse.model_validate(dag_run) @router.get("/count", status_code=status.HTTP_200_OK) -def get_count( +def get_task_instance_count( dag_id: str, session: SessionDep, + dag_bag: DagBagDep, + map_index: Annotated[int | None, Query()] = None, task_ids: Annotated[list[str] | None, Query()] = None, task_group_id: Annotated[str | None, Query()] = None, logical_dates: Annotated[list[UtcDateTime] | None, Query()] = None, @@ -601,6 +748,9 @@ def get_count( if task_ids: query = query.where(TI.task_id.in_(task_ids)) + if map_index is not None: + query = query.where(TI.map_index == map_index) + if logical_dates: query = query.where(TI.logical_date.in_(logical_dates)) @@ -608,10 +758,15 @@ def get_count( query = query.where(TI.run_id.in_(run_ids)) if task_group_id: - group_tasks = _get_group_tasks(dag_id, task_group_id, session, logical_dates, run_ids) + group_tasks = _get_group_tasks(dag_id, task_group_id, session, dag_bag, logical_dates, run_ids) # Get unique (task_id, map_index) pairs + task_map_pairs = [(ti.task_id, ti.map_index) for ti in group_tasks] + + if map_index is not None: + task_map_pairs = [(ti.task_id, ti.map_index) for ti in group_tasks if ti.map_index == map_index] + if not task_map_pairs: # If no task group tasks found, default to checking the task group ID itself # This matches the behavior in _get_external_task_group_task_ids @@ -631,20 +786,21 @@ def get_count( query = query.where(TI.state.in_(states)) count = session.scalar(query) - return count or 0 @router.get("/states", status_code=status.HTTP_200_OK) -def get_task_states( +def get_task_instance_states( dag_id: str, session: SessionDep, + dag_bag: DagBagDep, + map_index: Annotated[int | None, Query()] = None, task_ids: Annotated[list[str] | None, Query()] = None, task_group_id: Annotated[str | None, Query()] = None, logical_dates: Annotated[list[UtcDateTime] | None, Query()] = None, run_ids: Annotated[list[str] | None, Query()] = None, ) -> TaskStatesResponse: - """Get the task states for the given criteria.""" + """Get the states for Task Instances with the given criteria.""" run_id_task_state_map: dict[str, dict[str, Any]] = defaultdict(dict) query = select(TI).where(TI.dag_id == dag_id) @@ -660,12 +816,21 @@ def get_task_states( results = session.scalars(query).all() - [run_id_task_state_map[task.run_id].update({task.task_id: task.state}) for task in results] - if task_group_id: - group_tasks = _get_group_tasks(dag_id, task_group_id, session, logical_dates, run_ids) + group_tasks = _get_group_tasks(dag_id, task_group_id, session, dag_bag, logical_dates, run_ids) + + results = results + group_tasks if task_ids else group_tasks - [run_id_task_state_map[task.run_id].update({task.task_id: task.state}) for task in group_tasks] + if map_index is not None: + results = [task for task in results if task.map_index == map_index] 
+    [
+        run_id_task_state_map[task.run_id].update(
+            {task.task_id: task.state}
+            if task.map_index < 0
+            else {f"{task.task_id}_{task.map_index}": task.state}
+        )
+        for task in results
+    ]

     return TaskStatesResponse(task_states=run_id_task_state_map)
@@ -682,18 +847,11 @@
 def _is_eligible_to_retry(state: str, try_number: int, max_tries: int) -> bool:
     return max_tries != 0 and try_number <= max_tries

-def _get_group_tasks(dag_id: str, task_group_id: str, session: SessionDep, logical_dates=None, run_ids=None):
+def _get_group_tasks(
+    dag_id: str, task_group_id: str, session: SessionDep, dag_bag: DagBagDep, logical_dates=None, run_ids=None
+):
     # Get all tasks in the task group
-    dag = DagBag(read_dags_from_db=True).get_dag(dag_id, session)
-    if not dag:
-        raise HTTPException(
-            status.HTTP_404_NOT_FOUND,
-            detail={
-                "reason": "not_found",
-                "message": f"DAG {dag_id} not found",
-            },
-        )
-
+    dag = get_latest_version_of_dag(dag_bag, dag_id, session, include_reason=True)
     task_group = dag.task_group_dict.get(task_group_id)
     if not task_group:
         raise HTTPException(
@@ -717,5 +875,68 @@ def _get_group_tasks(dag_id: str, task_group_id: str, session: SessionDep, logic
     return group_tasks

+@ti_id_router.get(
+    "/{task_instance_id}/validate-inlets-and-outlets",
+    status_code=status.HTTP_200_OK,
+    responses={
+        status.HTTP_404_NOT_FOUND: {"description": "Task Instance not found"},
+    },
+)
+def validate_inlets_and_outlets(
+    task_instance_id: UUID,
+    session: SessionDep,
+    dag_bag: DagBagDep,
+) -> InactiveAssetsResponse:
+    """Validate whether there are inactive assets in inlets and outlets of a given task instance."""
+    ti_id_str = str(task_instance_id)
+    bind_contextvars(ti_id=ti_id_str)
+
+    ti = session.scalar(select(TI).where(TI.id == ti_id_str))
+    if not ti:
+        log.error("Task Instance not found")
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail={
+                "reason": "not_found",
+                "message": "Task Instance not found",
+            },
+        )
+
+    if not ti.task:
+        dr = ti.dag_run
+        dag = dag_bag.get_dag_for_run(dag_run=dr, session=session)
+        if dag:
+            with contextlib.suppress(TaskNotFound):
+                ti.task = dag.get_task(ti.task_id)
+
+    inlets = [asset.asprofile() for asset in ti.task.inlets if isinstance(asset, Asset)]
+    outlets = [asset.asprofile() for asset in ti.task.outlets if isinstance(asset, Asset)]
+    if not (inlets or outlets):
+        return InactiveAssetsResponse(inactive_assets=[])
+
+    all_asset_unique_keys: set[AssetUniqueKey] = {
+        AssetUniqueKey.from_asset(inlet_or_outlet)  # type: ignore
+        for inlet_or_outlet in itertools.chain(inlets, outlets)
+    }
+    active_asset_unique_keys = {
+        AssetUniqueKey(name, uri)
+        for name, uri in session.execute(
+            select(AssetActive.name, AssetActive.uri).where(
+                tuple_(AssetActive.name, AssetActive.uri).in_(
+                    attrs.astuple(key) for key in all_asset_unique_keys
+                )
+            )
+        )
+    }
+    different = all_asset_unique_keys - active_asset_unique_keys
+
+    return InactiveAssetsResponse(
+        inactive_assets=[
+            asset_unique_key.to_asset().asprofile()  # type: ignore
+            for asset_unique_key in different
+        ]
+    )
+
+
 # This line should be at the end of the file to ensure all routes are registered
 router.include_router(ti_id_router)
diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_reschedules.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_reschedules.py
index d3e940f47a08f..f763858f9b9c1 100644
--- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_reschedules.py
+++
b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_reschedules.py @@ -19,15 +19,14 @@ from uuid import UUID -from fastapi import status +from fastapi import APIRouter, status from sqlalchemy import select from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.common.types import UtcDateTime from airflow.models.taskreschedule import TaskReschedule -router = AirflowRouter( +router = APIRouter( responses={ status.HTTP_404_NOT_FOUND: {"description": "Task Instance not found"}, status.HTTP_401_UNAUTHORIZED: {"description": "Unauthorized"}, diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/xcoms.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/xcoms.py index 96483e4f08d9a..31bfcb31a0a3a 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/xcoms.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/xcoms.py @@ -21,14 +21,17 @@ import sys from typing import Annotated, Any -from fastapi import Body, Depends, HTTPException, Path, Query, Request, Response, status -from pydantic import BaseModel, JsonValue +from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request, Response, status +from pydantic import BaseModel, JsonValue, StringConstraints from sqlalchemy import delete from sqlalchemy.sql.selectable import Select from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.common.router import AirflowRouter -from airflow.api_fastapi.execution_api.datamodels.xcom import XComResponse +from airflow.api_fastapi.execution_api.datamodels.xcom import ( + XComResponse, + XComSequenceIndexResponse, + XComSequenceSliceResponse, +) from airflow.api_fastapi.execution_api.deps import JWTBearerDep from airflow.models.taskmap import TaskMap from airflow.models.xcom import XComModel @@ -57,7 +60,7 @@ async def has_xcom_access( return True -router = AirflowRouter( +router = APIRouter( responses={ status.HTTP_401_UNAUTHORIZED: {"description": "Unauthorized"}, status.HTTP_403_FORBIDDEN: {"description": "Task does not have access to the XCom"}, @@ -95,7 +98,7 @@ async def xcom_query( "description": "Metadata about the number of matching XCom values", "headers": { "Content-Range": { - "pattern": r"^map_indexes \d+$", + "schema": {"pattern": r"^map_indexes \d+$"}, "description": "The number of (mapped) XCom values found for this task.", }, }, @@ -127,6 +130,7 @@ class GetXcomFilterParams(BaseModel): map_index: int = -1 include_prior_dates: bool = False + offset: int | None = None @router.get( @@ -137,23 +141,28 @@ def get_xcom( dag_id: str, run_id: str, task_id: str, - key: str, + key: Annotated[str, StringConstraints(min_length=1)], session: SessionDep, params: Annotated[GetXcomFilterParams, Query()], ) -> XComResponse: """Get an Airflow XCom from database - not other XCom Backends.""" - # The xcom_query allows no map_index to be passed. 
This endpoint should always return just a single item, - # so we override that query value xcom_query = XComModel.get_many( run_id=run_id, key=key, task_ids=task_id, dag_ids=dag_id, - map_indexes=params.map_index, include_prior_dates=params.include_prior_dates, session=session, ) - xcom_query = xcom_query.filter(XComModel.map_index == params.map_index) + if params.offset is not None: + xcom_query = xcom_query.filter(XComModel.value.is_not(None)).order_by(None) + if params.offset >= 0: + xcom_query = xcom_query.order_by(XComModel.map_index.asc()).offset(params.offset) + else: + xcom_query = xcom_query.order_by(XComModel.map_index.desc()).offset(-1 - params.offset) + else: + xcom_query = xcom_query.filter(XComModel.map_index == params.map_index) + # We use `BaseXCom.get_many` to fetch XComs directly from the database, bypassing the XCom Backend. # This avoids deserialization via the backend (e.g., from a remote storage like S3) and instead # retrieves the raw serialized value from the database. By not relying on `XCom.get_many` or `XCom.get_one` @@ -161,18 +170,152 @@ def get_xcom( # performance hits from retrieving large data files into the API server. result = xcom_query.limit(1).first() if result is None: - map_index = params.map_index + if params.offset is None: + message = ( + f"XCom with {key=} map_index={params.map_index} not found for " + f"task {task_id!r} in DAG run {run_id!r} of {dag_id!r}" + ) + else: + message = ( + f"XCom with {key=} offset={params.offset} not found for " + f"task {task_id!r} in DAG run {run_id!r} of {dag_id!r}" + ) raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail={ - "reason": "not_found", - "message": f"XCom with {key=} {map_index=} not found for task {task_id!r} in DAG run {run_id!r} of {dag_id!r}", - }, + detail={"reason": "not_found", "message": message}, ) return XComResponse(key=key, value=result.value) +@router.get( + "/{dag_id}/{run_id}/{task_id}/{key}/item/{offset}", + description="Get a single XCom value from a mapped task by sequence index", +) +def get_mapped_xcom_by_index( + dag_id: str, + run_id: str, + task_id: str, + key: str, + offset: int, + session: SessionDep, +) -> XComSequenceIndexResponse: + xcom_query = XComModel.get_many( + run_id=run_id, + key=key, + task_ids=task_id, + dag_ids=dag_id, + session=session, + ) + xcom_query = xcom_query.order_by(None) + if offset >= 0: + xcom_query = xcom_query.order_by(XComModel.map_index.asc()).offset(offset) + else: + xcom_query = xcom_query.order_by(XComModel.map_index.desc()).offset(-1 - offset) + + if (result := xcom_query.limit(1).first()) is None: + message = ( + f"XCom with {key=} {offset=} not found for task {task_id!r} in DAG run {run_id!r} of {dag_id!r}" + ) + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail={"reason": "not_found", "message": message}, + ) + return XComSequenceIndexResponse(result.value) + + +class GetXComSliceFilterParams(BaseModel): + """Class to house slice params.""" + + start: int | None = None + stop: int | None = None + step: int | None = None + include_prior_dates: bool = False + + +@router.get( + "/{dag_id}/{run_id}/{task_id}/{key}/slice", + description="Get XCom values from a mapped task by sequence slice", +) +def get_mapped_xcom_by_slice( + dag_id: str, + run_id: str, + task_id: str, + key: str, + params: Annotated[GetXComSliceFilterParams, Query()], + session: SessionDep, +) -> XComSequenceSliceResponse: + query = XComModel.get_many( + run_id=run_id, + key=key, + task_ids=task_id, + dag_ids=dag_id, + 
include_prior_dates=params.include_prior_dates, + session=session, + ) + query = query.order_by(None) + + step = params.step or 1 + + # We want to optimize negative slicing (e.g. seq[-10:]) by not doing an + # additional COUNT query if possible. This is possible unless both start and + # stop are explicitly given and have different signs. + if (start := params.start) is None: + if (stop := params.stop) is None: + if step >= 0: + query = query.order_by(XComModel.map_index.asc()) + else: + query = query.order_by(XComModel.map_index.desc()) + step = -step + elif stop >= 0: + query = query.order_by(XComModel.map_index.asc()) + if step >= 0: + query = query.limit(stop) + else: + query = query.offset(stop + 1) + else: + query = query.order_by(XComModel.map_index.desc()) + step = -step + if step > 0: + query = query.limit(-stop - 1) + else: + query = query.offset(-stop) + elif start >= 0: + query = query.order_by(XComModel.map_index.asc()) + if (stop := params.stop) is None: + if step >= 0: + query = query.offset(start) + else: + query = query.limit(start + 1) + else: + if stop < 0: + stop += get_query_count(query, session=session) + if step >= 0: + query = query.slice(start, stop) + else: + query = query.slice(stop + 1, start + 1) + else: + query = query.order_by(XComModel.map_index.desc()) + step = -step + if (stop := params.stop) is None: + if step > 0: + query = query.offset(-start - 1) + else: + query = query.limit(-start) + else: + if stop >= 0: + stop -= get_query_count(query, session=session) + if step > 0: + query = query.slice(-1 - start, -1 - stop) + else: + query = query.slice(-stop, -start) + + values = [row.value for row in query.with_entities(XComModel.value)] + if step != 1: + values = values[::step] + return XComSequenceSliceResponse(values) + + if sys.version_info < (3, 12): # zmievsa/cadwyn#262 # Setting this to "Any" doesn't have any impact on the API as it has to be parsed as valid JSON regardless @@ -189,7 +332,7 @@ def set_xcom( dag_id: str, run_id: str, task_id: str, - key: str, + key: Annotated[str, StringConstraints(min_length=1)], value: Annotated[ JsonValue, Body( @@ -219,6 +362,17 @@ def set_xcom( """Set an Airflow XCom.""" from airflow.configuration import conf + # Validate that the provided key is not empty + # XCom keys must be non-empty strings to ensure proper data retrieval and avoid ambiguity. 
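# [Editor's note] A hedged aside: `key` is already declared above as
# Annotated[str, StringConstraints(min_length=1)], so FastAPI/Pydantic rejects an
# empty key with a 422 before this handler body runs; the runtime check below is
# a defensive double-check. A minimal, self-contained sketch of the validation
# behaviour being relied on (the probe model name is made up for illustration):
from typing import Annotated

from pydantic import BaseModel, StringConstraints, ValidationError


class _KeyProbe(BaseModel):
    key: Annotated[str, StringConstraints(min_length=1)]


try:
    _KeyProbe(key="")
except ValidationError:
    print("empty XCom key rejected at validation time, before the endpoint runs")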
+ if not key: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail={ + "reason": "invalid_key", + "message": "XCom key must be a non-empty string.", + }, + ) + if mapped_length is not None: task_map = TaskMap( dag_id=dag_id, diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py index 0d3a225305b67..1fe0b9d155fc6 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py @@ -19,7 +19,23 @@ from cadwyn import HeadVersion, Version, VersionBundle +from airflow.api_fastapi.execution_api.versions.v2025_04_28 import AddRenderedMapIndexField +from airflow.api_fastapi.execution_api.versions.v2025_05_20 import DowngradeUpstreamMapIndexes +from airflow.api_fastapi.execution_api.versions.v2025_08_10 import ( + AddDagRunStateFieldAndPreviousEndpoint, + AddDagVersionIdField, + AddIncludePriorDatesToGetXComSlice, +) + bundle = VersionBundle( HeadVersion(), + Version( + "2025-08-10", + AddDagVersionIdField, + AddDagRunStateFieldAndPreviousEndpoint, + AddIncludePriorDatesToGetXComSlice, + ), + Version("2025-05-20", DowngradeUpstreamMapIndexes), + Version("2025-04-28", AddRenderedMapIndexField), Version("2025-04-11"), ) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_04_28.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_04_28.py new file mode 100644 index 0000000000000..e0916b4c93d67 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_04_28.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
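# [Editor's note] A hedged, plain-Python sketch of what the VersionBundle above
# amounts to at runtime: versions are listed newest-first, and a response bound
# for an older client is passed through each change's downgrade converter in
# turn. The converter functions here are simplified stand-ins, not Cadwyn's real
# API; `drop_dag_run_state` mirrors the `remove_state_from_dag_run` converter
# defined in v2025_08_10.py below.
def drop_dag_version_id(body: dict) -> dict:  # stand-in for AddDagVersionIdField
    body.pop("dag_version_id", None)
    return body


def drop_dag_run_state(body: dict) -> dict:  # mirrors remove_state_from_dag_run
    if isinstance(body.get("dag_run"), dict):
        body["dag_run"].pop("state", None)
    return body


# Downgrading from head toward 2025-04-11 applies the 2025-08-10 changes first.
downgrade_chain = [drop_dag_version_id, drop_dag_run_state]
body = {"dag_version_id": "v1", "dag_run": {"run_id": "r1", "state": "running"}}
for step in downgrade_chain:
    body = step(body)
assert body == {"dag_run": {"run_id": "r1"}}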
+ +from __future__ import annotations + +from cadwyn import VersionChange, schema + +from airflow.api_fastapi.execution_api.datamodels.taskinstance import ( + TIDeferredStatePayload, + TIRetryStatePayload, + TISuccessStatePayload, + TITerminalStatePayload, +) + + +class AddRenderedMapIndexField(VersionChange): + """Add the `rendered_map_index` field to payload models.""" + + description = __doc__ + + instructions_to_migrate_to_previous_version = ( + schema(TITerminalStatePayload).field("rendered_map_index").didnt_exist, + schema(TISuccessStatePayload).field("rendered_map_index").didnt_exist, + schema(TIDeferredStatePayload).field("rendered_map_index").didnt_exist, + schema(TIRetryStatePayload).field("rendered_map_index").didnt_exist, + ) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py new file mode 100644 index 0000000000000..8ea59844d985e --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for, schema + +from airflow.api_fastapi.execution_api.datamodels.taskinstance import TIRunContext + + +class DowngradeUpstreamMapIndexes(VersionChange): + """Downgrade the upstream map indexes type for older clients.""" + + description = __doc__ + + instructions_to_migrate_to_previous_version = ( + schema(TIRunContext).field("upstream_map_indexes").had(type=dict[str, int | None] | None), + ) + + @convert_response_to_previous_version_for(TIRunContext) # type: ignore[arg-type] + def downgrade_upstream_map_indexes(response: ResponseInfo = None) -> None: # type: ignore + """ + Downgrades the `upstream_map_indexes` field when converting to the previous version. + + Ensures that the field is only a dictionary of [str, int] (old format). + """ + resp = response.body.get("upstream_map_indexes") + if isinstance(resp, dict): + downgraded: dict[str, int | list | None] = {} + for k, v in resp.items(): + if isinstance(v, int): + downgraded[k] = v + elif isinstance(v, list) and v and all(isinstance(i, int) for i in v): + downgraded[k] = v[0] + else: + # Keep values like None as is — the Task SDK expects them unchanged during mapped task expansion, + # and modifying them can cause unexpected failures. 
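# [Editor's note] Worked example of the mapping this loop implements (new
# upstream_map_indexes value on the left, what a pre-2025-05-20 client
# receives on the right):
#     {"up1": 3}         -> {"up1": 3}     plain int kept as-is
#     {"up2": [4, 5, 6]} -> {"up2": 4}     list of ints collapsed to its first element
#     {"up3": []}        -> {"up3": None}  empty list carries no usable index
#     {"up4": None}      -> {"up4": None}  None preserved unchanged (see comment above)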
+ downgraded[k] = None + response.body["upstream_map_indexes"] = downgraded diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py new file mode 100644 index 0000000000000..c6c95c7dfdc38 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for, endpoint, schema + +from airflow.api_fastapi.execution_api.datamodels.taskinstance import DagRun, TaskInstance, TIRunContext +from airflow.api_fastapi.execution_api.routes.xcoms import GetXComSliceFilterParams + + +class AddDagVersionIdField(VersionChange): + """Add the `dag_version_id` field to the TaskInstance model.""" + + description = __doc__ + + instructions_to_migrate_to_previous_version = (schema(TaskInstance).field("dag_version_id").didnt_exist,) + + +class AddDagRunStateFieldAndPreviousEndpoint(VersionChange): + """Add the `state` field to DagRun model and `/dag-runs/{dag_id}/previous` endpoint.""" + + description = __doc__ + + instructions_to_migrate_to_previous_version = ( + schema(DagRun).field("state").didnt_exist, + endpoint("/dag-runs/{dag_id}/previous", ["GET"]).didnt_exist, + ) + + @convert_response_to_previous_version_for(TIRunContext) # type: ignore[arg-type] + def remove_state_from_dag_run(response: ResponseInfo) -> None: # type: ignore[misc] + """Remove the `state` field from the dag_run object when converting to the previous version.""" + if "dag_run" in response.body and isinstance(response.body["dag_run"], dict): + response.body["dag_run"].pop("state", None) + + +class AddIncludePriorDatesToGetXComSlice(VersionChange): + """Add the `include_prior_dates` field to GetXComSliceFilterParams.""" + + description = __doc__ + + instructions_to_migrate_to_previous_version = ( + schema(GetXComSliceFilterParams).field("include_prior_dates").didnt_exist, + ) diff --git a/airflow-core/src/airflow/api_fastapi/gunicorn_config.py b/airflow-core/src/airflow/api_fastapi/gunicorn_config.py deleted file mode 100644 index f2c17eef76d4f..0000000000000 --- a/airflow-core/src/airflow/api_fastapi/gunicorn_config.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import setproctitle - -from airflow import settings - - -def post_worker_init(_): - """ - Set process title. - - This is used by airflow.cli.commands.api_server_command to track the status of the worker. - """ - old_title = setproctitle.getproctitle() - setproctitle.setproctitle(settings.GUNICORN_WORKER_READY_PREFIX + old_title) diff --git a/airflow-core/src/airflow/api_fastapi/logging/decorators.py b/airflow-core/src/airflow/api_fastapi/logging/decorators.py index 1e5ad9fbd930e..ed4a8554d959a 100644 --- a/airflow-core/src/airflow/api_fastapi/logging/decorators.py +++ b/airflow-core/src/airflow/api_fastapi/logging/decorators.py @@ -19,15 +19,13 @@ import itertools import json import logging -from typing import Annotated import pendulum -from fastapi import Depends, Request +from fastapi import Request from pendulum.parsing.exceptions import ParserError -from airflow.api_fastapi.auth.managers.models.base_user import BaseUser from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.core_api.security import get_user_with_exception_handling +from airflow.api_fastapi.core_api.security import GetUserDep from airflow.models import Log from airflow.sdk.execution_time import secrets_masker @@ -40,9 +38,12 @@ def _mask_connection_fields(extra_fields): for k, v in extra_fields.items(): if k == "extra" and v: try: - extra = json.loads(v) - extra = {k: secrets_masker.redact(v, k) for k, v in extra.items()} - result[k] = dict(extra) + parsed_extra = json.loads(v) + if isinstance(parsed_extra, dict): + masked_extra = {ek: secrets_masker.redact(ev, ek) for ek, ev in parsed_extra.items()} + result[k] = masked_extra + else: + result[k] = "Expected JSON object in `extra` field, got non-dict JSON" except json.JSONDecodeError: result[k] = "Encountered non-JSON in `extra` field" else: @@ -76,7 +77,7 @@ def action_logging(event: str | None = None): async def log_action( request: Request, session: SessionDep, - user: Annotated[BaseUser, Depends(get_user_with_exception_handling)], + user: GetUserDep, ): """Log user actions.""" event_name = event or request.scope["endpoint"].__name__ @@ -88,9 +89,9 @@ async def log_action( user_name = user.get_name() user_display = user.get_name() - hasJsonBody = "application/json" in request.headers.get("content-type", "") and await request.body() + has_json_body = "application/json" in request.headers.get("content-type", "") and await request.body() - if hasJsonBody: + if has_json_body: request_body = await request.json() masked_body_json = {k: secrets_masker.redact(v, k) for k, v in request_body.items()} else: @@ -115,13 +116,13 @@ async def log_action( } if "variable" in event_name: extra_fields = _mask_variable_fields( - {k: v for k, v in request_body.items()} if hasJsonBody else extra_fields + {k: v for k, v in request_body.items()} if has_json_body else extra_fields ) elif "connection" in event_name: extra_fields = _mask_connection_fields( - {k: v for k, v in request_body.items()} if hasJsonBody else extra_fields + {k: v for k, v in request_body.items()} if has_json_body else extra_fields ) - elif 
hasJsonBody: + elif has_json_body: extra_fields = {**extra_fields, **masked_body_json} params = { @@ -129,7 +130,7 @@ async def log_action( **request.path_params, } - if hasJsonBody: + if has_json_body: params.update(masked_body_json) if params and "is_paused" in params: extra_fields["is_paused"] = params["is_paused"] == "false" diff --git a/airflow-core/src/airflow/assets/manager.py b/airflow-core/src/airflow/assets/manager.py index 36882e060cbf5..a00c7cae27d1d 100644 --- a/airflow-core/src/airflow/assets/manager.py +++ b/airflow-core/src/airflow/assets/manager.py @@ -130,7 +130,7 @@ def register_asset_change( .options( joinedload(AssetModel.active), joinedload(AssetModel.aliases), - joinedload(AssetModel.consuming_dags).joinedload(DagScheduleAssetReference.dag), + joinedload(AssetModel.scheduled_dags).joinedload(DagScheduleAssetReference.dag), ) ) if not asset_model: @@ -161,7 +161,7 @@ def register_asset_change( session.flush() # Ensure the event is written earlier than DDRQ entries below. dags_to_queue_from_asset = { - ref.dag for ref in asset_model.consuming_dags if not ref.dag.is_stale and not ref.dag.is_paused + ref.dag for ref in asset_model.scheduled_dags if not ref.dag.is_stale and not ref.dag.is_paused } dags_to_queue_from_asset_alias = set() @@ -170,7 +170,7 @@ def register_asset_change( select(AssetAliasModel) .where(AssetAliasModel.name.in_(source_alias_names)) .options( - joinedload(AssetAliasModel.consuming_dags).joinedload(DagScheduleAssetAliasReference.dag) + joinedload(AssetAliasModel.scheduled_dags).joinedload(DagScheduleAssetAliasReference.dag) ) ).unique() @@ -180,7 +180,7 @@ def register_asset_change( dags_to_queue_from_asset_alias |= { alias_ref.dag - for alias_ref in asset_alias_model.consuming_dags + for alias_ref in asset_alias_model.scheduled_dags if not alias_ref.dag.is_stale and not alias_ref.dag.is_paused } diff --git a/airflow-core/src/airflow/callbacks/callback_requests.py b/airflow-core/src/airflow/callbacks/callback_requests.py index 8cf8c77035737..611f9ac7b4d7f 100644 --- a/airflow-core/src/airflow/callbacks/callback_requests.py +++ b/airflow-core/src/airflow/callbacks/callback_requests.py @@ -16,7 +16,7 @@ # under the License. 
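# [Editor's note] A minimal, self-contained sketch of the hardened `extra`
# masking introduced in logging/decorators.py above: non-dict JSON no longer
# raises, it is reported as a string instead. `_redact` is a hypothetical
# stand-in for airflow.sdk.execution_time.secrets_masker.redact.
import json


def _redact(value, key=None):
    return "***" if key and "password" in str(key).lower() else value


def mask_extra(extra_json: str):
    try:
        parsed = json.loads(extra_json)
    except json.JSONDecodeError:
        return "Encountered non-JSON in `extra` field"
    if not isinstance(parsed, dict):
        return "Expected JSON object in `extra` field, got non-dict JSON"
    return {k: _redact(v, k) for k, v in parsed.items()}


print(mask_extra('{"password": "s3cret", "region": "eu-west-1"}'))  # password masked
print(mask_extra('["not", "a", "dict"]'))  # reported, not raised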
 from __future__ import annotations

-from typing import TYPE_CHECKING, Annotated, Literal, Union
+from typing import TYPE_CHECKING, Annotated, Literal

 from pydantic import BaseModel, Field

@@ -61,6 +61,8 @@ class TaskCallbackRequest(BaseCallbackRequest):
     """Simplified Task Instance representation"""
     task_callback_type: TaskInstanceState | None = None
     """Whether on success, on failure, on retry"""
+    context_from_server: ti_datamodel.TIRunContext | None = None
+    """Task execution context from the Server"""
     type: Literal["TaskCallbackRequest"] = "TaskCallbackRequest"

     @property
@@ -75,17 +77,25 @@ def is_failure_callback(self) -> bool:
     }


+class DagRunContext(BaseModel):
+    """Class to pass context info from the server to build an execution context object."""
+
+    dag_run: ti_datamodel.DagRun | None = None
+    last_ti: ti_datamodel.TaskInstance | None = None
+
+
 class DagCallbackRequest(BaseCallbackRequest):
     """A Class with information about the success/failure DAG callback to be executed."""

     dag_id: str
     run_id: str
+    context_from_server: DagRunContext | None = None
     is_failure_callback: bool | None = True
     """Flag to determine whether it is a Failure Callback or Success Callback"""
     type: Literal["DagCallbackRequest"] = "DagCallbackRequest"


 CallbackRequest = Annotated[
-    Union[DagCallbackRequest, TaskCallbackRequest],
+    DagCallbackRequest | TaskCallbackRequest,
     Field(discriminator="type"),
 ]
diff --git a/airflow-core/src/airflow/callbacks/pipe_callback_sink.py b/airflow-core/src/airflow/callbacks/pipe_callback_sink.py
deleted file mode 100644
index 7f6a2883743df..0000000000000
--- a/airflow-core/src/airflow/callbacks/pipe_callback_sink.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-import contextlib
-from typing import TYPE_CHECKING, Callable
-
-from airflow.callbacks.base_callback_sink import BaseCallbackSink
-
-if TYPE_CHECKING:
-    from multiprocessing.connection import Connection as MultiprocessingConnection
-
-    from airflow.callbacks.callback_requests import CallbackRequest
-
-
-class PipeCallbackSink(BaseCallbackSink):
-    """
-    Class for sending callbacks to DagProcessor using pipe.
-
-    It is used when DagProcessor is not executed in standalone mode.
-    """
-
-    def __init__(self, get_sink_pipe: Callable[[], MultiprocessingConnection]):
-        self._get_sink_pipe = get_sink_pipe
-
-    def send(self, callback: CallbackRequest):
-        """
-        Send information about the callback to be executed by Pipe.
-
-        :param callback: Callback request to be executed.
-        """
-        with contextlib.suppress(ConnectionError):
-            # If this died cos of an error then we will noticed and restarted
-            # when harvest_serialized_dags calls _heartbeat_manager.
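# [Editor's note] A hedged sketch of the discriminated-union pattern adopted for
# CallbackRequest in callback_requests.py above: Pydantic dispatches on the
# Literal `type` field when validating. The two models here are simplified
# stand-ins for the real request classes.
from typing import Annotated, Literal

from pydantic import BaseModel, Field, TypeAdapter


class DagCb(BaseModel):
    type: Literal["DagCallbackRequest"] = "DagCallbackRequest"
    dag_id: str


class TaskCb(BaseModel):
    type: Literal["TaskCallbackRequest"] = "TaskCallbackRequest"
    ti_id: str


Callback = Annotated[DagCb | TaskCb, Field(discriminator="type")]
parsed = TypeAdapter(Callback).validate_python(
    {"type": "DagCallbackRequest", "dag_id": "example"}
)
assert isinstance(parsed, DagCb)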
- self._get_sink_pipe().send(callback) diff --git a/airflow-core/src/airflow/cli/cli_config.py b/airflow-core/src/airflow/cli/cli_config.py index 1122033dc94cd..dce474e0b0596 100644 --- a/airflow-core/src/airflow/cli/cli_config.py +++ b/airflow-core/src/airflow/cli/cli_config.py @@ -24,17 +24,17 @@ import json import os import textwrap -from collections.abc import Iterable -from typing import Callable, NamedTuple, Union +from collections.abc import Callable, Iterable +from typing import NamedTuple import lazy_object_proxy +from airflow._shared.timezones.timezone import parse as parsedate from airflow.cli.commands.legacy_commands import check_legacy_command from airflow.configuration import conf from airflow.utils.cli import ColorMode from airflow.utils.module_loading import import_string from airflow.utils.state import DagRunState, JobState -from airflow.utils.timezone import parse as parsedate BUILD_DOCS = "BUILDING_AIRFLOW_DOCS" in os.environ @@ -177,11 +177,6 @@ def string_lower_type(val): type=str, default="[CWD]" if BUILD_DOCS else os.getcwd(), ) -ARG_DRY_RUN = Arg( - ("-n", "--dry-run"), - help="Perform a dry run for each task. Only renders Template Fields for each task, nothing else", - action="store_true", -) ARG_PID = Arg(("--pid",), help="PID file location", nargs="?") ARG_DAEMON = Arg( ("-D", "--daemon"), help="Daemonize instead of running in the foreground", action="store_true" @@ -233,6 +228,13 @@ def string_lower_type(val): action="store_true", ) +# list_dags +ARG_LIST_LOCAL = Arg( + ("-l", "--local"), + action="store_true", + help="Shows local parsed DAGs and their import errors, ignores content serialized in DB", +) + # list_dag_runs ARG_NO_BACKFILL = Arg( ("--no-backfill",), help="filter all the backfill dagruns given the dag id", action="store_true" @@ -338,6 +340,14 @@ def string_lower_type(val): ), choices=("none", "completed", "failed"), ) +ARG_BACKFILL_RUN_ON_LATEST_VERSION = Arg( + ("--run-on-latest-version",), + help=( + "(Experimental) If set, the backfill will run tasks using the latest bundle version instead of " + "the version that was active when the original Dag run was created." + ), + action="store_true", +) # misc @@ -348,6 +358,13 @@ def string_lower_type(val): ) # test_dag +ARG_DAGFILE_PATH = Arg( + ( + "-f", + "--dagfile-path", + ), + help="Path to the dag file. Can be absolute or relative to current directory", +) ARG_SHOW_DAGRUN = Arg( ("--show-dagrun",), help=( @@ -474,6 +491,15 @@ def string_lower_type(val): type=positive_int(allow_zero=False), help="Wait time between retries in seconds", ) +ARG_DB_BATCH_SIZE = Arg( + ("--batch-size",), + default=None, + type=positive_int(allow_zero=False), + help=( + "Maximum number of rows to delete or archive in a single transaction.\n" + "Lower values reduce long-running locks but increase the number of batches." 
+ ), +) # pool ARG_POOL_NAME = Arg(("pool",), metavar="NAME", help="Pool name") @@ -514,7 +540,7 @@ def string_lower_type(val): ) ARG_DESERIALIZE_JSON = Arg(("-j", "--json"), help="Deserialize JSON variable", action="store_true") ARG_SERIALIZE_JSON = Arg(("-j", "--json"), help="Serialize JSON variable", action="store_true") -ARG_VAR_IMPORT = Arg(("file",), help="Import variables from JSON file") +ARG_VAR_IMPORT = Arg(("file",), help="Import variables from .env, .json, .yaml or .yml file") ARG_VAR_EXPORT = Arg( ("file",), help="Export all variables to JSON file", @@ -585,6 +611,12 @@ def string_lower_type(val): default=False, ) +ARG_DB_MANAGER_PATH = Arg( + ("import_path",), + help="The import path of the database manager to use. ", + default=None, +) + # api-server ARG_API_SERVER_PORT = Arg( ("-p", "--port"), @@ -609,10 +641,10 @@ def string_lower_type(val): default=conf.get("api", "host"), help="Set the host on which to run the API server", ) -ARG_API_SERVER_ACCESS_LOGFILE = Arg( - ("-A", "--access-logfile"), - default=conf.get("api", "access_logfile"), - help="The logfile to store the access log. Use '-' to print to stdout", +ARG_API_SERVER_LOG_CONFIG = Arg( + ("--log-config",), + default=conf.get("api", "log_config", fallback=None), + help="(Optional) Path to the logging configuration file for the uvicorn server. If not set, the default uvicorn logging configuration will be used.", ) ARG_API_SERVER_APPS = Arg( ("--apps",), @@ -908,7 +940,7 @@ class GroupCommand(NamedTuple): epilog: str | None = None -CLICommand = Union[ActionCommand, GroupCommand] +CLICommand = ActionCommand | GroupCommand ASSETS_COMMANDS = ( ActionCommand( @@ -944,6 +976,7 @@ class GroupCommand(NamedTuple): ARG_RUN_BACKWARDS, ARG_MAX_ACTIVE_RUNS, ARG_BACKFILL_REPROCESS_BEHAVIOR, + ARG_BACKFILL_RUN_ON_LATEST_VERSION, ARG_BACKFILL_DRY_RUN, ), ), @@ -959,13 +992,13 @@ class GroupCommand(NamedTuple): name="list", help="List all the DAGs", func=lazy_load_command("airflow.cli.commands.dag_command.dag_list_dags"), - args=(ARG_OUTPUT, ARG_VERBOSE, ARG_DAG_LIST_COLUMNS, ARG_BUNDLE_NAME), + args=(ARG_OUTPUT, ARG_VERBOSE, ARG_DAG_LIST_COLUMNS, ARG_BUNDLE_NAME, ARG_LIST_LOCAL), ), ActionCommand( name="list-import-errors", help="List all the DAGs that have import errors", func=lazy_load_command("airflow.cli.commands.dag_command.dag_list_import_errors"), - args=(ARG_BUNDLE_NAME, ARG_OUTPUT, ARG_VERBOSE), + args=(ARG_BUNDLE_NAME, ARG_OUTPUT, ARG_VERBOSE, ARG_LIST_LOCAL), ), ActionCommand( name="report", @@ -1117,6 +1150,16 @@ class GroupCommand(NamedTuple): description=( "Execute one single DagRun for a given DAG and logical date.\n" "\n" + "You can test a DAG in three ways:\n" + "1. Using default bundle:\n" + " airflow dags test \n" + "\n" + "2. Using a specific bundle if multiple DAG bundles are configured:\n" + " airflow dags test --bundle-name (or -B )\n" + "\n" + "3. 
Using a specific DAG file:\n" + " airflow dags test --dagfile-path (or -f )\n" + "\n" "The --imgcat-dagrun option only works in iTerm.\n" "\n" "For more information, see: https://www.iterm2.com/documentation-images.html\n" @@ -1137,6 +1180,8 @@ class GroupCommand(NamedTuple): args=( ARG_DAG_ID, ARG_LOGICAL_DATE_OPTIONAL, + ARG_BUNDLE_NAME, + ARG_DAGFILE_PATH, ARG_CONF, ARG_SHOW_DAGRUN, ARG_IMGCAT_DAGRUN, @@ -1244,7 +1289,6 @@ class GroupCommand(NamedTuple): ARG_TASK_ID, ARG_LOGICAL_DATE_OR_RUN_ID_OPTIONAL, ARG_BUNDLE_NAME, - ARG_DRY_RUN, ARG_TASK_PARAMS, ARG_POST_MORTEM, ARG_ENV_VARS, @@ -1426,6 +1470,7 @@ class GroupCommand(NamedTuple): ARG_VERBOSE, ARG_YES, ARG_DB_SKIP_ARCHIVE, + ARG_DB_BATCH_SIZE, ), ), ActionCommand( @@ -1590,6 +1635,12 @@ class GroupCommand(NamedTuple): func=lazy_load_command("airflow.cli.commands.provider_command.executors_list"), args=(ARG_OUTPUT, ARG_VERBOSE), ), + ActionCommand( + name="queues", + help="Get information about queues provided", + func=lazy_load_command("airflow.cli.commands.provider_command.queues_list"), + args=(ARG_OUTPUT, ARG_VERBOSE), + ), ActionCommand( name="notifications", help="Get information about notifications provided", @@ -1699,6 +1750,59 @@ class GroupCommand(NamedTuple): ), ) +DB_MANAGERS_COMMANDS = ( + ActionCommand( + name="reset", + help="Burn down and rebuild the specified external database", + func=lazy_load_command("airflow.cli.commands.db_manager_command.resetdb"), + args=(ARG_DB_MANAGER_PATH, ARG_YES, ARG_DB_SKIP_INIT, ARG_VERBOSE), + ), + ActionCommand( + name="migrate", + help="Migrates the specified external database to the latest version", + description=( + "Migrate the schema of the metadata database. " + "Create the database if it does not exist " + "To print but not execute commands, use option ``--show-sql-only``. " + "If using options ``--from-revision`` or ``--from-version``, you must also use " + "``--show-sql-only``, because if actually *running* migrations, we should only " + "migrate from the *current* Alembic revision." + ), + func=lazy_load_command("airflow.cli.commands.db_manager_command.migratedb"), + args=( + ARG_DB_MANAGER_PATH, + ARG_DB_REVISION__UPGRADE, + ARG_DB_VERSION__UPGRADE, + ARG_DB_SQL_ONLY, + ARG_DB_FROM_REVISION, + ARG_DB_FROM_VERSION, + ARG_VERBOSE, + ), + ), + ActionCommand( + name="downgrade", + help="Downgrade the schema of the external metadata database.", + description=( + "Downgrade the schema of the metadata database. " + "You must provide either `--to-revision` or `--to-version`. " + "To print but not execute commands, use option `--show-sql-only`. " + "If using options `--from-revision` or `--from-version`, you must also use `--show-sql-only`, " + "because if actually *running* migrations, we should only migrate from the *current* Alembic " + "revision." 
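# [Editor's note] Hedged usage sketch for the new `db-manager` command group,
# assembled from the argument definitions and descriptions above (the
# <import_path> value is illustrative; pass the dotted path of your external
# database manager):
#     airflow db-manager reset <import_path> --yes
#     airflow db-manager migrate <import_path> --show-sql-only
#     airflow db-manager downgrade <import_path> --to-version <version> --show-sql-only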
+ ), + func=lazy_load_command("airflow.cli.commands.db_manager_command.downgrade"), + args=( + ARG_DB_MANAGER_PATH, + ARG_DB_REVISION__DOWNGRADE, + ARG_DB_VERSION__DOWNGRADE, + ARG_DB_SQL_ONLY, + ARG_YES, + ARG_DB_FROM_REVISION, + ARG_DB_FROM_VERSION, + ARG_VERBOSE, + ), + ), +) core_commands: list[CLICommand] = [ GroupCommand( name="dags", @@ -1769,7 +1873,7 @@ class GroupCommand(NamedTuple): ARG_DAEMON, ARG_STDOUT, ARG_STDERR, - ARG_API_SERVER_ACCESS_LOGFILE, + ARG_API_SERVER_LOG_CONFIG, ARG_API_SERVER_APPS, ARG_LOG_FILE, ARG_SSL_CERT, @@ -1888,6 +1992,11 @@ class GroupCommand(NamedTuple): func=lazy_load_command("airflow.cli.commands.standalone_command.standalone"), args=(), ), + GroupCommand( + name="db-manager", + help="Manage externally connected database managers", + subcommands=DB_MANAGERS_COMMANDS, + ), ] diff --git a/airflow-core/src/airflow/cli/commands/api_server_command.py b/airflow-core/src/airflow/cli/commands/api_server_command.py index 343890aa8e4e8..ccc79a1093804 100644 --- a/airflow-core/src/airflow/cli/commands/api_server_command.py +++ b/airflow-core/src/airflow/cli/commands/api_server_command.py @@ -21,33 +21,113 @@ import logging import os import subprocess +import sys import textwrap +from collections.abc import Callable +from functools import wraps +from typing import TYPE_CHECKING, TypeVar import uvicorn -from gunicorn.util import daemonize -from setproctitle import setproctitle from airflow import settings +from airflow.cli.commands.daemon_utils import run_command_with_daemon_option from airflow.exceptions import AirflowConfigException +from airflow.typing_compat import ParamSpec from airflow.utils import cli as cli_utils from airflow.utils.providers_configuration_loader import providers_configuration_loaded +PS = ParamSpec("PS") +RT = TypeVar("RT") + log = logging.getLogger(__name__) +if TYPE_CHECKING: + from argparse import Namespace # This shouldn't be necessary but there seems to be an issue in uvloop that causes bad file descriptor # errors when shutting down workers. Despite the 'closed' status of the issue it is not solved, # more info here: https://github.com/benoitc/gunicorn/issues/1877#issuecomment-1911136399 +def _run_api_server(args, apps: str, num_workers: int, worker_timeout: int, proxy_headers: bool): + """Run the API server.""" + log.info( + textwrap.dedent( + f"""\ + Running the uvicorn with: + Apps: {apps} + Workers: {num_workers} + Host: {args.host}:{args.port} + Timeout: {worker_timeout} + Logfiles: {args.log_file or "-"} + =================================================================""" + ) + ) + # get ssl cert and key filepaths here instead of passing them as arguments to reduce the number of arguments + ssl_cert, ssl_key = _get_ssl_cert_and_key_filepaths(args) + + # setproctitle causes issue on Mac OS: https://github.com/benoitc/gunicorn/issues/3021 + os_type = sys.platform + if os_type == "darwin": + log.debug("Mac OS detected, skipping setproctitle") + else: + from setproctitle import setproctitle + + setproctitle(f"airflow api_server -- host:{args.host} port:{args.port}") + + uvicorn_kwargs = { + "host": args.host, + "port": args.port, + "workers": num_workers, + "timeout_keep_alive": worker_timeout, + "timeout_graceful_shutdown": worker_timeout, + "ssl_keyfile": ssl_key, + "ssl_certfile": ssl_cert, + "access_log": True, + "proxy_headers": proxy_headers, + } + # Only set the log_config if it is provided, otherwise use the default uvicorn logging configuration. 
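# [Editor's note] A hedged, standalone sketch of the uvicorn invocation this
# function assembles (values illustrative; per the uvicorn API, `log_config`
# accepts a dict or a path to an .ini/.json/.yaml file, and omitting it keeps
# uvicorn's default logging):
import uvicorn

uvicorn.run(
    "airflow.api_fastapi.main:app",
    host="127.0.0.1",
    port=8080,
    workers=2,
    timeout_keep_alive=120,
    timeout_graceful_shutdown=120,
    proxy_headers=True,
    log_config="/path/to/uvicorn-logging.yaml",  # assumption: an existing logging config file
)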
+ if args.log_config and args.log_config != "-": + # The [api/log_config] is migrated from [api/access_logfile] and [api/access_logfile] defaults to "-" for stdout for Gunicorn. + # So we need to check if the log_config is set to "-" or not; if it is set to "-", we regard it as not set. + uvicorn_kwargs["log_config"] = args.log_config + + uvicorn.run( + "airflow.api_fastapi.main:app", + **uvicorn_kwargs, + ) + + +def with_api_apps_env(func: Callable[[Namespace], RT]) -> Callable[[Namespace], RT]: + """We use AIRFLOW_API_APPS to specify which apps are initialized in the API server.""" + + @wraps(func) + def wrapper(args: Namespace) -> RT: + apps: str = args.apps + original_value = os.environ.get("AIRFLOW_API_APPS") + try: + log.debug("Setting AIRFLOW_API_APPS to: %s", apps) + os.environ["AIRFLOW_API_APPS"] = apps + return func(args) + finally: + if original_value is not None: + os.environ["AIRFLOW_API_APPS"] = original_value + log.debug("Restored AIRFLOW_API_APPS to: %s", original_value) + else: + os.environ.pop("AIRFLOW_API_APPS", None) + log.debug("Removed AIRFLOW_API_APPS from environment") + + return wrapper + + @cli_utils.action_cli @providers_configuration_loaded -def api_server(args): +@with_api_apps_env +def api_server(args: Namespace): """Start Airflow API server.""" print(settings.HEADER) apps = args.apps - access_logfile = args.access_logfile or "-" num_workers = args.workers worker_timeout = args.worker_timeout proxy_headers = args.proxy_headers @@ -74,46 +154,25 @@ def api_server(args): if args.proxy_headers: run_args.append("--proxy-headers") - # There is no way to pass the apps to airflow/api_fastapi/main.py in the development mode - # because fastapi dev command does not accept any additional arguments - # so environment variable is being used to pass it - os.environ["AIRFLOW_API_APPS"] = apps + if args.log_config and args.log_config != "-": + run_args.extend(["--log-config", args.log_config]) + with subprocess.Popen( run_args, close_fds=True, ) as process: process.wait() - os.environ.pop("AIRFLOW_API_APPS") else: - if args.daemon: - daemonize() - log.info("Daemonized the API server process PID: %s", os.getpid()) - - log.info( - textwrap.dedent( - f"""\ - Running the uvicorn with: - Apps: {apps} - Workers: {num_workers} - Host: {args.host}:{args.port} - Timeout: {worker_timeout} - Logfiles: {access_logfile} - =================================================================""" - ) - ) - ssl_cert, ssl_key = _get_ssl_cert_and_key_filepaths(args) - setproctitle(f"airflow api_server -- host:{args.host} port:{args.port}") - uvicorn.run( - "airflow.api_fastapi.main:app", - host=args.host, - port=args.port, - workers=num_workers, - timeout_keep_alive=worker_timeout, - timeout_graceful_shutdown=worker_timeout, - ssl_keyfile=ssl_key, - ssl_certfile=ssl_cert, - access_log=access_logfile, - proxy_headers=proxy_headers, + run_command_with_daemon_option( + args=args, + process_name="api_server", + callback=lambda: _run_api_server( + args=args, + apps=apps, + num_workers=num_workers, + worker_timeout=worker_timeout, + proxy_headers=proxy_headers, + ), ) diff --git a/airflow-core/src/airflow/cli/commands/asset_command.py b/airflow-core/src/airflow/cli/commands/asset_command.py index 02fc9f7bd98d9..3b4685a0d1bf2 100644 --- a/airflow-core/src/airflow/cli/commands/asset_command.py +++ b/airflow-core/src/airflow/cli/commands/asset_command.py @@ -26,8 +26,10 @@ from airflow.api_fastapi.core_api.datamodels.assets import AssetAliasResponse, AssetResponse from 
airflow.api_fastapi.core_api.datamodels.dag_run import DAGRunResponse from airflow.cli.simple_table import AirflowConsole +from airflow.exceptions import AirflowConfigException from airflow.models.asset import AssetAliasModel, AssetModel, TaskOutletAssetReference from airflow.utils import cli as cli_utils +from airflow.utils.platform import getuser from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.types import DagRunTriggeredByType @@ -149,7 +151,14 @@ def asset_materialize(args, *, session: Session = NEW_SESSION) -> None: if next(dag_id_it, None) is not None: raise SystemExit(f"More than one DAG materializes asset with {select_message}.") - dagrun = trigger_dag(dag_id=dag_id, triggered_by=DagRunTriggeredByType.CLI, session=session) + try: + user = getuser() + except AirflowConfigException as e: + log.warning("Failed to get user name from os: %s, not setting the triggering user", e) + user = None + dagrun = trigger_dag( + dag_id=dag_id, triggered_by=DagRunTriggeredByType.CLI, triggering_user_name=user, session=session + ) if dagrun is not None: data = [DAGRunResponse.model_validate(dagrun).model_dump(mode="json")] else: diff --git a/airflow-core/src/airflow/cli/commands/backfill_command.py b/airflow-core/src/airflow/cli/commands/backfill_command.py index 06deb50cdead0..444bc35aea34a 100644 --- a/airflow-core/src/airflow/cli/commands/backfill_command.py +++ b/airflow-core/src/airflow/cli/commands/backfill_command.py @@ -22,12 +22,16 @@ from airflow import settings from airflow.cli.simple_table import AirflowConsole +from airflow.exceptions import AirflowConfigException from airflow.models.backfill import ReprocessBehavior, _create_backfill, _do_dry_run from airflow.utils import cli as cli_utils from airflow.utils.cli import sigint_handler +from airflow.utils.platform import getuser from airflow.utils.providers_configuration_loader import providers_configuration_loaded from airflow.utils.session import create_session +log = logging.getLogger(__name__) + @cli_utils.action_cli @providers_configuration_loaded @@ -53,6 +57,7 @@ def create_backfill(args) -> None: reverse=args.run_backwards, dag_run_conf=args.dag_run_conf, reprocess_behavior=reprocess_behavior, + run_on_latest_version=args.run_on_latest_version, ) for k, v in params.items(): console.print(f" - {k} = {v}") @@ -70,6 +75,11 @@ def create_backfill(args) -> None: console.print(f" - {d}") return + try: + user = getuser() + except AirflowConfigException as e: + log.warning("Failed to get user name from os: %s, not setting the triggering user", e) + user = None _create_backfill( dag_id=args.dag_id, from_date=args.from_date, @@ -77,5 +87,7 @@ def create_backfill(args) -> None: max_active_runs=args.max_active_runs, reverse=args.run_backwards, dag_run_conf=args.dag_run_conf, + triggering_user_name=user, reprocess_behavior=reprocess_behavior, + run_on_latest_version=args.run_on_latest_version, ) diff --git a/airflow-core/src/airflow/cli/commands/config_command.py b/airflow-core/src/airflow/cli/commands/config_command.py index cdf07f9c2c20f..0c30ce111af65 100644 --- a/airflow-core/src/airflow/cli/commands/config_command.py +++ b/airflow-core/src/airflow/cli/commands/config_command.py @@ -161,7 +161,6 @@ def message(self) -> str | None: old_default="SequentialExecutor", new_default="LocalExecutor", was_removed=False, - breaking=True, ), ConfigChange( config=ConfigParameter("core", "hostname"), @@ -346,6 +345,18 @@ def message(self) -> str | None: ), ConfigChange( config=ConfigParameter("webserver", 
"cookie_samesite"), + renamed_to=ConfigParameter("fab", "cookie_samesite"), + breaking=True, + ), + ConfigChange( + config=ConfigParameter("webserver", "audit_view_included_events"), + ), + ConfigChange( + config=ConfigParameter("webserver", "audit_view_excluded_events"), + ), + ConfigChange( + config=ConfigParameter("webserver", "instance_name"), + renamed_to=ConfigParameter("api", "instance_name"), ), ConfigChange( config=ConfigParameter("webserver", "update_fab_perms"), @@ -379,24 +390,53 @@ def message(self) -> str | None: config=ConfigParameter("webserver", "session_lifetime_minutes"), renamed_to=ConfigParameter("fab", "session_lifetime_minutes"), ), + ConfigChange( + config=ConfigParameter("webserver", "access_denied_message"), + renamed_to=ConfigParameter("fab", "access_denied_message"), + ), + ConfigChange( + config=ConfigParameter("webserver", "expose_hostname"), + renamed_to=ConfigParameter("fab", "expose_hostname"), + ), + ConfigChange( + config=ConfigParameter("webserver", "navbar_color"), + renamed_to=ConfigParameter("fab", "navbar_color"), + ), + ConfigChange( + config=ConfigParameter("webserver", "navbar_text_color"), + renamed_to=ConfigParameter("fab", "navbar_text_color"), + ), + ConfigChange( + config=ConfigParameter("webserver", "navbar_hover_color"), + renamed_to=ConfigParameter("fab", "navbar_hover_color"), + ), + ConfigChange( + config=ConfigParameter("webserver", "navbar_text_hover_color"), + renamed_to=ConfigParameter("fab", "navbar_text_hover_color"), + ), + ConfigChange( + config=ConfigParameter("webserver", "x_frame_enabled"), + was_deprecated=False, + ), ConfigChange( config=ConfigParameter("webserver", "base_url"), renamed_to=ConfigParameter("api", "base_url"), ), + ConfigChange( + config=ConfigParameter("webserver", "secret_key"), + renamed_to=ConfigParameter("api", "secret_key"), + ), ConfigChange( config=ConfigParameter("webserver", "web_server_host"), renamed_to=ConfigParameter("api", "host"), - breaking=True, ), ConfigChange( config=ConfigParameter("webserver", "web_server_port"), renamed_to=ConfigParameter("api", "port"), - breaking=True, ), ConfigChange( config=ConfigParameter("webserver", "workers"), renamed_to=ConfigParameter("api", "workers"), - breaking=True, ), ConfigChange( config=ConfigParameter("webserver", "web_server_worker_timeout"), @@ -405,17 +445,22 @@ def message(self) -> str | None: ConfigChange( config=ConfigParameter("webserver", "web_server_ssl_cert"), renamed_to=ConfigParameter("api", "ssl_cert"), - breaking=True, ), ConfigChange( config=ConfigParameter("webserver", "web_server_ssl_key"), renamed_to=ConfigParameter("api", "ssl_key"), - breaking=True, ), ConfigChange( config=ConfigParameter("webserver", "access_logfile"), renamed_to=ConfigParameter("api", "access_logfile"), - breaking=True, + ), + ConfigChange( + config=ConfigParameter("webserver", "grid_view_sorting_order"), + renamed_to=ConfigParameter("api", "grid_view_sorting_order"), + ), + ConfigChange( + config=ConfigParameter("webserver", "enable_swagger_ui"), + renamed_to=ConfigParameter("api", "enable_swagger_ui"), ), ConfigChange( config=ConfigParameter("webserver", "error_logfile"), @@ -466,31 +511,69 @@ def message(self) -> str | None: was_deprecated=False, ), ConfigChange( - config=ConfigParameter("webserver", "enable_proxy_fix"), + config=ConfigParameter("webserver", "log_fetch_timeout_sec"), + renamed_to=ConfigParameter("api", "log_fetch_timeout_sec"), + ), + ConfigChange( + config=ConfigParameter("webserver", "hide_paused_dags_by_default"), + 
renamed_to=ConfigParameter("api", "hide_paused_dags_by_default"), + ), + ConfigChange( + config=ConfigParameter("webserver", "page_size"), + renamed_to=ConfigParameter("api", "page_size"), + ), + ConfigChange( + config=ConfigParameter("webserver", "default_wrap"), + renamed_to=ConfigParameter("api", "default_wrap"), + ), + ConfigChange( + config=ConfigParameter("webserver", "require_confirmation_dag_change"), + renamed_to=ConfigParameter("api", "require_confirmation_dag_change"), + ), + ConfigChange( + config=ConfigParameter("webserver", "instance_name_has_markup"), was_deprecated=False, ), ConfigChange( - config=ConfigParameter("webserver", "proxy_fix_x_for"), + config=ConfigParameter("webserver", "warn_deployment_exposure"), was_deprecated=False, ), + ConfigChange( + config=ConfigParameter("webserver", "auto_refresh_interval"), + renamed_to=ConfigParameter("api", "auto_refresh_interval"), + ), + ConfigChange( + config=ConfigParameter("webserver", "enable_proxy_fix"), + renamed_to=ConfigParameter("fab", "enable_proxy_fix"), + ), + ConfigChange( + config=ConfigParameter("webserver", "proxy_fix_x_for"), + renamed_to=ConfigParameter("fab", "proxy_fix_x_for"), + ), ConfigChange( config=ConfigParameter("webserver", "proxy_fix_x_proto"), - was_deprecated=False, + renamed_to=ConfigParameter("fab", "proxy_fix_x_proto"), ), ConfigChange( config=ConfigParameter("webserver", "proxy_fix_x_host"), - was_deprecated=False, + renamed_to=ConfigParameter("fab", "proxy_fix_x_host"), ), ConfigChange( config=ConfigParameter("webserver", "proxy_fix_x_port"), - was_deprecated=False, + renamed_to=ConfigParameter("fab", "proxy_fix_x_port"), ), ConfigChange( config=ConfigParameter("webserver", "proxy_fix_x_prefix"), - was_deprecated=False, + renamed_to=ConfigParameter("fab", "proxy_fix_x_prefix"), + ), + ConfigChange( + config=ConfigParameter("webserver", "expose_config"), + renamed_to=ConfigParameter("api", "expose_config"), ), ConfigChange( config=ConfigParameter("webserver", "cookie_secure"), + renamed_to=ConfigParameter("fab", "cookie_secure"), + breaking=True, was_deprecated=False, ), ConfigChange( @@ -569,7 +652,6 @@ def message(self) -> str | None: "If your DAGs rely on catchup behavior, not explicitly defined in the DAG definition, " "set this configuration parameter to `True` in the `scheduler` section of your `airflow.cfg` " "to enable the behavior from Airflow 2.x.", - breaking=True, ), ConfigChange( config=ConfigParameter("scheduler", "create_cron_data_intervals"), @@ -577,7 +659,6 @@ def message(self) -> str | None: old_default="True", new_default="False", was_removed=False, - breaking=True, ), ConfigChange( config=ConfigParameter("scheduler", "create_delta_data_intervals"), @@ -585,7 +666,6 @@ def message(self) -> str | None: old_default="True", new_default="False", was_removed=False, - breaking=True, ), ConfigChange( config=ConfigParameter("scheduler", "processor_poll_interval"), @@ -665,7 +745,6 @@ def message(self) -> str | None: ConfigChange( config=ConfigParameter("scheduler", "dag_dir_list_interval"), renamed_to=ConfigParameter("dag_processor", "refresh_interval"), - breaking=True, ), ConfigChange( config=ConfigParameter("scheduler", "local_task_job_heartbeat_sec"), @@ -722,7 +801,6 @@ def message(self) -> str | None: ConfigChange( config=ConfigParameter("triggerer", "default_capacity"), renamed_to=ConfigParameter("triggerer", "capacity"), - breaking=True, ), # email ConfigChange( @@ -993,7 +1071,7 @@ def update_config(args) -> None: console.print(f" - {change_msg}") if dry_run: console.print( 
- "[blue]Dry-run is mode enabled. To apply above airflow.cfg run the command " + "[blue]Dry-run mode is enabled. To apply above airflow.cfg run the command " "with `--fix`.[/blue]" ) else: diff --git a/airflow-core/src/airflow/cli/commands/connection_command.py b/airflow-core/src/airflow/cli/commands/connection_command.py index aace3f9c9aede..7fe430c05bfe3 100644 --- a/airflow-core/src/airflow/cli/commands/connection_command.py +++ b/airflow-core/src/airflow/cli/commands/connection_command.py @@ -33,7 +33,6 @@ from airflow.cli.utils import is_stdout, print_export_output from airflow.configuration import conf from airflow.exceptions import AirflowNotFoundException -from airflow.hooks.base import BaseHook from airflow.models import Connection from airflow.providers_manager import ProvidersManager from airflow.secrets.local_filesystem import load_connections_dict @@ -67,7 +66,7 @@ def _connection_mapper(conn: Connection) -> dict[str, Any]: def connections_get(args): """Get a connection.""" try: - conn = BaseHook.get_connection(args.conn_id) + conn = Connection.get_connection_from_secrets(args.conn_id) except AirflowNotFoundException: raise SystemExit("Connection not found.") AirflowConsole().print_as( @@ -369,7 +368,7 @@ def connections_test(args) -> None: print(f"Retrieving connection: {args.conn_id!r}") try: - conn = BaseHook.get_connection(args.conn_id) + conn = Connection.get_connection_from_secrets(args.conn_id) except AirflowNotFoundException: console.print("[bold yellow]\nConnection not found.\n") raise SystemExit(1) diff --git a/airflow-core/src/airflow/cli/commands/daemon_utils.py b/airflow-core/src/airflow/cli/commands/daemon_utils.py index e1c3690a657cb..c55c12b380461 100644 --- a/airflow-core/src/airflow/cli/commands/daemon_utils.py +++ b/airflow-core/src/airflow/cli/commands/daemon_utils.py @@ -18,7 +18,7 @@ import signal from argparse import Namespace -from typing import Callable +from collections.abc import Callable from daemon import daemon from daemon.pidfile import TimeoutPIDLockFile @@ -45,7 +45,7 @@ def run_command_with_daemon_option( :param callback: the actual command to run with or without daemon context :param should_setup_logging: if true, then a log file handler for the daemon process will be created :param umask: file access creation mask ("umask") to set for the process on daemon start - :param pid_file: if specified, this file path us used to store daemon process PID. + :param pid_file: if specified, this file path is used to store daemon process PID. If not specified, a file path is generated with the default pattern. 
""" if args.daemon: diff --git a/airflow-core/src/airflow/cli/commands/dag_command.py b/airflow-core/src/airflow/cli/commands/dag_command.py index e70127358f5e5..1fbba1d7e45a2 100644 --- a/airflow-core/src/airflow/cli/commands/dag_command.py +++ b/airflow-core/src/airflow/cli/commands/dag_command.py @@ -24,34 +24,29 @@ import json import logging import operator -import os import re import subprocess import sys -from pathlib import Path from typing import TYPE_CHECKING -from itsdangerous import URLSafeSerializer -from marshmallow import fields -from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field from sqlalchemy import func, select +from airflow._shared.timezones import timezone from airflow.api.client import get_current_api_client +from airflow.api_fastapi.core_api.datamodels.dags import DAGResponse from airflow.cli.simple_table import AirflowConsole from airflow.cli.utils import fetch_dag_run_from_run_id_or_logical_date_string -from airflow.configuration import conf from airflow.dag_processing.bundles.manager import DagBundlesManager -from airflow.exceptions import AirflowException +from airflow.exceptions import AirflowConfigException, AirflowException from airflow.jobs.job import Job -from airflow.models import DagBag, DagModel, DagRun, DagTag, TaskInstance -from airflow.models.dag import DAG +from airflow.models import DagBag, DagModel, DagRun, TaskInstance from airflow.models.errors import ParseImportError from airflow.models.serialized_dag import SerializedDagModel -from airflow.sdk.definitions._internal.dag_parsing_context import _airflow_parsing_context_manager -from airflow.utils import cli as cli_utils, timezone +from airflow.utils import cli as cli_utils from airflow.utils.cli import get_dag, suppress_logs_and_warning, validate_dag_bundle_arg from airflow.utils.dot_renderer import render_dag, render_dag_dependencies from airflow.utils.helpers import ask_yesno +from airflow.utils.platform import getuser from airflow.utils.providers_configuration_loader import providers_configuration_loaded from airflow.utils.session import NEW_SESSION, create_session, provide_session from airflow.utils.state import DagRunState @@ -60,67 +55,12 @@ from graphviz.dot import Dot from sqlalchemy.orm import Session + from airflow.models.dag import DAG from airflow.timetables.base import DataInterval -log = logging.getLogger(__name__) +DAG_DETAIL_FIELDS = {*DAGResponse.model_fields, *DAGResponse.model_computed_fields} -# TODO: To clean up api_connexion, we need to move the below 2 classes to this file until migrated to FastAPI -class DagTagSchema(SQLAlchemySchema): - """Dag Tag schema.""" - - class Meta: - """Meta.""" - - model = DagTag - - name = auto_field() - - -class DAGSchema(SQLAlchemySchema): - """DAG schema.""" - - class Meta: - """Meta.""" - - model = DagModel - - dag_id = auto_field(dump_only=True) - dag_display_name = fields.String(attribute="dag_display_name", dump_only=True) - bundle_name = auto_field(dump_only=True) - bundle_version = auto_field(dump_only=True) - is_paused = auto_field() - is_stale = auto_field(dump_only=True) - last_parsed_time = auto_field(dump_only=True) - last_expired = auto_field(dump_only=True) - fileloc = auto_field(dump_only=True) - file_token = fields.Method("get_token", dump_only=True) - owners = fields.Method("get_owners", dump_only=True) - description = auto_field(dump_only=True) - timetable_summary = auto_field(dump_only=True) - timetable_description = auto_field(dump_only=True) - tags = fields.List(fields.Nested(DagTagSchema), dump_only=True) - 
max_active_tasks = auto_field(dump_only=True) - max_active_runs = auto_field(dump_only=True) - max_consecutive_failed_dag_runs = auto_field(dump_only=True) - has_task_concurrency_limits = auto_field(dump_only=True) - has_import_errors = auto_field(dump_only=True) - next_dagrun = auto_field(dump_only=True) - next_dagrun_data_interval_start = auto_field(dump_only=True) - next_dagrun_data_interval_end = auto_field(dump_only=True) - next_dagrun_create_after = auto_field(dump_only=True) - - @staticmethod - def get_owners(obj: DagModel): - """Convert owners attribute to DAG representation.""" - if not getattr(obj, "owners", None): - return [] - return obj.owners.split(",") - - @staticmethod - def get_token(obj: DagModel): - """Return file token.""" - serializer = URLSafeSerializer(conf.get_mandatory_value("webserver", "secret_key")) - return serializer.dumps(obj.fileloc) +log = logging.getLogger(__name__) @cli_utils.action_cli @@ -128,12 +68,18 @@ def get_token(obj: DagModel): def dag_trigger(args) -> None: """Create a dag run for the specified dag.""" api_client = get_current_api_client() + try: + user = getuser() + except AirflowConfigException as e: + log.warning("Failed to get user name from os: %s, not setting the triggering user", e) + user = None try: message = api_client.trigger_dag( dag_id=args.dag_id, run_id=args.run_id, conf=args.conf, logical_date=args.logical_date, + triggering_user_name=user, replace_microseconds=args.replace_microseconds, ) AirflowConsole().print_as( @@ -289,12 +235,13 @@ def _get_dagbag_dag_details(dag: DAG) -> dict: return { "dag_id": dag.dag_id, "dag_display_name": dag.dag_display_name, - "bundle_name": dag.get_bundle_name(), - "bundle_version": dag.get_bundle_version(), - "is_paused": dag.get_is_paused(), - "is_stale": dag.get_is_stale(), + "bundle_name": dag.get_bundle_name() if hasattr(dag, "get_bundle_name") else None, + "bundle_version": dag.get_bundle_version() if hasattr(dag, "get_bundle_version") else None, + "is_paused": dag.get_is_paused() if hasattr(dag, "get_is_paused") else None, + "is_stale": dag.get_is_stale() if hasattr(dag, "get_is_stale") else None, "last_parsed_time": None, "last_expired": None, + "relative_fileloc": dag.relative_fileloc, "fileloc": dag.fileloc, "file_token": None, "owners": dag.owner, @@ -309,10 +256,10 @@ def _get_dagbag_dag_details(dag: DAG) -> dict: t.max_active_tis_per_dag is not None or t.max_active_tis_per_dagrun is not None for t in dag.tasks ), "has_import_errors": False, - "next_dagrun": None, "next_dagrun_data_interval_start": None, "next_dagrun_data_interval_end": None, - "next_dagrun_create_after": None, + "next_dagrun_logical_date": None, + "next_dagrun_run_after": None, } @@ -391,28 +338,48 @@ def print_execution_interval(interval: DataInterval | None): def dag_list_dags(args, session: Session = NEW_SESSION) -> None: """Display dags with or without stats at the command line.""" cols = args.columns if args.columns else [] - dag_schema_fields = DAGSchema().fields - invalid_cols = [c for c in cols if c not in dag_schema_fields] - valid_cols = [c for c in cols if c in dag_schema_fields] - if invalid_cols: + if invalid_cols := [c for c in cols if c not in DAG_DETAIL_FIELDS]: from rich import print as rich_print rich_print( f"[red][bold]Error:[/bold] Ignoring the following invalid columns: {invalid_cols}. 
" - f"List of valid columns: {list(dag_schema_fields.keys())}", + f"List of valid columns: {sorted(DAG_DETAIL_FIELDS)}", file=sys.stderr, ) - dagbag = DagBag(read_dags_from_db=True) - dagbag.collect_dags_from_db() + dagbag_import_errors = 0 + dags_list = [] + if args.local: + # Get import errors from the local area + if args.bundle_name: + manager = DagBundlesManager() + validate_dag_bundle_arg(args.bundle_name) + all_bundles = list(manager.get_all_dag_bundles()) + bundles_to_search = set(args.bundle_name) + + for bundle in all_bundles: + if bundle.name in bundles_to_search: + dagbag = DagBag(bundle.path, bundle_path=bundle.path) + dagbag.collect_dags() + dags_list.extend(list(dagbag.dags.values())) + dagbag_import_errors += len(dagbag.import_errors) + else: + dagbag = DagBag() + dagbag.collect_dags() + dags_list.extend(list(dagbag.dags.values())) + dagbag_import_errors += len(dagbag.import_errors) + else: + # Get import errors from the DB + dagbag = DagBag(read_dags_from_db=True) + dagbag.collect_dags_from_db() + dags_list = list(dagbag.dags.values()) - # Get import errors from the DB - query = select(func.count()).select_from(ParseImportError) - if args.bundle_name: - query = query.where(ParseImportError.bundle_name.in_(args.bundle_name)) + query = select(func.count()).select_from(ParseImportError) + if args.bundle_name: + query = query.where(ParseImportError.bundle_name.in_(args.bundle_name)) - dagbag_import_errors = session.scalar(query) + dagbag_import_errors = session.scalar(query) if dagbag_import_errors > 0: from rich import print as rich_print @@ -426,10 +393,12 @@ def dag_list_dags(args, session: Session = NEW_SESSION) -> None: def get_dag_detail(dag: DAG) -> dict: dag_model = DagModel.get_dagmodel(dag.dag_id, session=session) if dag_model: - dag_detail = DAGSchema().dump(dag_model) + dag_detail = DAGResponse.from_orm(dag_model).model_dump() else: dag_detail = _get_dagbag_dag_details(dag) - return {col: dag_detail[col] for col in valid_cols} + if not cols: + return dag_detail + return {col: dag_detail[col] for col in cols if col in DAG_DETAIL_FIELDS} def filter_dags_by_bundle(dags: list[DAG], bundle_names: list[str] | None) -> list[DAG]: """Filter DAGs based on the specified bundle name, if provided.""" @@ -441,7 +410,7 @@ def filter_dags_by_bundle(dags: list[DAG], bundle_names: list[str] | None) -> li AirflowConsole().print_as( data=sorted( - filter_dags_by_bundle(list(dagbag.dags.values()), args.bundle_name), + filter_dags_by_bundle(dags_list, args.bundle_name if not args.local else None), key=operator.attrgetter("dag_id"), ), output=args.output, @@ -458,7 +427,7 @@ def dag_details(args, session: Session = NEW_SESSION): dag = DagModel.get_dagmodel(args.dag_id, session=session) if not dag: raise SystemExit(f"DAG: {args.dag_id} does not exist in 'dag' table") - dag_detail = DAGSchema().dump(dag) + dag_detail = DAGResponse.from_orm(dag).model_dump() if args.output in ["table", "plain"]: data = [{"property_name": key, "property_value": value} for key, value in dag_detail.items()] @@ -479,22 +448,41 @@ def dag_list_import_errors(args, session: Session = NEW_SESSION) -> None: """Display dags with import errors on the command line.""" data = [] - # Get import errors from the DB - query = select(ParseImportError) - if args.bundle_name: - validate_dag_bundle_arg(args.bundle_name) - query = query.where(ParseImportError.bundle_name.in_(args.bundle_name)) - - dagbag_import_errors = session.scalars(query).all() + if args.local: + # Get import errors from local areas + if args.bundle_name: + 
manager = DagBundlesManager() + validate_dag_bundle_arg(args.bundle_name) + all_bundles = list(manager.get_all_dag_bundles()) + bundles_to_search = set(args.bundle_name) + + for bundle in all_bundles: + if bundle.name in bundles_to_search: + dagbag = DagBag(bundle.path, bundle_path=bundle.path) + for filename, errors in dagbag.import_errors.items(): + data.append({"bundle_name": bundle.name, "filepath": filename, "error": errors}) + else: + dagbag = DagBag() + for filename, errors in dagbag.import_errors.items(): + data.append({"filepath": filename, "error": errors}) - for import_error in dagbag_import_errors: - data.append( - { - "bundle_name": import_error.bundle_name, - "filepath": import_error.filename, - "error": import_error.stacktrace, - } - ) + else: + # Get import errors from the DB + query = select(ParseImportError) + if args.bundle_name: + validate_dag_bundle_arg(args.bundle_name) + query = query.where(ParseImportError.bundle_name.in_(args.bundle_name)) + + dagbag_import_errors = session.scalars(query).all() + + for import_error in dagbag_import_errors: + data.append( + { + "bundle_name": import_error.bundle_name, + "filepath": import_error.filename, + "error": import_error.stacktrace, + } + ) AirflowConsole().print_as( data=data, output=args.output, @@ -608,29 +596,6 @@ def _render_dagrun(dr: DagRun) -> dict[str, str]: AirflowConsole().print_as(data=dag_runs, output=args.output, mapper=_render_dagrun) -def _parse_and_get_dag(dag_id: str) -> DAG | None: - """Given a dag_id, determine the bundle and relative fileloc from the db, then parse and return the DAG.""" - db_dag = get_dag(bundle_names=None, dag_id=dag_id, from_db=True) - bundle_name = db_dag.get_bundle_name() - if bundle_name is None: - raise AirflowException( - f"Bundle name for DAG {dag_id!r} is not found in the database. This should not happen." - ) - if db_dag.relative_fileloc is None: - raise AirflowException( - f"Relative fileloc for DAG {dag_id!r} is not found in the database. This should not happen." - ) - bundle = DagBundlesManager().get_bundle(bundle_name) - bundle.initialize() - dag_absolute_path = os.fspath(Path(bundle.path, db_dag.relative_fileloc)) - - with _airflow_parsing_context_manager(dag_id=dag_id): - bag = DagBag( - dag_folder=dag_absolute_path, include_examples=False, safe_mode=False, load_op_links=False - ) - return bag.dags.get(dag_id) - - @cli_utils.action_cli @providers_configuration_loaded @provide_session @@ -649,19 +614,17 @@ def dag_test(args, dag: DAG | None = None, session: Session = NEW_SESSION) -> No re.compile(args.mark_success_pattern) if args.mark_success_pattern is not None else None ) - dag = dag or _parse_and_get_dag(args.dag_id) + dag = dag or get_dag(bundle_names=args.bundle_name, dag_id=args.dag_id, dagfile_path=args.dagfile_path) if not dag: raise AirflowException( f"Dag {args.dag_id!r} could not be found; either it does not exist or it failed to parse." 
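# dag_test now resolves the DAG directly through get_dag() instead of the removed
# _parse_and_get_dag() helper. A minimal sketch of the same entry point from a
# script, assuming a hypothetical dags/example.py that defines dag_id "example":
#
#     from airflow.utils.cli import get_dag
#
#     dag = get_dag(bundle_names=None, dag_id="example", dagfile_path="dags/example.py")
#     dagrun = dag.test(use_executor=False)
#     print(dagrun.state)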
) - dag = DAG.from_sdk_dag(dag) dr: DagRun = dag.test( logical_date=logical_date, run_conf=run_conf, use_executor=use_executor, mark_success_pattern=mark_success_pattern, - session=session, ) show_dagrun = args.show_dagrun imgcat = args.imgcat_dagrun diff --git a/airflow-core/src/airflow/cli/commands/db_command.py b/airflow-core/src/airflow/cli/commands/db_command.py index 5a4f70cc60472..ad9bfc393adf9 100644 --- a/airflow-core/src/airflow/cli/commands/db_command.py +++ b/airflow-core/src/airflow/cli/commands/db_command.py @@ -50,29 +50,39 @@ def resetdb(args): db.resetdb(skip_init=args.skip_init) -def _get_version_revision( - version: str, recursion_limit: int = 10, revision_heads_map: dict[str, str] | None = None -) -> str | None: +def _get_version_revision(version: str, revision_heads_map: dict[str, str] | None = None) -> str | None: """ - Recursively search for the revision of the given version in revision_heads_map. + Search for the revision of the given version in revision_heads_map. This searches given revision_heads_map for the revision of the given version, recursively searching for the previous version if the given version is not found. + + ``revision_heads_map`` must already be sorted in the dict in ascending order for this function to work. No + checks are made that this is true """ if revision_heads_map is None: revision_heads_map = _REVISION_HEADS_MAP + # Exact match found, we can just return it if version in revision_heads_map: return revision_heads_map[version] + try: - major, minor, patch = map(int, version.split(".")) + wanted = tuple(map(int, version.split("."))) except ValueError: return None - new_version = f"{major}.{minor}.{patch - 1}" - recursion_limit -= 1 - if recursion_limit <= 0: - # Prevent infinite recursion as I can't imagine 10 successive versions without migration + + # Else, we walk backwards in the revision map until we find a version that is < the target + for revision, head in reversed(revision_heads_map.items()): + try: + current = tuple(map(int, revision.split("."))) + except ValueError: + log.debug("Unable to parse HEAD revision", exc_info=True) + return None + + if current < wanted: + return head + else: return None - return _get_version_revision(new_version, recursion_limit) def run_db_migrate_command(args, command, revision_heads_map: dict[str, str]): @@ -283,6 +293,7 @@ def cleanup_tables(args): verbose=args.verbose, confirm=not args.yes, skip_archive=args.skip_archive, + batch_size=args.batch_size, ) diff --git a/airflow-core/src/airflow/cli/commands/db_manager_command.py b/airflow-core/src/airflow/cli/commands/db_manager_command.py new file mode 100644 index 0000000000000..7961ea12d023a --- /dev/null +++ b/airflow-core/src/airflow/cli/commands/db_manager_command.py @@ -0,0 +1,61 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from airflow import settings +from airflow.cli.commands.db_command import run_db_downgrade_command, run_db_migrate_command +from airflow.configuration import conf +from airflow.utils import cli as cli_utils +from airflow.utils.module_loading import import_string +from airflow.utils.providers_configuration_loader import providers_configuration_loaded + + +def _get_db_manager(classpath: str): + """Import the db manager class.""" + managers = conf.getlist("database", "external_db_managers") + if classpath not in managers: + raise SystemExit(f"DB manager {classpath} not found in configuration.") + return import_string(classpath.strip()) + + +@providers_configuration_loaded +def resetdb(args): + """Reset the metadata database.""" + db_manager = _get_db_manager(args.import_path) + if not (args.yes or input("This will drop existing tables if they exist. Proceed? (y/n)").upper() == "Y"): + raise SystemExit("Cancelled") + db_manager(settings.Session()).resetdb(skip_init=args.skip_init) + + +@cli_utils.action_cli(check_db=False) +@providers_configuration_loaded +def migratedb(args): + """Migrates the metadata database.""" + db_manager = _get_db_manager(args.import_path) + session = settings.Session() + upgrade_command = db_manager(session).upgradedb + run_db_migrate_command(args, upgrade_command, revision_heads_map=db_manager.revision_heads_map) + + +@cli_utils.action_cli(check_db=False) +@providers_configuration_loaded +def downgrade(args): + """Downgrades the metadata database.""" + db_manager = _get_db_manager(args.import_path) + session = settings.Session() + downgrade_command = db_manager(session).downgrade + run_db_downgrade_command(args, downgrade_command, revision_heads_map=db_manager.revision_heads_map) diff --git a/airflow-core/src/airflow/cli/commands/provider_command.py b/airflow-core/src/airflow/cli/commands/provider_command.py index bd03d07ee45b2..81a96b2a9bc24 100644 --- a/airflow-core/src/airflow/cli/commands/provider_command.py +++ b/airflow-core/src/airflow/cli/commands/provider_command.py @@ -220,6 +220,19 @@ def executors_list(args): ) +@suppress_logs_and_warning +@providers_configuration_loaded +def queues_list(args): + """List all queues at the command line.""" + AirflowConsole().print_as( + data=list(ProvidersManager().queue_class_names), + output=args.output, + mapper=lambda x: { + "queue_class_names": x, + }, + ) + + @suppress_logs_and_warning @providers_configuration_loaded def config_list(args): diff --git a/airflow-core/src/airflow/cli/commands/rotate_fernet_key_command.py b/airflow-core/src/airflow/cli/commands/rotate_fernet_key_command.py index b95e8f3752cc1..0d80cee446f99 100644 --- a/airflow-core/src/airflow/cli/commands/rotate_fernet_key_command.py +++ b/airflow-core/src/airflow/cli/commands/rotate_fernet_key_command.py @@ -71,10 +71,13 @@ def rotate_items_in_batches_v2(session, model_class, filter_condition=None, batc This function is taking advantage of yield_per available in SQLAlchemy 2.x. """ - while True: - query = select(model_class) - if filter_condition is not None: - query = query.where(filter_condition) + query = select(model_class) + if filter_condition is not None: + query = query.where(filter_condition) + + with session.no_autoflush: # Temporarily disable autoflush while iterating to prevent deadlocks. 
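# yield_per streams rows from the database in chunks of batch_size instead of
# materializing the whole table in memory; inside no_autoflush the mutated rows
# are not flushed mid-iteration, avoiding the deadlocks mentioned above.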
items = session.scalars(query).yield_per(batch_size) for item in items: item.rotate_fernet_key() + + # The dirty items will be flushed later by the session's transaction management. diff --git a/airflow-core/src/airflow/cli/commands/task_command.py b/airflow-core/src/airflow/cli/commands/task_command.py index 0a4e771315ba9..08e32c3be7618 100644 --- a/airflow-core/src/airflow/cli/commands/task_command.py +++ b/airflow-core/src/airflow/cli/commands/task_command.py @@ -19,7 +19,6 @@ from __future__ import annotations -import functools import importlib import json import logging @@ -29,26 +28,31 @@ from typing import TYPE_CHECKING, Protocol, cast from airflow import settings +from airflow._shared.timezones import timezone from airflow.cli.simple_table import AirflowConsole from airflow.cli.utils import fetch_dag_run_from_run_id_or_logical_date_string -from airflow.exceptions import DagRunNotFound, TaskDeferred, TaskInstanceNotFound +from airflow.exceptions import AirflowConfigException, DagRunNotFound, TaskInstanceNotFound from airflow.models import TaskInstance -from airflow.models.dag import DAG, _run_inline_trigger +from airflow.models.dag import DAG as SchedulerDAG, _get_or_create_dagrun +from airflow.models.dag_version import DagVersion from airflow.models.dagrun import DagRun +from airflow.sdk.definitions.dag import DAG, _run_task from airflow.sdk.definitions.param import ParamsDict from airflow.sdk.execution_time.secrets_masker import RedactedIO +from airflow.serialization.serialized_objects import SerializedDAG from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.dependencies_deps import SCHEDULER_QUEUED_DEPS -from airflow.utils import cli as cli_utils, timezone +from airflow.utils import cli as cli_utils from airflow.utils.cli import ( get_dag, get_dag_by_file_location, get_dags, suppress_logs_and_warning, ) +from airflow.utils.platform import getuser from airflow.utils.providers_configuration_loader import providers_configuration_loaded from airflow.utils.session import NEW_SESSION, create_session, provide_session -from airflow.utils.state import DagRunState +from airflow.utils.state import DagRunState, State from airflow.utils.task_instance_session import set_current_task_instance_session from airflow.utils.types import DagRunTriggeredByType, DagRunType @@ -57,10 +61,11 @@ from sqlalchemy.orm.session import Session - from airflow.models.operator import Operator - from airflow.typing_compat import Self + from airflow.models.mappedoperator import MappedOperator + from airflow.serialization.serialized_objects import SerializedBaseOperator CreateIfNecessary = Literal[False, "db", "memory"] + Operator = MappedOperator | SerializedBaseOperator log = logging.getLogger(__name__) @@ -120,6 +125,11 @@ def _get_dag_run( else None ) run_after = data_interval.end if data_interval else timezone.utcnow() + try: + user = getuser() + except AirflowConfigException as e: + log.warning("Failed to get user name from os: %s, not setting the triggering user", e) + user = None if create_if_necessary == "memory": dag_run = DagRun( dag_id=dag.dag_id, @@ -129,19 +139,23 @@ def _get_dag_run( data_interval=data_interval, run_after=run_after, triggered_by=DagRunTriggeredByType.CLI, + triggering_user_name=user, state=DagRunState.RUNNING, ) return dag_run, True if create_if_necessary == "db": - dag_run = dag.create_dagrun( + scheduler_dag = SerializedDAG.deserialize_dag(SerializedDAG.serialize_dag(dag)) # type: ignore[arg-type] + dag_run = _get_or_create_dagrun( + dag=scheduler_dag, 
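# The remaining keyword arguments mirror the removed dag.create_dagrun() call:
# a temporary run_id, the resolved logical date / data interval, and the CLI
# recorded as the triggering actor and user.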
run_id=_generate_temporary_run_id(), logical_date=dag_run_logical_date, data_interval=data_interval, run_after=run_after, - run_type=DagRunType.MANUAL, triggered_by=DagRunTriggeredByType.CLI, - state=DagRunState.RUNNING, + triggering_user_name=user, session=session, + start_date=logical_date or run_after, + conf=None, ) return dag_run, True raise ValueError(f"unknown create_if_necessary value: {create_if_necessary!r}") @@ -160,11 +174,6 @@ def _get_ti( dag = task.dag if dag is None: raise ValueError("Cannot get task instance for a task not assigned to a DAG") - if not isinstance(dag, DAG): - # TODO: Task-SDK: Shouldn't really happen, and this command will go away before 3.0 - raise ValueError( - f"We need a {DAG.__module__}.DAG, but we got {type(dag).__module__}.{type(dag).__name__}!" - ) # this check is imperfect because diff dags could have tasks with same name # but in a task, dag_id is a property that accesses its dag, and we don't @@ -195,7 +204,13 @@ def _get_ti( f"run_id or logical_date of {logical_date_or_run_id!r} not found" ) # TODO: Validate map_index is in range? - ti = TaskInstance(task, run_id=dag_run.run_id, map_index=map_index) + dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) + if not dag_version: + # TODO: Remove this once DagVersion.get_latest_version is guaranteed to return a DagVersion/raise + raise ValueError( + f"Cannot create TaskInstance for {dag.dag_id} because the Dag is not serialized." + ) + ti = TaskInstance(task, run_id=dag_run.run_id, map_index=map_index, dag_version_id=dag_version.id) if dag_run in session: session.add(ti) ti.dag_run = dag_run @@ -232,7 +247,9 @@ def task_failed_deps(args) -> None: to have succeeded, but found 1 non-success(es). """ dag = get_dag(args.bundle_name, args.dag_id) - task = dag.get_task(task_id=args.task_id) + # TODO (GH-52141): get_task in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. + task = cast("Operator", dag.get_task(task_id=args.task_id)) ti, _ = _get_ti(task, args.map_index, logical_date_or_run_id=args.logical_date_or_run_id) dep_context = DepContext(deps=SCHEDULER_QUEUED_DEPS) failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context)) @@ -256,7 +273,9 @@ def task_state(args) -> None: success """ dag = get_dag(args.bundle_name, args.dag_id, from_db=True) - task = dag.get_task(task_id=args.task_id) + # TODO (GH-52141): get_task in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. + task = cast("Operator", dag.get_task(task_id=args.task_id)) ti, _ = _get_ti(task, args.map_index, logical_date_or_run_id=args.logical_date_or_run_id) print(ti.state) @@ -273,11 +292,13 @@ def task_list(args, dag: DAG | None = None) -> None: class _SupportedDebugger(Protocol): def post_mortem(self) -> None: ... + def set_trace(self) -> None: ... SUPPORTED_DEBUGGER_MODULES = [ "pudb", "web_pdb", + "pdbr", "ipdb", "pdb", ] @@ -293,6 +314,7 @@ def _guess_debugger() -> _SupportedDebugger: * `pudb `__ * `web_pdb `__ + * `pdbr `__ * `ipdb `__ * `pdb `__ """ @@ -342,8 +364,7 @@ def format_task_instance(ti: TaskInstance) -> dict[str, str]: @cli_utils.action_cli(check_db=False) -@provide_session -def task_test(args, dag: DAG | None = None, session: Session = NEW_SESSION) -> None: +def task_test(args, dag: DAG | None = None) -> None: """Test task for a given dag_id.""" # We want to log output from operators etc to show up here. 
Normally # airflow.task would redirect to a file, but here we want it to propagate @@ -367,9 +388,10 @@ def task_test(args, dag: DAG | None = None, session: Session = NEW_SESSION) -> N dag = dag or get_dag(args.bundle_name, args.dag_id) - dag = DAG.from_sdk_dag(dag) + # TODO (GH-52141): get_task in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. + task = cast("Operator", dag.get_task(task_id=args.task_id)) - task = dag.get_task(task_id=args.task_id) # Add CLI provided task_params to task.params if args.task_params: passed_in_params = json.loads(args.task_params) @@ -383,31 +405,10 @@ def task_test(args, dag: DAG | None = None, session: Session = NEW_SESSION) -> N ) try: with redirect_stdout(RedactedIO()): - if args.dry_run: - ti.dry_run() - else: - ti.run(ignore_task_deps=True, ignore_ti_state=True, test_mode=True, raise_on_defer=True) - except TaskDeferred as defer: - ti.defer_task(exception=defer, session=session) - log.info("[TASK TEST] running trigger in line") - - event = _run_inline_trigger(defer.trigger) - ti.next_method = defer.method_name - ti.next_kwargs = {"event": event.payload} if event else defer.kwargs - - execute_callable = getattr(task, ti.next_method) - if ti.next_kwargs: - execute_callable = functools.partial(execute_callable, **ti.next_kwargs) - context = ti.get_template_context(ignore_param_exceptions=False) - execute_callable(context) - - log.info("[TASK TEST] Trigger completed") - except Exception: - if args.post_mortem: + _run_task(ti=ti, task=task, run_triggerer=True) + if ti.state == State.FAILED and args.post_mortem: debugger = _guess_debugger() - debugger.post_mortem() - else: - raise + debugger.set_trace() finally: if not already_has_stream_handler: # Make sure to reset back to normal. When run for CLI this doesn't @@ -425,8 +426,9 @@ def task_render(args, dag: DAG | None = None) -> None: """Render and displays templated fields for a given task.""" if not dag: dag = get_dag(args.bundle_name, args.dag_id) - dag = DAG.from_sdk_dag(dag) - task = dag.get_task(task_id=args.task_id) + # TODO (GH-52141): get_task in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. + task = cast("Operator", dag.get_task(task_id=args.task_id)) ti, _ = _get_ti( task, args.map_index, logical_date_or_run_id=args.logical_date_or_run_id, create_if_necessary="memory" ) @@ -464,7 +466,7 @@ def task_clear(args) -> None: include_upstream=args.upstream, ) - DAG.clear_dags( + SchedulerDAG.clear_dags( dags, start_date=args.start_date, end_date=args.end_date, @@ -472,53 +474,3 @@ def task_clear(args) -> None: only_running=args.only_running, confirm_prompt=not args.yes, ) - - -class LoggerMutationHelper: - """ - Helper for moving and resetting handlers and other logger attrs. - - :meta private: - """ - - def __init__(self, logger: logging.Logger) -> None: - self.handlers = logger.handlers[:] - self.level = logger.level - self.propagate = logger.propagate - self.source_logger = logger - - def apply(self, logger: logging.Logger, replace: bool = True) -> None: - """ - Set ``logger`` with attrs stored on instance. - - If ``logger`` is root logger, don't change propagate. 
- """ - if replace: - logger.handlers[:] = self.handlers - else: - for h in self.handlers: - if h not in logger.handlers: - logger.addHandler(h) - logger.level = self.level - if logger is not logging.getLogger(): - logger.propagate = self.propagate - - def move(self, logger: logging.Logger, replace: bool = True) -> None: - """ - Replace ``logger`` attrs with those from source. - - :param logger: target logger - :param replace: if True, remove all handlers from target first; otherwise add if not present. - """ - self.apply(logger, replace=replace) - self.source_logger.propagate = True - self.source_logger.handlers[:] = [] - - def reset(self) -> None: - self.apply(self.source_logger) - - def __enter__(self) -> Self: - return self - - def __exit__(self, exc_type, exc_val, exc_tb) -> None: - self.reset() diff --git a/airflow-core/src/airflow/cli/commands/variable_command.py b/airflow-core/src/airflow/cli/commands/variable_command.py index ca78a982bde69..e20f549901b46 100644 --- a/airflow-core/src/airflow/cli/commands/variable_command.py +++ b/airflow-core/src/airflow/cli/commands/variable_command.py @@ -21,13 +21,18 @@ import json import os -from json import JSONDecodeError from sqlalchemy import select from airflow.cli.simple_table import AirflowConsole from airflow.cli.utils import print_export_output +from airflow.exceptions import ( + AirflowFileParseException, + AirflowUnsupportedFileTypeException, + VariableNotUnique, +) from airflow.models import Variable +from airflow.secrets.local_filesystem import load_variables from airflow.utils import cli as cli_utils from airflow.utils.cli import suppress_logs_and_warning from airflow.utils.providers_configuration_loader import providers_configuration_loaded @@ -81,11 +86,16 @@ def variables_import(args, session): """Import variables from a given file.""" if not os.path.exists(args.file): raise SystemExit("Missing variables file.") - with open(args.file) as varfile: - try: - var_json = json.load(varfile) - except JSONDecodeError: - raise SystemExit("Invalid variables file.") + + try: + var_json = load_variables(args.file) + except (AirflowUnsupportedFileTypeException, AirflowFileParseException, VariableNotUnique) as e: + raise SystemExit(str(e)) + except FileNotFoundError: + raise SystemExit("Missing variables file.") + except Exception as e: + raise SystemExit(f"Failed to load variables file: {e}") + suc_count = fail_count = 0 skipped = set() action_on_existing = args.action_on_existing_key diff --git a/airflow-core/src/airflow/cli/simple_table.py b/airflow-core/src/airflow/cli/simple_table.py index b8e4c6a7a41a0..4e321c1c47f7d 100644 --- a/airflow-core/src/airflow/cli/simple_table.py +++ b/airflow-core/src/airflow/cli/simple_table.py @@ -18,8 +18,8 @@ import inspect import json -from collections.abc import Sequence -from typing import TYPE_CHECKING, Any, Callable +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any from rich.box import ASCII_DOUBLE_HEAD from rich.console import Console @@ -32,7 +32,7 @@ from airflow.utils.platform import is_tty if TYPE_CHECKING: - from airflow.typing_compat import TypeGuard + from typing import TypeGuard def is_data_sequence(data: Sequence[dict | Any]) -> TypeGuard[Sequence[dict]]: diff --git a/airflow-core/src/airflow/cli/utils.py b/airflow-core/src/airflow/cli/utils.py index 605244ee71e56..4c7e6409e53a8 100644 --- a/airflow-core/src/airflow/cli/utils.py +++ b/airflow-core/src/airflow/cli/utils.py @@ -44,13 +44,12 @@ def is_stdout(fileio: IOBase) -> bool: with 
argparse.FileType points to stdout (by setting the path to ``-``). This is why there is no equivalent for stderr; argparse does not allow using it. - .. warning:: *fileio* must be open for this check to be successful. """ - return fileio.fileno() == sys.stdout.fileno() + return fileio is sys.stdout def print_export_output(command_type: str, exported_items: Collection, file: TextIOWrapper): - if not file.closed and is_stdout(file): + if is_stdout(file): print(f"\n{len(exported_items)} {command_type} successfully exported.", file=sys.stderr) else: print(f"{len(exported_items)} {command_type} successfully exported to {file.name}.") @@ -79,9 +78,9 @@ def fetch_dag_run_from_run_id_or_logical_date_string( from pendulum.parsing.exceptions import ParserError from sqlalchemy import select + from airflow._shared.timezones import timezone from airflow.models.dag import DAG from airflow.models.dagrun import DagRun - from airflow.utils import timezone if dag_run := DAG.fetch_dagrun(dag_id=dag_id, run_id=value, session=session): return dag_run, dag_run.logical_date diff --git a/airflow-core/src/airflow/config_templates/airflow_local_settings.py b/airflow-core/src/airflow/config_templates/airflow_local_settings.py index 319cdc972593c..7c84ba3f3b133 100644 --- a/airflow-core/src/airflow/config_templates/airflow_local_settings.py +++ b/airflow-core/src/airflow/config_templates/airflow_local_settings.py @@ -128,6 +128,27 @@ REMOTE_LOGGING: bool = conf.getboolean("logging", "remote_logging") REMOTE_TASK_LOG: RemoteLogIO | None = None +DEFAULT_REMOTE_CONN_ID: str | None = None + + +def _default_conn_name_from(mod_path, hook_name): + # Try to set the default conn name from a hook, but don't error if something goes wrong at runtime + from importlib import import_module + + global DEFAULT_REMOTE_CONN_ID + + try: + mod = import_module(mod_path) + + hook = getattr(mod, hook_name) + + DEFAULT_REMOTE_CONN_ID = getattr(hook, "default_conn_name") + except Exception: + # Lets error in tests though! 
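# PYTEST_CURRENT_TEST is set by pytest for the duration of each test, so a broken
# hook import fails loudly in the test suite while staying a silent no-op at runtime.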
+ if "PYTEST_CURRENT_TEST" in os.environ: + raise + return None + if REMOTE_LOGGING: ELASTICSEARCH_HOST: str | None = conf.get("elasticsearch", "HOST") @@ -151,6 +172,7 @@ if remote_base_log_folder.startswith("s3://"): from airflow.providers.amazon.aws.log.s3_task_handler import S3RemoteLogIO + _default_conn_name_from("airflow.providers.amazon.aws.hooks.s3", "S3Hook") REMOTE_TASK_LOG = S3RemoteLogIO( **( { @@ -166,6 +188,7 @@ elif remote_base_log_folder.startswith("cloudwatch://"): from airflow.providers.amazon.aws.log.cloudwatch_task_handler import CloudWatchRemoteLogIO + _default_conn_name_from("airflow.providers.amazon.aws.hooks.logs", "AwsLogsHook") url_parts = urlsplit(remote_base_log_folder) REMOTE_TASK_LOG = CloudWatchRemoteLogIO( **( @@ -180,8 +203,9 @@ ) remote_task_handler_kwargs = {} elif remote_base_log_folder.startswith("gs://"): - from airflow.providers.google.cloud.logs.gcs_task_handler import GCSRemoteLogIO + from airflow.providers.google.cloud.log.gcs_task_handler import GCSRemoteLogIO + _default_conn_name_from("airflow.providers.google.cloud.hooks.gcs", "GCSHook") key_path = conf.get_mandatory_value("logging", "google_key_path", fallback=None) REMOTE_TASK_LOG = GCSRemoteLogIO( @@ -199,6 +223,7 @@ elif remote_base_log_folder.startswith("wasb"): from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbRemoteLogIO + _default_conn_name_from("airflow.providers.microsoft.azure.hooks.wasb", "WasbHook") wasb_log_container = conf.get_mandatory_value( "azure_remote_logging", "remote_wasb_log_container", fallback="airflow-logs" ) @@ -232,6 +257,8 @@ elif remote_base_log_folder.startswith("oss://"): from airflow.providers.alibaba.cloud.log.oss_task_handler import OSSRemoteLogIO + _default_conn_name_from("airflow.providers.alibaba.cloud.hooks.oss", "OSSHook") + REMOTE_TASK_LOG = OSSRemoteLogIO( **( { @@ -246,11 +273,13 @@ elif remote_base_log_folder.startswith("hdfs://"): from airflow.providers.apache.hdfs.log.hdfs_task_handler import HdfsRemoteLogIO + _default_conn_name_from("airflow.providers.apache.hdfs.hooks.webhdfs", "WebHDFSHook") + REMOTE_TASK_LOG = HdfsRemoteLogIO( **( { "base_log_folder": BASE_LOG_FOLDER, - "remote_base": remote_base_log_folder, + "remote_base": urlsplit(remote_base_log_folder).path, "delete_local_copy": delete_local_copy, } | remote_task_handler_kwargs diff --git a/airflow-core/src/airflow/config_templates/config.yml b/airflow-core/src/airflow/config_templates/config.yml index 1b71e5ae019e6..3a824fbcda6d7 100644 --- a/airflow-core/src/airflow/config_templates/config.yml +++ b/airflow-core/src/airflow/config_templates/config.yml @@ -116,7 +116,7 @@ core: schedulers in your cluster, is the maximum number of task instances with the running state in the metadata database. The value must be larger or equal 1. version_added: ~ - type: string + type: integer example: ~ default: "32" max_active_tasks_per_dag: @@ -129,7 +129,7 @@ core: An example scenario when this would be useful is when you want to stop a new dag with an early start date from stealing all the executor slots in a cluster. version_added: 2.2.0 - type: string + type: integer example: ~ default: "16" dags_are_paused_at_creation: @@ -145,7 +145,7 @@ core: if it reaches the limit. This is configurable at the DAG level with ``max_active_runs``, which is defaulted as ``[core] max_active_runs_per_dag``. 
version_added: ~ - type: string + type: integer example: ~ default: "16" max_consecutive_failed_dag_runs_per_dag: @@ -156,7 +156,7 @@ core: If not specified, then the value is considered as 0, meaning that the dags are never paused out by default. version_added: 2.9.0 - type: string + type: integer example: ~ default: "0" mp_start_method: @@ -207,13 +207,6 @@ core: sensitive: true example: ~ default: "{FERNET_KEY}" - donot_pickle: - description: | - Whether to disable pickling dags - version_added: ~ - type: string - example: ~ - default: "True" dagbag_import_timeout: description: | How long before timing out a python file import @@ -281,7 +274,7 @@ core: When a task is killed forcefully, this is the amount of time in seconds that it has to cleanup after it is sent a SIGTERM, before it is SIGKILLED version_added: ~ - type: string + type: integer example: ~ default: "60" dag_run_conf_overrides_params: @@ -314,7 +307,7 @@ core: description: | The number of retries each task is going to have by default. Can be overridden at dag or task level. version_added: 1.10.6 - type: string + type: integer example: ~ default: "0" default_task_retry_delay: @@ -361,7 +354,7 @@ core: description: | Updating serialized DAG can not be faster than a minimum interval to reduce database write rate. version_added: 1.10.7 - type: string + type: integer example: ~ default: "30" compress_serialized_dags: @@ -372,7 +365,7 @@ core: This will disable the DAG dependencies view version_added: 2.3.0 - type: string + type: boolean example: ~ default: "False" min_serialized_dag_fetch_interval: @@ -380,7 +373,7 @@ core: Fetching serialized DAG can not be faster than a minimum interval to reduce database read rate. This config controls when your DAGs are updated in the Webserver version_added: 1.10.12 - type: string + type: integer example: ~ default: "10" max_num_rendered_ti_fields_per_task: @@ -444,7 +437,7 @@ core: deployment where the ``default_pool`` is already created. For existing deployments, users can change the number of slots using Webserver, API or the CLI version_added: 2.2.0 - type: string + type: integer example: ~ default: "128" max_map_length: @@ -482,33 +475,6 @@ core: sensitive: true default: ~ example: '{"some_param": "some_value"}' - database_access_isolation: - description: (experimental) Whether components should use Airflow Internal API for DB connectivity. - version_added: 2.6.0 - type: boolean - example: ~ - default: "False" - internal_api_url: - description: | - (experimental) Airflow Internal API url. - Only used if ``[core] database_access_isolation`` is ``True``. - version_added: 2.6.0 - type: string - default: ~ - example: 'http://localhost:8080' - internal_api_secret_key: - description: | - Secret key used to authenticate internal API clients to core. It should be as random as possible. - However, when running more than 1 instances of webserver / internal API services, make sure all - of them use the same ``secret_key`` otherwise calls will fail on authentication. - The authentication token generated using the secret key has a short expiry time though - make - sure that time on ALL the machines that you run airflow components on is synchronized - (for example using ntpd) otherwise you might get "forbidden" errors when the logs are accessed. - version_added: 2.10.0 - type: string - sensitive: true - example: ~ - default: "{SECRET_KEY}" test_connection: description: | The ability to allow testing connections across Airflow UI, API and CLI. 
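The type corrections in this file (``string`` to ``integer``/``boolean``) match how the options are
actually consumed. A short sketch of the typed accessors, assuming stock defaults:

    from airflow.configuration import conf

    conf.getint("core", "parallelism")                       # 32
    conf.getboolean("core", "dags_are_paused_at_creation")   # True
    conf.getint("core", "max_active_runs_per_dag")           # 16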
@@ -536,11 +502,13 @@ core: default: "4096" execution_api_server_url: description: | - The url of the execution api server. + The url of the execution api server. Default is ``{BASE_URL}/execution/`` + where ``{BASE_URL}`` is the base url of the API Server. If ``{BASE_URL}`` is not set, + it will use ``http://localhost:8080`` as the default base url. version_added: 3.0.0 type: string example: ~ - default: "http://localhost:8080/execution/" + default: ~ database: description: ~ options: @@ -564,6 +532,18 @@ database: sensitive: true example: ~ default: "sqlite:///{AIRFLOW_HOME}/airflow.db" + sql_alchemy_conn_async: + description: | + The SQLAlchemy connection string to the metadata database used for async connections. + If this is not set, Airflow automatically derives a string by converting ``sql_alchemy_conn``. + Unfortunately, this conversion logic does not always work due to various incompatibilities + between sync and async db driver implementations. This sets the connection string directly + without any conversion instead. + version_added: 3.1.0 + type: string + sensitive: true + example: "postgresql+asyncpg://postgres:airflow@postgres/airflow" + default: ~ sql_alchemy_engine_args: description: | Extra engine specific keyword args passed to SQLAlchemy's create_engine, as a JSON-encoded value @@ -604,7 +584,7 @@ database: The SQLAlchemy pool size is the maximum number of database connections in the pool. 0 indicates no limit. version_added: 2.3.0 - type: string + type: integer example: ~ default: "5" sql_alchemy_max_overflow: @@ -619,7 +599,7 @@ database: max_overflow can be set to ``-1`` to indicate no overflow limit; no limit will be placed on the total number of concurrent connections. Defaults to ``10``. version_added: 2.3.0 - type: string + type: integer example: ~ default: "10" sql_alchemy_pool_recycle: @@ -629,7 +609,7 @@ database: not apply to sqlite. If the number of DB connections is ever exceeded, a lower config value will allow the system to recover faster. version_added: 2.3.0 - type: string + type: integer example: ~ default: "1800" sql_alchemy_pool_pre_ping: @@ -640,7 +620,7 @@ database: `__ for more details. version_added: 2.3.0 - type: string + type: boolean example: ~ default: "True" sql_alchemy_schema: @@ -661,10 +641,23 @@ database: `__ e.g ``connect_args={"timeout":30}`` can be defined in ``airflow_local_settings.py`` and can be imported as shown below + + *Changed in 3.1.0*: This configuration is only applied to synchronous engines, such as psycopg2. + See ``sql_alchemy_connect_args_async``. version_added: 2.3.0 type: string example: 'airflow_local_settings.connect_args' default: ~ + sql_alchemy_connect_args_async: + description: | + Import path for connect args in SQLAlchemy. Defaults to an empty dict. + This is similar to ``sql_alchemy_connect_args``, but only for async connections. + + This configuration is only applied to async engines, such as asyncpg. + version_added: 3.1.0 + type: string + example: 'airflow_local_settings.connect_args_async' + default: ~ sql_alchemy_session_maker: description: | Important Warning: Use of sql_alchemy_session_maker Highly Discouraged @@ -697,7 +690,8 @@ database: external_db_managers: description: | List of DB managers to use to migrate external tables in airflow database. The managers must inherit - from BaseDBManager + from BaseDBManager. If ``FabAuthManager`` is configured in the environment, + ``airflow.providers.fab.auth_manager.models.db.FABDBManager`` is automatically added. 
version_added: 3.0.0 type: string example: "airflow.providers.fab.auth_manager.models.db.FABDBManager" @@ -1060,7 +1054,7 @@ metrics: description: | Enables sending metrics to StatsD. version_added: 2.0.0 - type: string + type: boolean example: ~ default: "False" statsd_host: @@ -1074,14 +1068,14 @@ metrics: description: | Enables the statsd host to be resolved into IPv6 address version_added: 3.0.0 - type: string + type: boolean example: ~ default: "False" statsd_port: description: | Specifies the port on which the StatsD daemon (or server) is listening to version_added: 2.0.0 - type: string + type: integer example: ~ default: "8125" statsd_prefix: @@ -1109,7 +1103,7 @@ metrics: description: | To enable datadog integration to send airflow metrics. version_added: 2.0.0 - type: string + type: boolean example: ~ default: "False" statsd_datadog_tags: @@ -1157,7 +1151,7 @@ metrics: description: | Enables sending metrics to OpenTelemetry. version_added: 2.6.0 - type: string + type: boolean example: ~ default: "False" otel_host: @@ -1172,7 +1166,7 @@ metrics: description: | Specifies the port of the OpenTelemetry Collector that is listening to. version_added: 2.6.0 - type: string + type: integer example: ~ default: "8889" otel_prefix: @@ -1194,7 +1188,7 @@ metrics: description: | If ``True``, all metrics are also emitted to the console. Defaults to ``False``. version_added: 2.7.0 - type: string + type: boolean example: ~ default: "False" otel_service: @@ -1211,7 +1205,7 @@ metrics: you need to configure the SSL certificate and key within the OpenTelemetry collector's ``config.yml`` file. version_added: 2.7.0 - type: string + type: boolean example: ~ default: "False" traces: @@ -1222,7 +1216,7 @@ traces: description: | Enables sending traces to OpenTelemetry. version_added: 2.10.0 - type: string + type: boolean example: ~ default: "False" otel_host: @@ -1237,7 +1231,7 @@ traces: description: | Specifies the port of the OpenTelemetry Collector that is listening to. version_added: 2.10.0 - type: string + type: integer example: ~ default: "8889" otel_service: @@ -1251,7 +1245,7 @@ traces: description: | If True, all traces are also emitted to the console. Defaults to False. version_added: 2.10.0 - type: string + type: boolean example: ~ default: "False" otel_ssl_active: @@ -1261,6 +1255,13 @@ traces: you need to configure the SSL certificate and key within the OpenTelemetry collector's config.yml file. version_added: 2.10.0 + type: boolean + example: ~ + default: "False" + otel_debug_traces_on: + description: | + If True, then traces from Airflow internal methods are exported. Defaults to False. + version_added: 3.1.0 type: string example: ~ default: "False" @@ -1315,6 +1316,49 @@ secrets: api: description: ~ options: + instance_name: + description: | + Sets a custom page title for the DAGs overview page and site title for all pages + version_added: 2.1.0 + type: string + example: ~ + default: + enable_swagger_ui: + description: | + Boolean for running SwaggerUI in the webserver. + version_added: 2.6.0 + type: boolean + example: ~ + default: "True" + secret_key: + description: | + Secret key used to run your api server. It should be as random as possible. However, when running + more than 1 instances of the api, make sure all of them use the same ``secret_key`` otherwise + one of them will error with "CSRF session token is missing". + The api key is also used to authorize requests to Celery workers when logs are retrieved. 
+ The token generated using the secret key has a short expiry time though - make sure that time on + ALL the machines that you run airflow components on is synchronized (for example using ntpd) + otherwise you might get "forbidden" errors when the logs are accessed. + version_added: ~ + type: string + sensitive: true + example: ~ + default: "{SECRET_KEY}" + expose_config: + description: | + Expose the configuration file in the web server. Set to ``non-sensitive-only`` to show all values + except those that have security implications. ``True`` shows all values. ``False`` hides the + configuration completely. + version_added: ~ + type: string + example: ~ + default: "False" + expose_stacktrace: + description: Expose stacktrace in the web server + version_added: ~ + type: string + example: ~ + default: "False" base_url: description: | The base url of the API server. Airflow cannot guess what domain or CNAME you are using. @@ -1342,27 +1386,30 @@ api: description: | Number of workers to run on the API server version_added: ~ - type: string + type: integer example: ~ default: "4" worker_timeout: description: | Number of seconds the API server waits before timing out on a worker version_added: ~ - type: string + type: integer example: ~ default: "120" - access_logfile: + log_config: description: | - Log files for the api server. '-' means log to stderr. + Path to the logging configuration file for the uvicorn server. + If not set, the default uvicorn logging configuration will be used. version_added: ~ type: string - example: ~ - default: "-" + example: path/to/logging_config.yaml + default: ~ ssl_cert: description: | Paths to the SSL certificate and key for the api server. When both are provided SSL will be enabled. This does not change the api server port. + The same SSL certificate will also be loaded into the worker to enable + it to be trusted when a self-signed certificate is used. version_added: ~ type: string example: ~ @@ -1418,13 +1465,59 @@ api: version_added: 2.2.0 example: ~ default: "" - enable_xcom_deserialize_support: + grid_view_sorting_order: description: | - Indicates whether the **xcomEntries** endpoint supports the **deserialize** - flag. If set to ``False``, setting this flag in a request would result in a - 400 Bad Request error. - type: boolean + Sorting order in grid view. Valid values are: ``topological``, ``hierarchical_alphabetical`` version_added: 2.7.0 + type: string + example: ~ + default: "topological" + log_fetch_timeout_sec: + description: | + The amount of time (in secs) webserver will wait for initial handshake + while fetching logs from other worker machine + version_added: ~ + type: string + example: ~ + default: "5" + hide_paused_dags_by_default: + description: | + By default, the webserver shows paused DAGs. Flip this to hide paused + DAGs by default + version_added: ~ + type: boolean + example: ~ + default: "False" + page_size: + description: | + Consistent page size across all listing views in the UI + version_added: ~ + type: integer + example: ~ + default: "50" + default_wrap: + description: | + Default setting for wrap toggle on DAG code and TI log views. 
+ version_added: 1.10.4 + type: boolean + example: ~ + default: "False" + auto_refresh_interval: + description: | + How frequently, in seconds, the DAG data will auto-refresh in graph or grid view + when auto-refresh is turned on + version_added: 2.2.0 + type: integer + example: ~ + default: "3" + require_confirmation_dag_change: + description: | + Require confirmation when changing a DAG in the web UI. This is to prevent accidental changes + to a DAG that may be running on sensitive environments like production. + When set to ``True``, confirmation dialog will be shown when a user tries to Pause/Unpause, + Trigger a DAG + version_added: 2.9.0 + type: boolean example: ~ default: "False" workers: @@ -1486,6 +1579,15 @@ workers: type: float example: ~ default: "90.0" + socket_cleanup_timeout: + description: | + Number of seconds to wait after a task process exits before forcibly closing any + remaining communication sockets. This helps prevent the task supervisor from hanging + indefinitely due to missed EOF signals. + version_added: 3.0.3 + type: float + example: ~ + default: "60.0" api_auth: description: Settings relating to authentication on the Airflow APIs options: @@ -1712,182 +1814,6 @@ operators: type: string example: ~ default: "default" -webserver: - description: ~ - options: - access_denied_message: - description: | - The message displayed when a user attempts to execute actions beyond their authorised privileges. - version_added: 2.7.0 - type: string - example: ~ - default: "Access is Denied" - secret_key: - description: | - Secret key used to run your api server. It should be as random as possible. However, when running - more than 1 instances of webserver, make sure all of them use the same ``secret_key`` otherwise - one of them will error with "CSRF session token is missing". - The webserver key is also used to authorize requests to Celery workers when logs are retrieved. - The token generated using the secret key has a short expiry time though - make sure that time on - ALL the machines that you run airflow components on is synchronized (for example using ntpd) - otherwise you might get "forbidden" errors when the logs are accessed. - version_added: ~ - type: string - sensitive: true - example: ~ - default: "{SECRET_KEY}" - expose_config: - description: | - Expose the configuration file in the web server. Set to ``non-sensitive-only`` to show all values - except those that have security implications. ``True`` shows all values. ``False`` hides the - configuration completely. - version_added: ~ - type: string - example: ~ - default: "False" - expose_hostname: - description: | - Expose hostname in the web server - version_added: 1.10.8 - type: string - example: ~ - default: "False" - grid_view_sorting_order: - description: | - Sorting order in grid view. Valid values are: ``topological``, ``hierarchical_alphabetical`` - version_added: 2.7.0 - type: string - example: ~ - default: "topological" - log_fetch_timeout_sec: - description: | - The amount of time (in secs) webserver will wait for initial handshake - while fetching logs from other worker machine - version_added: ~ - type: string - example: ~ - default: "5" - hide_paused_dags_by_default: - description: | - By default, the webserver shows paused DAGs. 
Flip this to hide paused - DAGs by default - version_added: ~ - type: string - example: ~ - default: "False" - page_size: - description: | - Consistent page size across all listing views in the UI - version_added: ~ - type: string - example: ~ - default: "50" - navbar_color: - description: | - Define the color of navigation bar - version_added: ~ - type: string - example: ~ - default: "#fff" - navbar_text_color: - description: | - Define the color of text in the navigation bar - version_added: 2.8.0 - type: string - example: ~ - default: "#51504f" - navbar_hover_color: - description: | - Define the color of navigation bar links when hovered - version_added: 2.9.0 - type: string - example: ~ - default: "#eee" - navbar_text_hover_color: - description: | - Define the color of text in the navigation bar when hovered - version_added: 2.9.0 - type: string - example: ~ - default: "#51504f" - default_wrap: - description: | - Default setting for wrap toggle on DAG code and TI log views. - version_added: 1.10.4 - type: boolean - example: ~ - default: "False" - x_frame_enabled: - description: | - Allow the UI to be rendered in a frame - version_added: 1.10.8 - type: boolean - example: ~ - default: "True" - instance_name: - description: | - Sets a custom page title for the DAGs overview page and site title for all pages - version_added: 2.1.0 - type: string - example: ~ - default: - instance_name_has_markup: - description: | - Whether the custom page title for the DAGs overview page contains any Markup language - version_added: 2.3.0 - type: boolean - example: ~ - default: "False" - auto_refresh_interval: - description: | - How frequently, in seconds, the DAG data will auto-refresh in graph or grid view - when auto-refresh is turned on - version_added: 2.2.0 - type: integer - example: ~ - default: "3" - warn_deployment_exposure: - description: | - Boolean for displaying warning for publicly viewable deployment - version_added: 2.3.0 - type: boolean - example: ~ - default: "True" - audit_view_excluded_events: - description: | - Comma separated string of view events to exclude from dag audit view. - All other events will be added minus the ones passed here. - The audit logs in the db will not be affected by this parameter. - version_added: 2.3.0 - type: string - example: "cli_task_run,running,success" - default: ~ - audit_view_included_events: - description: | - Comma separated string of view events to include in dag audit view. - If passed, only these events will populate the dag audit view. - The audit logs in the db will not be affected by this parameter. - version_added: 2.3.0 - type: string - example: "dagrun_cleared,failed" - default: ~ - enable_swagger_ui: - description: | - Boolean for running SwaggerUI in the webserver. - version_added: 2.6.0 - type: boolean - example: ~ - default: "True" - require_confirmation_dag_change: - description: | - Require confirmation when changing a DAG in the web UI. This is to prevent accidental changes - to a DAG that may be running on sensitive environments like production. - When set to ``True``, confirmation dialog will be shown when a user tries to Pause/Unpause, - Trigger a DAG - version_added: 2.9.0 - type: boolean - example: ~ - default: "False" email: description: | Configuration email backend and whether to @@ -1978,21 +1904,21 @@ smtp: description: | Determines whether to use the STARTTLS command when connecting to the SMTP server. 
version_added: ~ - type: string + type: boolean example: ~ default: "True" smtp_ssl: description: | Determines whether to use an SSL connection when talking to the SMTP server. version_added: ~ - type: string + type: boolean example: ~ default: "False" smtp_port: description: | Defines the port number on which Airflow connects to the SMTP server to send email notifications. version_added: ~ - type: string + type: integer example: ~ default: "25" smtp_mail_from: @@ -2029,7 +1955,7 @@ sentry: sentry_on: description: Enable error reporting to Sentry version_added: 2.0.0 - type: string + type: boolean example: ~ default: "false" sentry_dsn: @@ -2230,16 +2156,6 @@ scheduler: type: integer default: "20" see_also: ":ref:`scheduler:ha:tunables`" - parsing_pre_import_modules: - description: | - The scheduler reads dag files to extract the airflow modules that are going to be used, - and imports them ahead of time to avoid having to re-do it for each parsing process. - This flag can be set to ``False`` to disable this behavior in case an airflow module needs - to be freshly imported each time (at the cost of increased DAG parsing time). - version_added: 2.6.0 - type: boolean - example: ~ - default: "True" dag_stale_not_seen_duration: description: | Time in seconds after which dags, which were not updated by Dag Processor are deactivated. @@ -2340,7 +2256,7 @@ triggerer: description: | How many triggers a single Triggerer will run at once, by default. version_added: 2.2.0 - type: string + type: integer example: ~ default: "1000" job_heartbeat_sec: @@ -2535,7 +2451,7 @@ dag_processor: description: | How long before timing out a DagFileProcessor, which processes a dag file version_added: ~ - type: string + type: integer example: ~ default: "50" print_stats_interval: @@ -2594,3 +2510,13 @@ dag_processor: type: integer example: ~ default: "10" + parsing_pre_import_modules: + description: | + The dag_processor reads dag files to extract the airflow modules that are going to be used, + and imports them ahead of time to avoid having to re-do it for each parsing process. + This flag can be set to ``False`` to disable this behavior in case an airflow module needs + to be freshly imported each time (at the cost of increased DAG parsing time). 
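# In airflow.cfg these options live under the [smtp] section, with the defaults
# shown in this hunk, e.g.:
#   [smtp]
#   smtp_starttls = True
#   smtp_ssl = False
#   smtp_port = 25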
+ version_added: 2.6.0 + type: boolean + example: ~ + default: "True" diff --git a/airflow-core/src/airflow/config_templates/unit_tests.cfg b/airflow-core/src/airflow/config_templates/unit_tests.cfg index a19203e19d956..acd2aa771332b 100644 --- a/airflow-core/src/airflow/config_templates/unit_tests.cfg +++ b/airflow-core/src/airflow/config_templates/unit_tests.cfg @@ -48,8 +48,6 @@ executor = LocalExecutor dags_are_paused_at_creation = False # We want to load examples load_examples = True -# We want pickling to be enabled in tests -donot_pickle = False # No default impersonation in tests - tests that run impersonation should set this explicitly default_impersonation = # We want to use unit test mode (though technically it is not needed as we set it in pytest fixture diff --git a/airflow-core/src/airflow/configuration.py b/airflow-core/src/airflow/configuration.py index c48377c333ccb..0af466d30e6cf 100644 --- a/airflow-core/src/airflow/configuration.py +++ b/airflow-core/src/airflow/configuration.py @@ -39,7 +39,7 @@ from io import StringIO from json.decoder import JSONDecodeError from re import Pattern -from typing import IO, TYPE_CHECKING, Any, Union +from typing import IO, TYPE_CHECKING, Any from urllib.parse import urlsplit from packaging.version import parse as parse_version @@ -64,9 +64,9 @@ _SQLITE3_VERSION_PATTERN = re.compile(r"(?P^\d+(?:\.\d+)*)\D?.*$") -ConfigType = Union[str, int, float, bool] +ConfigType = str | int | float | bool ConfigOptionsDictType = dict[str, ConfigType] -ConfigSectionSourcesType = dict[str, Union[str, tuple[str, str]]] +ConfigSectionSourcesType = dict[str, str | tuple[str, str]] ConfigSourcesType = dict[str, ConfigSectionSourcesType] ENV_VAR_PREFIX = "AIRFLOW__" @@ -355,6 +355,25 @@ def sensitive_config_values(self) -> set[tuple[str, str]]: ("api", "ssl_key"): ("webserver", "web_server_ssl_key", "3.0"), ("api", "access_logfile"): ("webserver", "access_logfile", "3.0"), ("triggerer", "capacity"): ("triggerer", "default_capacity", "3.0"), + ("api", "expose_config"): ("webserver", "expose_config", "3.0.1"), + ("fab", "access_denied_message"): ("webserver", "access_denied_message", "3.0.2"), + ("fab", "expose_hostname"): ("webserver", "expose_hostname", "3.0.2"), + ("fab", "navbar_color"): ("webserver", "navbar_color", "3.0.2"), + ("fab", "navbar_text_color"): ("webserver", "navbar_text_color", "3.0.2"), + ("fab", "navbar_hover_color"): ("webserver", "navbar_hover_color", "3.0.2"), + ("fab", "navbar_text_hover_color"): ("webserver", "navbar_text_hover_color", "3.0.2"), + ("api", "secret_key"): ("webserver", "secret_key", "3.0.2"), + ("api", "enable_swagger_ui"): ("webserver", "enable_swagger_ui", "3.0.2"), + ("dag_processor", "parsing_pre_import_modules"): ("scheduler", "parsing_pre_import_modules", "3.0.4"), + ("api", "grid_view_sorting_order"): ("webserver", "grid_view_sorting_order", "3.1.0"), + ("api", "log_fetch_timeout_sec"): ("webserver", "log_fetch_timeout_sec", "3.1.0"), + ("api", "hide_paused_dags_by_default"): ("webserver", "hide_paused_dags_by_default", "3.1.0"), + ("api", "page_size"): ("webserver", "page_size", "3.1.0"), + ("api", "default_wrap"): ("webserver", "default_wrap", "3.1.0"), + ("api", "auto_refresh_interval"): ("webserver", "auto_refresh_interval", "3.1.0"), + ("api", "require_confirmation_dag_change"): ("webserver", "require_confirmation_dag_change", "3.1.0"), + ("api", "instance_name"): ("webserver", "instance_name", "3.1.0"), + ("api", "log_config"): ("api", "access_logfile", "3.1.0"), } # A mapping of new section -> (old section, 
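# With the move from [scheduler] to [dag_processor] (mapped in configuration.py
# above), the environment override is now
# AIRFLOW__DAG_PROCESSOR__PARSING_PRE_IMPORT_MODULES=False.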
since_version). @@ -406,7 +425,7 @@ def inversed_deprecated_sections(self): # celery_logging_level can be empty, which uses logging_level as fallback ("logging", "celery_logging_level"): [*_available_logging_levels, ""], ("webserver", "analytical_tool"): ["google_analytics", "metarouter", "segment", "matomo", ""], - ("webserver", "grid_view_sorting_order"): ["topological", "hierarchical_alphabetical"], + ("api", "grid_view_sorting_order"): ["topological", "hierarchical_alphabetical"], } upgraded_values: dict[tuple[str, str], str] @@ -942,10 +961,10 @@ def get_mandatory_list_value(self, section: str, key: str, **kwargs) -> list[str @overload # type: ignore[override] def get(self, section: str, key: str, fallback: str = ..., **kwargs) -> str: ... - @overload # type: ignore[override] + @overload def get(self, section: str, key: str, **kwargs) -> str | None: ... - def get( # type: ignore[override,misc] + def get( # type: ignore[misc] self, section: str, key: str, @@ -1294,7 +1313,7 @@ def gettimedelta( def read( self, - filenames: (str | bytes | os.PathLike | Iterable[str | bytes | os.PathLike]), + filenames: str | bytes | os.PathLike | Iterable[str | bytes | os.PathLike], encoding=None, ): super().read(filenames=filenames, encoding=encoding) @@ -2083,7 +2102,7 @@ def load_standard_airflow_configuration(airflow_config_parser: AirflowConfigPars ) else: # there - AIRFLOW_HOME = airflow_config_parser.get("core", "airflow_home") # type: ignore[assignment] + AIRFLOW_HOME = airflow_config_parser.get("core", "airflow_home") warnings.warn(msg, category=DeprecationWarning, stacklevel=1) diff --git a/airflow-core/src/airflow/dag_processing/bundles/base.py b/airflow-core/src/airflow/dag_processing/bundles/base.py index 5d49bf43fd4a4..1ab14abd047fb 100644 --- a/airflow-core/src/airflow/dag_processing/bundles/base.py +++ b/airflow-core/src/airflow/dag_processing/bundles/base.py @@ -22,6 +22,7 @@ import os import shutil import tempfile +import warnings from abc import ABC, abstractmethod from contextlib import contextmanager from dataclasses import dataclass, field @@ -35,7 +36,6 @@ from sqlalchemy_utils.types.enriched_datetime.pendulum_datetime import pendulum from airflow.configuration import conf -from airflow.dag_processing.bundles.manager import DagBundlesManager if TYPE_CHECKING: from pendulum import DateTime @@ -217,7 +217,10 @@ def remove_stale_bundle_versions(self): This isn't really necessary on worker types that don't share storage with other processes. """ + from airflow.dag_processing.bundles.manager import DagBundlesManager + log.info("checking for stale bundle versions locally") + bundles = list(DagBundlesManager().get_all_dag_bundles()) for bundle in bundles: if not bundle.supports_versioning: @@ -256,6 +259,7 @@ def __init__( name: str, refresh_interval: int = conf.getint("dag_processor", "refresh_interval"), version: str | None = None, + view_url_template: str | None = None, ) -> None: self.name = name self.version = version @@ -268,6 +272,8 @@ def __init__( self.versions_dir = get_bundle_versions_base_folder(bundle_name=self.name) """Where bundle versions are stored locally for this bundle.""" + self._view_url_template = view_url_template + def initialize(self) -> None: """ Initialize the bundle. @@ -280,9 +286,21 @@ def initialize(self) -> None: This method must ultimately be safe to call concurrently from different threads or processes. If it isn't naturally safe, you'll need to make it so with some form of locking. 
There is a `lock` context manager on this class available for this purpose. + + If you override this method, ensure you call `super().initialize()` + at the end of your method, after the bundle is initialized, not at the beginning. """ self.is_initialized = True + # Check if the bundle path exists after initialization + bundle_path = self.path + if not bundle_path.exists(): + log.warning( + "Bundle '%s' path does not exist: %s. This may cause DAG loading issues.", + self.name, + bundle_path, + ) + @property @abstractmethod def path(self) -> Path: @@ -316,10 +334,34 @@ def view_url(self, version: str | None = None) -> str | None: URL to view the bundle on an external website. This is shown to users in the Airflow UI, allowing them to navigate to this url for more details about that version of the bundle. This needs to function without `initialize` being called. - :param version: Version to view :return: URL to view the bundle """ + warnings.warn( + "The 'view_url' method is deprecated and will be removed in a future version. " + "Use 'view_url_template' instead.", + DeprecationWarning, + stacklevel=2, + ) + return None + + def view_url_template(self) -> str | None: + """ + URL template to view the bundle on an external website. + + This is shown to users in the Airflow UI, allowing them to navigate to + this url for more details about that version of the bundle. + + The template should use format string placeholders like {version}, {subdir}, etc. + Common placeholders: + - {version}: The version identifier + - {subdir}: The subdirectory within the bundle (if applicable) + + This needs to function without `initialize` being called. + + :return: URL template string or None if not applicable + """ + return self._view_url_template @contextmanager def lock(self): diff --git a/airflow-core/src/airflow/dag_processing/bundles/manager.py b/airflow-core/src/airflow/dag_processing/bundles/manager.py index 18e855230bce0..cf3b7c5104824 100644 --- a/airflow-core/src/airflow/dag_processing/bundles/manager.py +++ b/airflow-core/src/airflow/dag_processing/bundles/manager.py @@ -16,8 +16,12 @@ # under the License. from __future__ import annotations +import warnings from typing import TYPE_CHECKING +from itsdangerous import URLSafeSerializer +from sqlalchemy import delete + from airflow.configuration import conf from airflow.exceptions import AirflowConfigException from airflow.models.dagbundle import DagBundleModel @@ -79,6 +83,61 @@ def _add_example_dag_bundle(config_list): ) + +def _is_safe_bundle_url(url: str) -> bool: + """ + Check if a bundle URL is safe to use. + + This function validates that the URL: + - Uses HTTP or HTTPS schemes (no JavaScript, data, or other schemes) + - Is properly formatted + - Doesn't contain malicious content + """ + import logging + from urllib.parse import urlparse + + logger = logging.getLogger(__name__) + + if not url: + return False + + try: + parsed = urlparse(url) + if parsed.scheme not in {"http", "https"}: + logger.error( + "Bundle URL uses unsafe scheme '%s'. 
Only 'http' and 'https' are allowed", parsed.scheme + ) + return False + + if not parsed.netloc: + logger.error("Bundle URL '%s' has no network location", url) + return False + + if any(ord(c) < 32 for c in url): + logger.error("Bundle URL '%s' contains control characters (ASCII < 32)", url) + return False + + return True + except Exception as e: + logger.error("Failed to parse bundle URL '%s': %s", url, str(e)) + return False + + +def _sign_bundle_url(url: str, bundle_name: str) -> str: + """ + Sign a bundle URL for integrity verification. + + :param url: The URL to sign + :param bundle_name: The name of the bundle (used in the payload) + :return: The signed URL token + """ + serializer = URLSafeSerializer(conf.get_mandatory_value("core", "fernet_key")) + payload = { + "url": url, + "bundle_name": bundle_name, + } + return serializer.dumps(payload) + + class DagBundlesManager(LoggingMixin): """Manager for DAG bundles.""" @@ -122,17 +181,82 @@ def parse_config(self) -> None: @provide_session def sync_bundles_to_db(self, *, session: Session = NEW_SESSION) -> None: self.log.debug("Syncing DAG bundles to the database") + + def _extract_and_sign_template(bundle_name: str) -> tuple[str | None, dict]: + bundle_instance = self.get_bundle(bundle_name) + new_template_ = bundle_instance.view_url_template() + new_params_ = self._extract_template_params(bundle_instance) + if new_template_: + if not _is_safe_bundle_url(new_template_): + self.log.warning( + "Bundle %s has unsafe URL template '%s', skipping URL update", + bundle_name, + new_template_, + ) + new_template_ = None + else: + # Sign the URL for integrity verification + new_template_ = _sign_bundle_url(new_template_, bundle_name) + self.log.debug("Signed URL template for bundle %s", bundle_name) + return new_template_, new_params_ + stored = {b.name: b for b in session.query(DagBundleModel).all()} + for name in self._bundle_config.keys(): if bundle := stored.pop(name, None): bundle.active = True + new_template, new_params = _extract_and_sign_template(name) + if new_template != bundle.signed_url_template: + bundle.signed_url_template = new_template + self.log.debug("Updated URL template for bundle %s", name) + if new_params != bundle.template_params: + bundle.template_params = new_params + self.log.debug("Updated template parameters for bundle %s", name) else: - session.add(DagBundleModel(name=name)) + new_template, new_params = _extract_and_sign_template(name) + new_bundle = DagBundleModel(name=name) + new_bundle.signed_url_template = new_template + new_bundle.template_params = new_params + + session.add(new_bundle) self.log.info("Added new DAG bundle %s to the database", name) for name, bundle in stored.items(): bundle.active = False self.log.warning("DAG bundle %s is no longer found in config and has been disabled", name) + from airflow.models.errors import ParseImportError + + session.execute(delete(ParseImportError).where(ParseImportError.bundle_name == name)) + self.log.info("Deleted import errors for bundle %s which is no longer configured", name) + + @staticmethod + def _extract_template_params(bundle_instance: BaseDagBundle) -> dict: + """ + Extract template parameters from a bundle instance's view_url_template method. 
+ + :param bundle_instance: The bundle instance to extract parameters from + :return: Dictionary of template parameters + """ + import re + + params: dict[str, str] = {} + template = bundle_instance.view_url_template() + + if not template: + return params + + # Extract template placeholders using regex + # This matches {placeholder} patterns in the template + PLACEHOLDER_PATTERN = re.compile(r"\{([^}]+)\}") + placeholders = PLACEHOLDER_PATTERN.findall(template) + + # Extract values for each placeholder found in the template + for placeholder in placeholders: + field_value = getattr(bundle_instance, placeholder, None) + if field_value: + params[placeholder] = field_value + + return params def get_bundle(self, name: str, version: str | None = None) -> BaseDagBundle: """ @@ -159,5 +283,12 @@ def get_all_dag_bundles(self) -> Iterable[BaseDagBundle]: yield class_(name=name, version=None, **kwargs) def view_url(self, name: str, version: str | None = None) -> str | None: + warnings.warn( + "The 'view_url' method is deprecated and will be removed when providers " + "have Airflow 3.1 as the minimum supported version. " + "Use DagBundleModel.render_url() instead.", + DeprecationWarning, + stacklevel=2, + ) bundle = self.get_bundle(name, version) return bundle.view_url(version=version) diff --git a/airflow-core/src/airflow/dag_processing/bundles/provider.yaml b/airflow-core/src/airflow/dag_processing/bundles/provider.yaml index 4d209d351ed4a..6ce726e5e84c3 100644 --- a/airflow-core/src/airflow/dag_processing/bundles/provider.yaml +++ b/airflow-core/src/airflow/dag_processing/bundles/provider.yaml @@ -23,7 +23,10 @@ description: | state: not-ready source-date-epoch: 1726861127 -# note that those versions are maintained by release manager - do not update them manually +# Note that those versions are maintained by the release manager - do not update them manually +# except when another provider in the sources requires >= the new provider version.
+# In such a case, adding >= NEW_VERSION and bumping the provider to NEW_VERSION have +# to be done in the same PR versions: - 1.0.0 diff --git a/airflow-core/src/airflow/dag_processing/collection.py b/airflow-core/src/airflow/dag_processing/collection.py index 591ad687f9953..6b65a2335713a 100644 --- a/airflow-core/src/airflow/dag_processing/collection.py +++ b/airflow-core/src/airflow/dag_processing/collection.py @@ -31,10 +31,11 @@ import traceback from typing import TYPE_CHECKING, NamedTuple -from sqlalchemy import delete, func, insert, select, tuple_ +from sqlalchemy import delete, func, insert, select, tuple_, update from sqlalchemy.exc import OperationalError from sqlalchemy.orm import joinedload, load_only +from airflow._shared.timezones.timezone import utcnow from airflow.assets.manager import asset_manager from airflow.models.asset import ( AssetActive, @@ -44,6 +45,7 @@ DagScheduleAssetNameReference, DagScheduleAssetReference, DagScheduleAssetUriReference, + TaskInletAssetReference, TaskOutletAssetReference, ) from airflow.models.dag import DAG, DagModel, DagOwnerAttributes, DagTag @@ -55,7 +57,6 @@ from airflow.triggers.base import BaseEventTrigger from airflow.utils.retries import MAX_DB_RETRIES, run_with_db_retries from airflow.utils.sqlalchemy import with_row_locks -from airflow.utils.timezone import utcnow from airflow.utils.types import DagRunType if TYPE_CHECKING: @@ -199,7 +200,7 @@ def _serialize_dag_capturing_errors( if not dag_was_updated: # Check and update DagCode DagCode.update_source_code(dag.dag_id, dag.fileloc) - elif "FabAuthManager" in conf.get("core", "auth_manager"): + if "FabAuthManager" in conf.get("core", "auth_manager"): _sync_dag_perms(dag, session=session) return [] @@ -208,8 +209,12 @@ def _serialize_dag_capturing_errors( except Exception: log.exception("Failed to write serialized DAG dag_id=%s fileloc=%s", dag.dag_id, dag.fileloc) dagbag_import_error_traceback_depth = conf.getint("core", "dagbag_import_error_traceback_depth") - # todo AIP-66: this needs to use bundle name / rel fileloc instead - return [(dag.fileloc, traceback.format_exc(limit=-dagbag_import_error_traceback_depth))] + return [ + ( + (bundle_name, dag.relative_fileloc), + traceback.format_exc(limit=-dagbag_import_error_traceback_depth), + ) + ] def _sync_dag_perms(dag: MaybeSerializedDAG, session: Session): @@ -245,7 +250,10 @@ def _update_dag_warnings( def _update_import_errors( - files_parsed: set[str], bundle_name: str, import_errors: dict[str, str], session: Session + files_parsed: set[tuple[str, str]], + bundle_name: str, + import_errors: dict[tuple[str, str], str], + session: Session, ): from airflow.listeners.listener import get_listener_manager @@ -254,58 +262,81 @@ def _update_import_errors( session.execute( delete(ParseImportError).where( - ParseImportError.filename.in_(list(files_parsed)), ParseImportError.bundle_name == bundle_name + tuple_(ParseImportError.bundle_name, ParseImportError.filename).in_(files_parsed) ) ) + # the below query has to match (bundle_name, filename) tuple in that order since the + # import_errors list is a dict with keys as (bundle_name, relative_fileloc) existing_import_error_files = set( - session.execute(select(ParseImportError.filename, ParseImportError.bundle_name)) + session.execute(select(ParseImportError.bundle_name, ParseImportError.filename)) ) - # Add the errors of the processed files - for filename, stacktrace in import_errors.items(): - if (filename, bundle_name) in existing_import_error_files: - session.query(ParseImportError).where( - 
ParseImportError.filename == filename, ParseImportError.bundle_name == bundle_name - ).update( - { - "filename": filename, - "bundle_name": bundle_name, - "timestamp": utcnow(), - "stacktrace": stacktrace, - }, + for key, stacktrace in import_errors.items(): + bundle_name_, relative_fileloc = key + + if key in existing_import_error_files: + session.execute( + update(ParseImportError) + .where( + ParseImportError.filename == relative_fileloc, + ParseImportError.bundle_name == bundle_name_, + ) + .values( + filename=relative_fileloc, + bundle_name=bundle_name_, + timestamp=utcnow(), + stacktrace=stacktrace, + ), ) # sending notification when an existing dag import error occurs try: + # todo: make listener accept bundle_name and relative_filename + import_error = session.scalar( + select(ParseImportError).where( + ParseImportError.bundle_name == bundle_name_, + ParseImportError.filename == relative_fileloc, + ) + ) get_listener_manager().hook.on_existing_dag_import_error( - filename=filename, stacktrace=stacktrace + filename=import_error.full_file_path(), stacktrace=stacktrace ) except Exception: log.exception("error calling listener") else: - session.add( - ParseImportError( - filename=filename, - bundle_name=bundle_name, - timestamp=utcnow(), - stacktrace=stacktrace, - ) + import_error = ParseImportError( + filename=relative_fileloc, + bundle_name=bundle_name, + timestamp=utcnow(), + stacktrace=stacktrace, ) + session.add(import_error) # sending notification when a new dag import error occurs try: - get_listener_manager().hook.on_new_dag_import_error(filename=filename, stacktrace=stacktrace) + get_listener_manager().hook.on_new_dag_import_error( + filename=import_error.full_file_path(), stacktrace=stacktrace + ) except Exception: log.exception("error calling listener") - session.query(DagModel).filter( - DagModel.fileloc == filename, DagModel.bundle_name == bundle_name - ).update({"has_import_errors": True}) + session.execute( + update(DagModel) + .where( + DagModel.relative_fileloc == relative_fileloc, + ) + .values( + has_import_errors=True, + bundle_name=bundle_name, + is_stale=True, + ) + .execution_options(synchronize_session="fetch") + ) def update_dag_parsing_results_in_db( bundle_name: str, bundle_version: str | None, dags: Collection[MaybeSerializedDAG], - import_errors: dict[str, str], + import_errors: dict[tuple[str, str], str], warnings: set[DagWarning], session: Session, *, @@ -355,13 +386,16 @@ def update_dag_parsing_results_in_db( # Only now we are "complete" do we update import_errors - don't want to record errors from # previous failed attempts import_errors.update(dict(serialize_errors)) - # Record import errors into the ORM - we don't retry on this one as it's not as critical that it works try: # TODO: This won't clear errors for files that exist that no longer contain DAGs. Do we need to pass # in the list of file parsed? 
- good_dag_filelocs = {dag.fileloc for dag in dags if dag.fileloc not in import_errors} + good_dag_filelocs = { + (bundle_name, dag.relative_fileloc) + for dag in dags + if dag.relative_fileloc is not None and (bundle_name, dag.relative_fileloc) not in import_errors + } _update_import_errors( files_parsed=good_dag_filelocs, bundle_name=bundle_name, @@ -459,6 +493,9 @@ def update_dags( "core", "max_consecutive_failed_dag_runs_per_dag" ) + if dag.deadline is not None: + dm.deadline = dag.deadline + if hasattr(dag, "has_task_concurrency_limits"): dm.has_task_concurrency_limits = dag.has_task_concurrency_limits else: @@ -572,7 +609,7 @@ def _find_active_assets(name_uri_assets: Iterable[tuple[str, str]], session: Ses select(AssetModel.name, AssetModel.uri).where( tuple_(AssetModel.name, AssetModel.uri).in_(name_uri_assets), AssetModel.active.has(), - AssetModel.consuming_dags.any( + AssetModel.scheduled_dags.any( DagScheduleAssetReference.dag.has(~DagModel.is_stale & ~DagModel.is_paused) ), ) @@ -587,6 +624,7 @@ class AssetModelOperation(NamedTuple): schedule_asset_alias_references: dict[str, list[AssetAlias]] schedule_asset_name_references: set[tuple[str, str]] # dag_id, ref_name. schedule_asset_uri_references: set[tuple[str, str]] # dag_id, ref_uri. + inlet_references: dict[str, list[tuple[str, Asset]]] outlet_references: dict[str, list[tuple[str, Asset]]] assets: dict[tuple[str, str], Asset] asset_aliases: dict[str, AssetAlias] @@ -614,6 +652,9 @@ def collect(cls, dags: dict[str, MaybeSerializedDAG]) -> Self: for ref in dag.timetable.asset_condition.iter_asset_refs() if isinstance(ref, AssetUriRef) }, + inlet_references={ + dag_id: list(dag.get_task_assets(inlets=True, outlets=False)) for dag_id, dag in dags.items() + }, outlet_references={ dag_id: list(dag.get_task_assets(inlets=False, outlets=True)) for dag_id, dag in dags.items() }, @@ -793,6 +834,24 @@ def add_task_asset_references( # Optimization: No assets means there are no references to update. if not assets: return + for dag_id, references in self.inlet_references.items(): + # Optimization: no references at all; this is faster than repeated delete(). + if not references: + dags[dag_id].task_inlet_asset_references = [] + continue + referenced_inlets = { + (task_id, asset.id) + for task_id, asset in ((task_id, assets[d.name, d.uri]) for task_id, d in references) + } + orm_refs = {(r.task_id, r.asset_id): r for r in dags[dag_id].task_inlet_asset_references} + for key, ref in orm_refs.items(): + if key not in referenced_inlets: + session.delete(ref) + session.bulk_save_objects( + TaskInletAssetReference(asset_id=asset_id, dag_id=dag_id, task_id=task_id) + for task_id, asset_id in referenced_inlets + if (task_id, asset_id) not in orm_refs + ) for dag_id, references in self.outlet_references.items(): # Optimization: no references at all; this is faster than repeated delete(). 
if not references: @@ -913,7 +972,7 @@ def add_asset_trigger_references( # Remove references from assets no longer used orphan_assets = session.scalars( - select(AssetModel).filter(~AssetModel.consuming_dags.any()).filter(AssetModel.triggers.any()) + select(AssetModel).filter(~AssetModel.scheduled_dags.any()).filter(AssetModel.triggers.any()) ) for asset_model in orphan_assets: if (asset_model.name, asset_model.uri) not in self.assets: diff --git a/airflow-core/src/airflow/dag_processing/manager.py b/airflow-core/src/airflow/dag_processing/manager.py index b8ddf688dcb2a..4557e52a46536 100644 --- a/airflow-core/src/airflow/dag_processing/manager.py +++ b/airflow-core/src/airflow/dag_processing/manager.py @@ -48,6 +48,8 @@ from uuid6 import uuid7 import airflow.models +from airflow._shared.timezones import timezone +from airflow.api_fastapi.execution_api.app import InProcessExecutionAPI from airflow.configuration import conf from airflow.dag_processing.bundles.manager import DagBundlesManager from airflow.dag_processing.collection import update_dag_parsing_results_in_db @@ -60,11 +62,10 @@ from airflow.models.dagwarning import DagWarning from airflow.models.db_callback_request import DbCallbackRequest from airflow.models.errors import ParseImportError +from airflow.sdk import SecretCache from airflow.sdk.log import init_log_file, logging_processors -from airflow.secrets.cache import SecretCache from airflow.stats import Stats -from airflow.traces.tracer import Trace -from airflow.utils import timezone +from airflow.traces.tracer import DebugTrace from airflow.utils.file import list_py_file_paths, might_contain_dag from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname @@ -76,10 +77,13 @@ from airflow.utils.sqlalchemy import prohibit_commit, with_row_locks if TYPE_CHECKING: + from socket import socket + from sqlalchemy.orm import Session from airflow.callbacks.callback_requests import CallbackRequest from airflow.dag_processing.bundles.base import BaseDagBundle + from airflow.sdk.api.client import Client class DagParsingStat(NamedTuple): @@ -135,6 +139,16 @@ def _resolve_path(instance: Any, attribute: attrs.Attribute, val: str | os.PathL return val +def utc_epoch() -> datetime: + # pendulum utcnow() is not used as that sets a TimezoneInfo object + # instead of a Timezone. This is not picklable and also creates issues + # when using replace() + result = datetime(1970, 1, 1) + result = result.replace(tzinfo=timezone.utc) + + return result + + @attrs.define(kw_only=True) class DagFileProcessorManager(LoggingMixin): """ @@ -147,7 +161,7 @@ class DagFileProcessorManager(LoggingMixin): over again, but no more often than the specified interval. :param max_runs: The number of times to parse each file. -1 for unlimited. - :param bundles_names_to_parse: List of bundle names to parse. If None, all bundles are parsed. + :param bundle_names_to_parse: List of bundle names to parse. If None, all bundles are parsed. 
:param processor_timeout: How long to wait before timing out a DAG file processor """ @@ -213,6 +227,9 @@ class DagFileProcessorManager(LoggingMixin): _force_refresh_bundles: set[str] = attrs.field(factory=set, init=False) """List of bundles that need to be force refreshed in the next loop""" + _api_server: InProcessExecutionAPI = attrs.field(init=False, factory=InProcessExecutionAPI) + """API server to interact with Metadata DB""" + def register_exit_signals(self): """Register signals that stop child processes.""" signal.signal(signal.SIGINT, self._exit_gracefully) @@ -383,17 +400,18 @@ def _service_processor_sockets(self, timeout: float | None = 1.0): """ events = self.selector.select(timeout=timeout) for key, _ in events: - socket_handler = key.data + socket_handler, on_close = key.data # BrokenPipeError should be caught and treated as if the handler returned false, similar # to EOF case try: need_more = socket_handler(key.fileobj) - except BrokenPipeError: + except (BrokenPipeError, ConnectionResetError): need_more = False if not need_more: - self.selector.unregister(key.fileobj) - key.fileobj.close() # type: ignore[union-attr] + sock: socket = key.fileobj # type: ignore[assignment] + on_close(sock) + sock.close() def _queue_requested_files_for_parsing(self) -> None: """Queue any files requested for parsing as requested by users via UI/API.""" @@ -500,7 +518,7 @@ def _refresh_dag_bundles(self, known_files: dict[str, set[DagFileInfo]]): with create_session() as session: bundle_model: DagBundleModel = session.get(DagBundleModel, bundle.name) elapsed_time_since_refresh = ( - now - (bundle_model.last_refreshed or timezone.utc_epoch()) + now - (bundle_model.last_refreshed or utc_epoch()) ).total_seconds() if bundle.supports_versioning: # we will also check the version of the bundle to see if another DAG processor has seen @@ -568,7 +586,7 @@ def _refresh_dag_bundles(self, known_files: dict[str, set[DagFileInfo]]): self.deactivate_deleted_dags(bundle_name=bundle.name, present=found_files) self.clear_orphaned_import_errors( bundle_name=bundle.name, - observed_filelocs={str(x.absolute_path) for x in found_files}, # todo: make relative + observed_filelocs={str(x.rel_path) for x in found_files}, # todo: make relative ) def _find_files_in_bundle(self, bundle: BaseDagBundle) -> list[Path]: @@ -679,7 +697,7 @@ def _log_file_processing_stats(self, known_files: dict[str, set[DagFileInfo]]): rows = [] utcnow = timezone.utcnow() - now = time.time() + now = time.monotonic() for files in known_files.values(): for file in files: @@ -710,7 +728,7 @@ def _log_file_processing_stats(self, known_files: dict[str, set[DagFileInfo]]): ) # Sort by longest last runtime. (Can't sort None values in python3) - rows.sort(key=lambda x: x[5] or 0.0, reverse=True) + rows.sort(key=lambda x: x[6] or 0.0, reverse=True) formatted_rows = [] for ( @@ -806,7 +824,7 @@ def _collect_results(self, session: Session = NEW_SESSION): # Collect the DAGS and import errors into the DB, emit metrics etc. 
self._file_stats[file] = process_parse_results( - run_duration=time.time() - proc.start_time, + run_duration=time.monotonic() - proc.start_time, finish_time=timezone.utcnow(), run_count=self._file_stats[file].run_count, bundle_name=file.bundle_name, @@ -867,6 +885,15 @@ def _get_logger_for_dag_file(self, dag_file: DagFileInfo): underlying_logger, processors=processors, logger_name="processor" ).bind(), logger_filehandle + @functools.cached_property + def client(self) -> Client: + from airflow.sdk.api.client import Client + + client = Client(base_url=None, token="", dry_run=True, transport=self._api_server.transport) + # Mypy is wrong -- the setter accepts a string on the property setter! `URLType = URL | str` + client.base_url = "http://in-process.invalid./" + return client + def _create_process(self, dag_file: DagFileInfo) -> DagFileProcessorProcess: id = uuid7() @@ -881,6 +908,7 @@ def _create_process(self, dag_file: DagFileInfo) -> DagFileProcessorProcess: selector=self.selector, logger=logger, logger_filehandle=logger_filehandle, + client=self.client, ) def _start_new_processes(self): @@ -992,7 +1020,7 @@ def prepare_file_queue(self, known_files: dict[str, set[DagFileInfo]]): def _kill_timed_out_processors(self): """Kill any file processors that timeout to defend against process hangs.""" - now = time.time() + now = time.monotonic() processors_to_remove = [] for file, processor in self._processors.items(): duration = now - processor.start_time @@ -1062,7 +1090,7 @@ def emit_metrics(self): This is called once every time around the parsing "loop" - i.e. after all files have been parsed. """ - with Trace.start_span(span_name="emit_metrics", component="DagFileProcessorManager") as span: + with DebugTrace.start_span(span_name="emit_metrics", component="DagFileProcessorManager") as span: parse_time = time.perf_counter() - self._parsing_start_time Stats.gauge("dag_processing.total_parse_time", parse_time) Stats.gauge("dagbag_size", sum(stat.num_dags for stat in self._file_stats.values())) @@ -1095,7 +1123,7 @@ def reload_configuration_for_dag_processing(): # iterating on https://github.com/apache/airflow/pull/19860 # The issue that describes the problem and possible remediation is # at https://github.com/apache/airflow/issues/19934 - importlib.reload(import_module(airflow.settings.LOGGING_CLASS_PATH.rsplit(".", 1)[0])) # type: ignore + importlib.reload(import_module(airflow.settings.LOGGING_CLASS_PATH.rsplit(".", 1)[0])) importlib.reload(airflow.settings) airflow.settings.initialize() del os.environ["CONFIG_PROCESSOR_MANAGER_LOGGER"] @@ -1126,11 +1154,16 @@ def process_parse_results( stat.import_errors = 1 else: # record DAGs and import errors to database + import_errors = {} + if parsing_result.import_errors: + import_errors = { + (bundle_name, rel_path): error for rel_path, error in parsing_result.import_errors.items() + } update_dag_parsing_results_in_db( bundle_name=bundle_name, bundle_version=bundle_version, dags=parsing_result.serialized_dags, - import_errors=parsing_result.import_errors or {}, + import_errors=import_errors, warnings=set(parsing_result.warnings or []), session=session, ) diff --git a/airflow-core/src/airflow/dag_processing/processor.py b/airflow-core/src/airflow/dag_processing/processor.py index 25f9e2a73ed87..ec2ad8699e953 100644 --- a/airflow-core/src/airflow/dag_processing/processor.py +++ b/airflow-core/src/airflow/dag_processing/processor.py @@ -16,12 +16,14 @@ # under the License. 
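The dag_processing/manager.py hunks above move processor start times and timeout checks from time.time() to time.monotonic(). A minimal sketch of why a monotonic clock is the safer choice for measuring durations (illustrative helper, not part of this change):

    import time

    def run_and_time(fn):
        # time.monotonic() only ever moves forward, so the difference below stays a
        # valid duration even if NTP or an operator adjusts the wall clock mid-run;
        # time.time() deltas can come out negative or wildly inflated in that case.
        start = time.monotonic()
        fn()
        return time.monotonic() - start

    print(run_and_time(lambda: time.sleep(0.1)))  # ~0.1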
from __future__ import annotations -import functools +import contextlib +import importlib import os import sys import traceback +from collections.abc import Callable, Sequence from pathlib import Path -from typing import TYPE_CHECKING, Annotated, BinaryIO, Callable, ClassVar, Literal, Union +from typing import TYPE_CHECKING, Annotated, BinaryIO, ClassVar, Literal import attrs from pydantic import BaseModel, Field, TypeAdapter @@ -38,14 +40,21 @@ DeleteVariable, ErrorResponse, GetConnection, + GetPreviousDagRun, + GetPrevSuccessfulDagRun, GetVariable, OKResponse, + PreviousDagRunResult, + PrevSuccessfulDagRunResult, PutVariable, VariableResult, ) from airflow.sdk.execution_time.supervisor import WatchedSubprocess +from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance from airflow.serialization.serialized_objects import LazyDeserializedDAG, SerializedDAG from airflow.stats import Stats +from airflow.utils.file import iter_airflow_imports +from airflow.utils.state import TaskInstanceState if TYPE_CHECKING: from structlog.typing import FilteringBoundLogger @@ -55,50 +64,117 @@ from airflow.sdk.definitions.context import Context from airflow.typing_compat import Self + +class DagFileParseRequest(BaseModel): + """ + Request for DAG File Parsing. + + This is the request that the manager will send to the DAG parser with the dag file and + any other necessary metadata. + """ + + file: str + + bundle_path: Path + """Passing bundle path around lets us figure out relative file path.""" + + callback_requests: list[CallbackRequest] = Field(default_factory=list) + type: Literal["DagFileParseRequest"] = "DagFileParseRequest" + + +class DagFileParsingResult(BaseModel): + """ + Result of DAG File Parsing. + + This is the result of a successful DAG parse, in this class, we gather all serialized DAGs, + import errors and warnings to send back to the scheduler to store in the DB. + """ + + fileloc: str + serialized_dags: list[LazyDeserializedDAG] + warnings: list | None = None + import_errors: dict[str, str] | None = None + type: Literal["DagFileParsingResult"] = "DagFileParsingResult" + + ToManager = Annotated[ - Union["DagFileParsingResult", GetConnection, GetVariable, PutVariable, DeleteVariable], + DagFileParsingResult + | GetConnection + | GetVariable + | PutVariable + | DeleteVariable + | GetPrevSuccessfulDagRun + | GetPreviousDagRun, Field(discriminator="type"), ] ToDagProcessor = Annotated[ - Union["DagFileParseRequest", ConnectionResult, VariableResult, ErrorResponse, OKResponse], + DagFileParseRequest + | ConnectionResult + | VariableResult + | PreviousDagRunResult + | PrevSuccessfulDagRunResult + | ErrorResponse + | OKResponse, Field(discriminator="type"), ] -def _parse_file_entrypoint(): - import os +def _pre_import_airflow_modules(file_path: str, log: FilteringBoundLogger) -> None: + """ + Pre-import Airflow modules found in the given file. + + This prevents modules from being re-imported in each processing process, + saving CPU time and memory. 
+ (The default value of "parsing_pre_import_modules" is set to True) + :param file_path: Path to the file to scan for imports + :param log: Logger instance to use for warnings + """ + if not conf.getboolean("dag_processor", "parsing_pre_import_modules", fallback=True): + return + + for module in iter_airflow_imports(file_path): + try: + importlib.import_module(module) + except ModuleNotFoundError as e: + log.warning("Error when trying to pre-import module '%s' found in %s: %s", module, file_path, e) + + +def _parse_file_entrypoint(): import structlog - from airflow.sdk.execution_time import task_runner + from airflow.sdk.execution_time import comms, task_runner # Parse DAG file, send JSON back up! - comms_decoder = task_runner.CommsDecoder[ToDagProcessor, ToManager]( - input=sys.stdin, - decoder=TypeAdapter[ToDagProcessor](ToDagProcessor), + comms_decoder = comms.CommsDecoder[ToDagProcessor, ToManager]( + body_decoder=TypeAdapter[ToDagProcessor](ToDagProcessor), ) - msg = comms_decoder.get_message() + msg = comms_decoder._get_response() if not isinstance(msg, DagFileParseRequest): raise RuntimeError(f"Required first message to be a DagFileParseRequest, it was {msg}") - comms_decoder.request_socket = os.fdopen(msg.requests_fd, "wb", buffering=0) task_runner.SUPERVISOR_COMMS = comms_decoder log = structlog.get_logger(logger_name="task") + # Put bundle root on sys.path if needed. This allows the dag bundle to add + # code in util modules to be shared between files within the same bundle. + if (bundle_root := os.fspath(msg.bundle_path)) not in sys.path: + sys.path.append(bundle_root) + result = _parse_file(msg, log) if result is not None: - comms_decoder.send_request(log, result) + comms_decoder.send(result) def _parse_file(msg: DagFileParseRequest, log: FilteringBoundLogger) -> DagFileParsingResult | None: # TODO: Set known_pool names on DagBag! + bag = DagBag( dag_folder=msg.file, bundle_path=msg.bundle_path, include_examples=False, - safe_mode=True, load_op_links=False, ) if msg.callback_requests: @@ -143,15 +219,14 @@ def _execute_callbacks( for request in callback_requests: log.debug("Processing Callback Request", request=request.to_json()) if isinstance(request, TaskCallbackRequest): - raise NotImplementedError( - "Haven't coded Task callback yet - https://github.com/apache/airflow/issues/44354!" - ) - # _execute_task_callbacks(dagbag, request) + _execute_task_callbacks(dagbag, request, log) if isinstance(request, DagCallbackRequest): _execute_dag_callbacks(dagbag, request, log) def _execute_dag_callbacks(dagbag: DagBag, request: DagCallbackRequest, log: FilteringBoundLogger) -> None: + from airflow.sdk.api.datamodels._generated import TIRunContext + dag = dagbag.dags[request.dag_id] callbacks = dag.on_failure_callback if request.is_failure_callback else dag.on_success_callback @@ -160,8 +235,27 @@ def _execute_dag_callbacks(dagbag: DagBag, request: DagCallbackRequest, log: Fil return callbacks = callbacks if isinstance(callbacks, list) else [callbacks] - # TODO:We need a proper context object! 
- context: Context = {} # type: ignore[assignment] + ctx_from_server = request.context_from_server + + if ctx_from_server is not None and ctx_from_server.last_ti is not None: + task = dag.get_task(ctx_from_server.last_ti.task_id) + + runtime_ti = RuntimeTaskInstance.model_construct( + **ctx_from_server.last_ti.model_dump(exclude_unset=True), + task=task, + _ti_context_from_server=TIRunContext.model_construct( + dag_run=ctx_from_server.dag_run, + max_tries=task.retries, + ), + ) + context = runtime_ti.get_template_context() + context["reason"] = request.msg + else: + context: Context = { # type: ignore[no-redef] + "dag": dag, + "run_id": request.run_id, + "reason": request.msg, + } for callback in callbacks: log.info( @@ -176,37 +270,65 @@ def _execute_dag_callbacks(dagbag: DagBag, request: DagCallbackRequest, log: Fil Stats.incr("dag.callback_exceptions", tags={"dag_id": request.dag_id}) -class DagFileParseRequest(BaseModel): - """ - Request for DAG File Parsing. - - This is the request that the manager will send to the DAG parser with the dag file and - any other necessary metadata. - """ - - file: str - - bundle_path: Path - """Passing bundle path around lets us figure out relative file path.""" +def _execute_task_callbacks(dagbag: DagBag, request: TaskCallbackRequest, log: FilteringBoundLogger) -> None: + if not request.is_failure_callback: + log.warning( + "Task callback requested but is not a failure callback", + dag_id=request.ti.dag_id, + task_id=request.ti.task_id, + run_id=request.ti.run_id, + ) + return - requests_fd: int - callback_requests: list[CallbackRequest] = Field(default_factory=list) - type: Literal["DagFileParseRequest"] = "DagFileParseRequest" + dag = dagbag.dags[request.ti.dag_id] + task = dag.get_task(request.ti.task_id) + if request.task_callback_type is TaskInstanceState.UP_FOR_RETRY: + callbacks = task.on_retry_callback + else: + callbacks = task.on_failure_callback -class DagFileParsingResult(BaseModel): - """ - Result of DAG File Parsing. + if not callbacks: + log.warning( + "Callback requested but no callback found", + dag_id=request.ti.dag_id, + task_id=request.ti.task_id, + run_id=request.ti.run_id, + ti_id=request.ti.id, + ) + return - This is the result of a successful DAG parse, in this class, we gather all serialized DAGs, - import errors and warnings to send back to the scheduler to store in the DB. 
- """ + callbacks = callbacks if isinstance(callbacks, Sequence) else [callbacks] + ctx_from_server = request.context_from_server - fileloc: str - serialized_dags: list[LazyDeserializedDAG] - warnings: list | None = None - import_errors: dict[str, str] | None = None - type: Literal["DagFileParsingResult"] = "DagFileParsingResult" + if ctx_from_server is not None: + runtime_ti = RuntimeTaskInstance.model_construct( + **request.ti.model_dump(exclude_unset=True), + task=task, + _ti_context_from_server=ctx_from_server, + max_tries=ctx_from_server.max_tries, + ) + else: + runtime_ti = RuntimeTaskInstance.model_construct( + **request.ti.model_dump(exclude_unset=True), + task=task, + ) + context = runtime_ti.get_template_context() + + def get_callback_representation(callback): + with contextlib.suppress(AttributeError): + return callback.__name__ + with contextlib.suppress(AttributeError): + return callback.__class__.__name__ + return callback + + for idx, callback in enumerate(callbacks): + callback_repr = get_callback_representation(callback) + log.info("Executing Task callback at index %d: %s", idx, callback_repr) + try: + callback(context) + except Exception: + log.exception("Error in callback at index %d: %s", idx, callback_repr) def in_process_api_server() -> InProcessExecutionAPI: @@ -232,6 +354,9 @@ class DagFileProcessorProcess(WatchedSubprocess): parsing_result: DagFileParsingResult | None = None decoder: ClassVar[TypeAdapter[ToManager]] = TypeAdapter[ToManager](ToManager) + client: Client + """The HTTP client to use for communication with the API server.""" + @classmethod def start( # type: ignore[override] cls, @@ -240,9 +365,14 @@ def start( # type: ignore[override] bundle_path: Path, callbacks: list[CallbackRequest], target: Callable[[], None] = _parse_file_entrypoint, + client: Client, **kwargs, ) -> Self: - proc: Self = super().start(target=target, **kwargs) + logger = kwargs["logger"] + + _pre_import_airflow_modules(os.fspath(path), logger) + + proc: Self = super().start(target=target, client=client, **kwargs) proc._on_child_started(callbacks, path, bundle_path) return proc @@ -255,29 +385,18 @@ def _on_child_started( msg = DagFileParseRequest( file=os.fspath(path), bundle_path=bundle_path, - requests_fd=self._requests_fd, callback_requests=callbacks, ) - self.send_msg(msg) - - @functools.cached_property - def client(self) -> Client: - from airflow.sdk.api.client import Client - - client = Client(base_url=None, token="", dry_run=True, transport=in_process_api_server().transport) - # Mypy is wrong -- the setter accepts a string on the property setter! 
`URLType = URL | str` - client.base_url = "http://in-process.invalid./" # type: ignore[assignment] - return client + self.send_msg(msg, request_id=0) - def _handle_request(self, msg: ToManager, log: FilteringBoundLogger) -> None: # type: ignore[override] + def _handle_request(self, msg: ToManager, log: FilteringBoundLogger, req_id: int) -> None: from airflow.sdk.api.datamodels._generated import ConnectionResponse, VariableResponse resp: BaseModel | None = None dump_opts = {} if isinstance(msg, DagFileParsingResult): self.parsing_result = msg - return - if isinstance(msg, GetConnection): + elif isinstance(msg, GetConnection): conn = self.client.connections.get(msg.conn_id) if isinstance(conn, ConnectionResponse): conn_result = ConnectionResult.from_conn_response(conn) @@ -297,12 +416,29 @@ def _handle_request(self, msg: ToManager, log: FilteringBoundLogger) -> None: # self.client.variables.set(msg.key, msg.value, msg.description) elif isinstance(msg, DeleteVariable): resp = self.client.variables.delete(msg.key) + elif isinstance(msg, GetPreviousDagRun): + resp = self.client.dag_runs.get_previous( + dag_id=msg.dag_id, + logical_date=msg.logical_date, + state=msg.state, + ) + elif isinstance(msg, GetPrevSuccessfulDagRun): + dagrun_resp = self.client.task_instances.get_previous_successful_dagrun(self.id) + dagrun_result = PrevSuccessfulDagRunResult.from_dagrun_response(dagrun_resp) + resp = dagrun_result + dump_opts = {"exclude_unset": True} else: log.error("Unhandled request", msg=msg) + self.send_msg( + None, + request_id=req_id, + error=ErrorResponse( + detail={"status_code": 400, "message": "Unhandled request"}, + ), + ) return - if resp: - self.send_msg(resp, **dump_opts) + self.send_msg(resp, request_id=req_id, error=None, **dump_opts) @property def is_ready(self) -> bool: @@ -310,11 +446,7 @@ def is_ready(self) -> bool: # Process still alive, def can't be finished yet return False - return self._num_open_sockets == 0 - - @property - def start_time(self) -> float: - return self._process.create_time() + return not self._open_sockets def wait(self) -> int: raise NotImplementedError(f"Don't call wait on {type(self).__name__} objects") diff --git a/airflow-core/src/airflow/datasets/__init__.py b/airflow-core/src/airflow/datasets/__init__.py index d082d6fbed854..d0622b67b1f19 100644 --- a/airflow-core/src/airflow/datasets/__init__.py +++ b/airflow-core/src/airflow/datasets/__init__.py @@ -32,7 +32,7 @@ _names_moved = { "DatasetAlias": ("airflow.sdk.definitions.asset", "AssetAlias"), "DatasetAll": ("airflow.sdk.definitions.asset", "AssetAll"), - "DatasetAny": ("airflow.sdk.definitions.asset", "DatasetAny"), + "DatasetAny": ("airflow.sdk.definitions.asset", "AssetAny"), "Dataset": ("airflow.sdk.definitions.asset", "Asset"), "expand_alias_to_datasets": ("airflow.models.asset", "expand_alias_to_assets"), } diff --git a/airflow-core/src/airflow/decorators/__init__.py b/airflow-core/src/airflow/decorators/__init__.py index 86f82799796a5..c648e0e3dcabb 100644 --- a/airflow-core/src/airflow/decorators/__init__.py +++ b/airflow-core/src/airflow/decorators/__init__.py @@ -16,22 +16,21 @@ # under the License. 
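The datasets/__init__.py hunk above corrects the _names_moved redirect for DatasetAny, and the decorators/__init__.py change below shifts `dag`, `task`, etc. onto the same lazy-redirection machinery via add_deprecated_classes. A rough sketch of the module-level __getattr__ pattern (PEP 562) that this style of shim relies on, using a simplified one-entry mapping rather than the real airflow.utils.deprecation_tools implementation:

    import importlib
    import warnings

    _names_moved = {"Dataset": ("airflow.sdk.definitions.asset", "Asset")}

    def __getattr__(name: str):
        # Python calls this only when `name` is missing from the module, so the
        # redirect (and the import it triggers) is paid lazily, on first access.
        if name in _names_moved:
            module_path, new_name = _names_moved[name]
            warnings.warn(
                f"{name} is deprecated; use {module_path}.{new_name} instead",
                DeprecationWarning,
                stacklevel=2,
            )
            return getattr(importlib.import_module(module_path), new_name)
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")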
from __future__ import annotations -from airflow.sdk.definitions.decorators import ( - TaskDecorator as TaskDecorator, - TaskDecoratorCollection as TaskDecoratorCollection, - dag as dag, - setup as setup, - task as task, - task_group as task_group, - teardown as teardown, -) from airflow.utils.deprecation_tools import add_deprecated_classes __deprecated_classes = { + __name__: { + "dag": "airflow.sdk.dag", + "setup": "airflow.sdk.setup", + "task": "airflow.sdk.task", + "task_group": "airflow.sdk.task_group", + "teardown": "airflow.sdk.teardown", + }, "base": { "DecoratedMappedOperator": "airflow.sdk.bases.decorator.DecoratedMappedOperator", "DecoratedOperator": "airflow.sdk.bases.decorator.DecoratedOperator", "TaskDecorator": "airflow.sdk.bases.decorator.TaskDecorator", + "TaskDecoratorCollection": "airflow.sdk.definitions.decorators.TaskDecoratorCollection", "get_unique_task_id": "airflow.sdk.bases.decorator.get_unique_task_id", "task_decorator_factory": "airflow.sdk.bases.decorator.task_decorator_factory", }, diff --git a/airflow-core/src/airflow/example_dags/example_asset_alias.py b/airflow-core/src/airflow/example_dags/example_asset_alias.py index d0a6a20188a92..5c4df1aa09c35 100644 --- a/airflow-core/src/airflow/example_dags/example_asset_alias.py +++ b/airflow-core/src/airflow/example_dags/example_asset_alias.py @@ -32,6 +32,7 @@ from __future__ import annotations +# [START example_asset_alias] import pendulum from airflow.sdk import DAG, Asset, AssetAlias, task @@ -94,3 +95,4 @@ def consume_asset_event_from_asset_alias(*, inlet_events=None): print(event) consume_asset_event_from_asset_alias() +# [END example_asset_alias] diff --git a/airflow-core/src/airflow/example_dags/example_assets.py b/airflow-core/src/airflow/example_dags/example_assets.py index 2bb3cffc527f8..3ab372112585c 100644 --- a/airflow-core/src/airflow/example_dags/example_assets.py +++ b/airflow-core/src/airflow/example_dags/example_assets.py @@ -52,6 +52,7 @@ from __future__ import annotations +# [START asset_def] import pendulum from airflow.providers.standard.operators.bash import BashOperator @@ -59,9 +60,7 @@ from airflow.timetables.assets import AssetOrTimeSchedule from airflow.timetables.trigger import CronTriggerTimetable -# [START asset_def] dag1_asset = Asset("s3://dag1/output_1.txt", extra={"hi": "bye"}) -# [END asset_def] dag2_asset = Asset("s3://dag2/output_1.txt", extra={"hi": "bye"}) dag3_asset = Asset("s3://dag3/output_3.txt", extra={"hi": "bye"}) @@ -189,3 +188,4 @@ task_id="conditional_asset_and_time_based_timetable", bash_command="sleep 5", ) +# [END asset_def] diff --git a/airflow-core/src/airflow/example_dags/example_dag_decorator.py b/airflow-core/src/airflow/example_dags/example_dag_decorator.py index c1f5b39233231..9f6e637c9178e 100644 --- a/airflow-core/src/airflow/example_dags/example_dag_decorator.py +++ b/airflow-core/src/airflow/example_dags/example_dag_decorator.py @@ -17,14 +17,16 @@ # under the License. 
from __future__ import annotations +import ipaddress + +# [START dag_decorator_usage] from typing import TYPE_CHECKING, Any import httpx import pendulum -from airflow.models.baseoperator import BaseOperator from airflow.providers.standard.operators.bash import BashOperator -from airflow.sdk import dag, task +from airflow.sdk import BaseOperator, dag, task if TYPE_CHECKING: from airflow.sdk import Context @@ -43,7 +45,6 @@ def execute(self, context: Context): return httpx.get(self.url).json() -# [START dag_decorator_usage] @dag( schedule=None, start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), @@ -61,9 +62,13 @@ def example_dag_decorator(url: str = "http://httpbin.org/get"): @task(multiple_outputs=True) def prepare_command(raw_json: dict[str, Any]) -> dict[str, str]: external_ip = raw_json["origin"] - return { - "command": f"echo 'Seems like today your server executing Airflow is connected from IP {external_ip}'", - } + try: + ipaddress.ip_address(external_ip) + return { + "command": f"echo 'Seems like today your server executing Airflow is connected from IP {external_ip}'", + } + except ValueError: + raise ValueError(f"Invalid IP address: '{external_ip}'.") command_info = prepare_command(get_ip.output) diff --git a/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping.py b/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping.py index 9f4f45511cf04..750c3da1ec17b 100644 --- a/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping.py +++ b/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping.py @@ -19,6 +19,7 @@ from __future__ import annotations +# [START example_dynamic_task_mapping] from datetime import datetime from airflow.sdk import DAG, task @@ -56,3 +57,5 @@ def add_10(num): _get_nums = get_nums() _times_2 = times_2.expand(num=_get_nums) add_10.expand(num=_times_2) + +# [END example_dynamic_task_mapping] diff --git a/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py b/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py index c762eee74f96e..07cd653d29b14 100644 --- a/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py +++ b/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py @@ -21,8 +21,7 @@ from datetime import datetime -from airflow.models.baseoperator import BaseOperator -from airflow.sdk import DAG +from airflow.sdk import DAG, BaseOperator class AddOneOperator(BaseOperator): diff --git a/airflow-core/src/airflow/example_dags/example_params_ui_tutorial.py b/airflow-core/src/airflow/example_dags/example_params_ui_tutorial.py index da6b4c585b1d3..40e4667d35b6f 100644 --- a/airflow-core/src/airflow/example_dags/example_params_ui_tutorial.py +++ b/airflow-core/src/airflow/example_dags/example_params_ui_tutorial.py @@ -58,7 +58,7 @@ type="integer", title="Your favorite number", description_md="Everybody should have a **favorite** number. Not only _math teachers_. 
" - "If you can not think of any at the moment please think of the 42 which is very famous because" + "If you can not think of any at the moment please think of the 42 which is very famous because " "of the book [The Hitchhiker's Guide to the Galaxy]" "(https://en.wikipedia.org/wiki/Phrases_from_The_Hitchhiker%27s_Guide_to_the_Galaxy#" "The_Answer_to_the_Ultimate_Question_of_Life,_the_Universe,_and_Everything_is_42).", @@ -99,7 +99,7 @@ format="date", title="Date Picker", description="Please select a date, use the button on the left for a pop-up calendar. " - "See that here are no times!", + "See that there are no times!", section="Typed parameters with Param object", ), "time": Param( @@ -181,7 +181,7 @@ # A multiple options selection can also be combined with values_display "multi_select_with_label": Param( ["2", "3"], - "Select from the list of options. See that options can have nicer text and still technical values" + "Select from the list of options. See that options can have nicer text and still technical values " "are propagated as values during trigger to the DAG.", type="array", title="Multi Select with Labels", diff --git a/airflow-core/src/airflow/example_dags/example_passing_params_via_test_command.py b/airflow-core/src/airflow/example_dags/example_passing_params_via_test_command.py index 5114bea07132e..9e4d3bf477845 100644 --- a/airflow-core/src/airflow/example_dags/example_passing_params_via_test_command.py +++ b/airflow-core/src/airflow/example_dags/example_passing_params_via_test_command.py @@ -46,15 +46,14 @@ def my_py_command(params, test_mode=None, task=None): @task(task_id="env_var_test_task") -def print_env_vars(test_mode=None): +def print_env_vars(): """ Print out the "foo" param passed in via `airflow tasks test example_passing_params_via_test_command env_var_test_task --env-vars '{"foo":"bar"}'` """ - if test_mode: - print(f"foo={os.environ.get('foo')}") - print(f"AIRFLOW_TEST_MODE={os.environ.get('AIRFLOW_TEST_MODE')}") + print(f"foo={os.environ.get('foo')}") + print(f"AIRFLOW_TEST_MODE={os.environ.get('AIRFLOW_TEST_MODE')}") with DAG( diff --git a/airflow-core/src/airflow/example_dags/example_setup_teardown.py b/airflow-core/src/airflow/example_dags/example_setup_teardown.py index a36e79a55e5f5..052377736ea59 100644 --- a/airflow-core/src/airflow/example_dags/example_setup_teardown.py +++ b/airflow-core/src/airflow/example_dags/example_setup_teardown.py @@ -23,7 +23,7 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.sdk import DAG -from airflow.utils.task_group import TaskGroup +from airflow.sdk.definitions.taskgroup import TaskGroup with DAG( dag_id="example_setup_teardown", diff --git a/airflow-core/src/airflow/example_dags/example_setup_teardown_taskflow.py b/airflow-core/src/airflow/example_dags/example_setup_teardown_taskflow.py index e554b4f9cae89..8b68f85ef826d 100644 --- a/airflow-core/src/airflow/example_dags/example_setup_teardown_taskflow.py +++ b/airflow-core/src/airflow/example_dags/example_setup_teardown_taskflow.py @@ -19,6 +19,7 @@ from __future__ import annotations +# [START example_setup_teardown_taskflow] import pendulum from airflow.sdk import DAG, setup, task, task_group, teardown @@ -104,3 +105,4 @@ def inner_teardown(cluster_id): # and let's put section 1 inside the outer setup and teardown tasks section_1() +# [END example_setup_teardown_taskflow] diff --git a/airflow-core/src/airflow/example_dags/example_simplest_dag.py b/airflow-core/src/airflow/example_dags/example_simplest_dag.py index 
fad6f57950a9e..660f38c2e00e1 100644 --- a/airflow-core/src/airflow/example_dags/example_simplest_dag.py +++ b/airflow-core/src/airflow/example_dags/example_simplest_dag.py @@ -18,6 +18,7 @@ from __future__ import annotations +# [START simplest_dag] from airflow.sdk import dag, task @@ -30,4 +31,6 @@ def my_task(): my_task() +# [END simplest_dag] + example_simplest_dag() diff --git a/airflow-core/src/airflow/example_dags/example_skip_dag.py b/airflow-core/src/airflow/example_dags/example_skip_dag.py index 7575494d0d9ab..8226a6e63bd37 100644 --- a/airflow-core/src/airflow/example_dags/example_skip_dag.py +++ b/airflow-core/src/airflow/example_dags/example_skip_dag.py @@ -25,9 +25,8 @@ import pendulum from airflow.exceptions import AirflowSkipException -from airflow.models.baseoperator import BaseOperator from airflow.providers.standard.operators.empty import EmptyOperator -from airflow.sdk import DAG +from airflow.sdk import DAG, BaseOperator from airflow.utils.trigger_rule import TriggerRule if TYPE_CHECKING: diff --git a/airflow-core/src/airflow/example_dags/example_task_group.py b/airflow-core/src/airflow/example_dags/example_task_group.py index e83ac2e9989cf..c882c269c476b 100644 --- a/airflow-core/src/airflow/example_dags/example_task_group.py +++ b/airflow-core/src/airflow/example_dags/example_task_group.py @@ -24,7 +24,7 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.standard.operators.empty import EmptyOperator from airflow.sdk import DAG -from airflow.utils.task_group import TaskGroup +from airflow.sdk.definitions.taskgroup import TaskGroup # [START howto_task_group] with DAG( diff --git a/airflow-core/src/airflow/example_dags/example_task_group_decorator.py b/airflow-core/src/airflow/example_dags/example_task_group_decorator.py index 580b8bca5226a..5ed2a59ae3b64 100644 --- a/airflow-core/src/airflow/example_dags/example_task_group_decorator.py +++ b/airflow-core/src/airflow/example_dags/example_task_group_decorator.py @@ -19,12 +19,12 @@ from __future__ import annotations +# [START howto_task_group_decorator] import pendulum from airflow.sdk import DAG, task, task_group -# [START howto_task_group_decorator] # Creating Tasks @task def task_start(): diff --git a/airflow-core/src/airflow/example_dags/example_time_delta_sensor_async.py b/airflow-core/src/airflow/example_dags/example_time_delta_sensor_async.py index 7b847e6871e37..726fa2cf5c5df 100644 --- a/airflow-core/src/airflow/example_dags/example_time_delta_sensor_async.py +++ b/airflow-core/src/airflow/example_dags/example_time_delta_sensor_async.py @@ -16,8 +16,7 @@ # specific language governing permissions and limitations # under the License. 
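The example DAGs above switch TaskGroup imports from airflow.utils.task_group to the Task SDK location. A minimal usage sketch under the new imports (hypothetical dag_id and task ids, mirroring the imports the diff itself introduces):

    import pendulum

    from airflow.providers.standard.operators.empty import EmptyOperator
    from airflow.sdk import DAG
    from airflow.sdk.definitions.taskgroup import TaskGroup

    with DAG(
        dag_id="task_group_sketch",  # hypothetical
        schedule=None,
        start_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
        catchup=False,
    ) as dag:
        start = EmptyOperator(task_id="start")
        with TaskGroup("section_1") as section_1:
            # tasks created inside the context manager are grouped under "section_1"
            EmptyOperator(task_id="inner")
        start >> section_1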
""" -Example DAG demonstrating ``TimeDeltaSensorAsync``, a drop in replacement for ``TimeDeltaSensor`` that -defers and doesn't occupy a worker slot while it waits +Example DAG demonstrating ``TimeDeltaSensor``, that defers and doesn't occupy a worker slot while it waits """ from __future__ import annotations @@ -27,7 +26,7 @@ import pendulum from airflow.providers.standard.operators.empty import EmptyOperator -from airflow.providers.standard.sensors.time_delta import TimeDeltaSensorAsync +from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor from airflow.sdk import DAG with DAG( @@ -37,6 +36,6 @@ catchup=False, tags=["example"], ) as dag: - wait = TimeDeltaSensorAsync(task_id="wait", delta=datetime.timedelta(seconds=30)) + wait = TimeDeltaSensor(task_id="wait", delta=datetime.timedelta(seconds=30), deferrable=True) finish = EmptyOperator(task_id="finish") wait >> finish diff --git a/airflow-core/src/airflow/example_dags/example_xcomargs.py b/airflow-core/src/airflow/example_dags/example_xcomargs.py index 6337cf482d98f..a64beb513baef 100644 --- a/airflow-core/src/airflow/example_dags/example_xcomargs.py +++ b/airflow-core/src/airflow/example_dags/example_xcomargs.py @@ -19,6 +19,7 @@ from __future__ import annotations +# [START example_xcomargs] import logging import pendulum @@ -63,3 +64,4 @@ def print_value(value, ts=None): xcom_args_b = print_value("second!") bash_op1 >> xcom_args_a >> xcom_args_b >> bash_op2 +# [END example_xcomargs] diff --git a/airflow-core/src/airflow/example_dags/standard b/airflow-core/src/airflow/example_dags/standard new file mode 120000 index 0000000000000..3c2ef23d52c55 --- /dev/null +++ b/airflow-core/src/airflow/example_dags/standard @@ -0,0 +1 @@ +../../../../providers/standard/src/airflow/providers/standard/example_dags \ No newline at end of file diff --git a/airflow-core/src/airflow/example_dags/tutorial_objectstorage.py b/airflow-core/src/airflow/example_dags/tutorial_objectstorage.py index 8d5d57626ff38..66620122d6045 100644 --- a/airflow-core/src/airflow/example_dags/tutorial_objectstorage.py +++ b/airflow-core/src/airflow/example_dags/tutorial_objectstorage.py @@ -15,8 +15,11 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ from __future__ import annotations +from collections.abc import Mapping + # [START tutorial] # [START import_module] import pendulum @@ -26,19 +29,17 @@ # [END import_module] -API = "https://opendata.fmi.fi/timeseries" +API = "https://air-quality-api.open-meteo.com/v1/air-quality" aq_fields = { - "fmisid": "int32", - "time": "datetime64[ns]", - "AQINDEX_PT1H_avg": "float64", - "PM10_PT1H_avg": "float64", - "PM25_PT1H_avg": "float64", - "O3_PT1H_avg": "float64", - "CO_PT1H_avg": "float64", - "SO2_PT1H_avg": "float64", - "NO2_PT1H_avg": "float64", - "TRSC_PT1H_avg": "float64", + "pm10": "float64", + "pm2_5": "float64", + "carbon_monoxide": "float64", + "nitrogen_dioxide": "float64", + "sulphur_dioxide": "float64", + "ozone": "float64", + "european_aqi": "float64", + "us_aqi": "float64", } # [START create_object_storage_path] @@ -72,40 +73,44 @@ def get_air_quality_data(**kwargs) -> ObjectStoragePath: import pandas as pd logical_date = kwargs["logical_date"] - start_time = kwargs["data_interval_start"] - - params = { - "format": "json", - "precision": "double", - "groupareas": "0", - "producer": "airquality_urban", - "area": "Uusimaa", - "param": ",".join(aq_fields.keys()), - "starttime": start_time.isoformat(timespec="seconds"), - "endtime": logical_date.isoformat(timespec="seconds"), - "tz": "UTC", + + latitude = 28.6139 + longitude = 77.2090 + + params: Mapping[str, str | float] = { + "latitude": latitude, + "longitude": longitude, + "hourly": ",".join(aq_fields.keys()), + "timezone": "UTC", } response = requests.get(API, params=params) response.raise_for_status() + data = response.json() + hourly_data = data.get("hourly", {}) + + df = pd.DataFrame(hourly_data) + + df["time"] = pd.to_datetime(df["time"]) + # ensure the bucket exists base.mkdir(exist_ok=True) formatted_date = logical_date.format("YYYYMMDD") path = base / f"air_quality_{formatted_date}.parquet" - df = pd.DataFrame(response.json()).astype(aq_fields) with path.open("wb") as file: df.to_parquet(file) - return path # [END get_air_quality_data] # [START analyze] @task - def analyze(path: ObjectStoragePath, **kwargs): + def analyze( + path: ObjectStoragePath, + ): """ #### Analyze This task analyzes the air quality data, prints the results @@ -114,7 +119,10 @@ def analyze(path: ObjectStoragePath, **kwargs): conn = duckdb.connect(database=":memory:") conn.register_filesystem(path.fs) - conn.execute(f"CREATE OR REPLACE TABLE airquality_urban AS SELECT * FROM read_parquet('{path}')") + s3_path = path.path + conn.execute( + f"CREATE OR REPLACE TABLE airquality_urban AS SELECT * FROM read_parquet('{path.protocol}://{s3_path}')" + ) df2 = conn.execute("SELECT * FROM airquality_urban").fetchdf() diff --git a/airflow-core/src/airflow/example_dags/tutorial_taskflow_templates.py b/airflow-core/src/airflow/example_dags/tutorial_taskflow_templates.py index 8bb4c91547e99..fad0e7da042b5 100644 --- a/airflow-core/src/airflow/example_dags/tutorial_taskflow_templates.py +++ b/airflow-core/src/airflow/example_dags/tutorial_taskflow_templates.py @@ -21,8 +21,7 @@ # [START import_module] import pendulum -from airflow.providers.standard.operators.python import get_current_context -from airflow.sdk import dag, task +from airflow.sdk import dag, get_current_context, task # [END import_module] diff --git a/airflow-core/src/airflow/exceptions.py b/airflow-core/src/airflow/exceptions.py index 741c1d2069640..c37c8689b74fa 100644 --- a/airflow-core/src/airflow/exceptions.py +++ b/airflow-core/src/airflow/exceptions.py @@ -21,7 +21,6 @@ from __future__ 
import annotations -import warnings from collections.abc import Collection, Sequence from datetime import datetime, timedelta from http import HTTPStatus @@ -30,8 +29,6 @@ from airflow.utils.trigger_rule import TriggerRule if TYPE_CHECKING: - from collections.abc import Sized - from airflow.models import DagRun from airflow.sdk.definitions.asset import AssetNameRef, AssetUniqueKey, AssetUriRef from airflow.utils.state import DagRunState @@ -102,10 +99,6 @@ class AirflowTaskTerminated(BaseException): """Raise when the task execution is terminated.""" -class AirflowWebServerTimeout(AirflowException): - """Raise when the web server times out.""" - - class AirflowSkipException(AirflowException): """Raise when the task should be skipped.""" @@ -181,42 +174,6 @@ def serialize(self): ) -class XComForMappingNotPushed(AirflowException): - """Raise when a mapped downstream's dependency fails to push XCom for task mapping.""" - - def __str__(self) -> str: - return "did not push XCom for task mapping" - - -class UnmappableXComTypePushed(AirflowException): - """Raise when an unmappable type is pushed as a mapped downstream's dependency.""" - - def __init__(self, value: Any, *values: Any) -> None: - super().__init__(value, *values) - - def __str__(self) -> str: - typename = type(self.args[0]).__qualname__ - for arg in self.args[1:]: - typename = f"{typename}[{type(arg).__qualname__}]" - return f"unmappable return type {typename!r}" - - -class UnmappableXComLengthPushed(AirflowException): - """Raise when the pushed value is too large to map as a downstream's dependency.""" - - def __init__(self, value: Sized, max_length: int) -> None: - super().__init__(value) - self.value = value - self.max_length = max_length - - def __str__(self) -> str: - return f"unmappable return value length: {len(self.value)} > {self.max_length}" - - -class AirflowDagCycleException(AirflowException): - """Raise when there is a cycle in DAG definition.""" - - class AirflowDagDuplicatedIdException(AirflowException): """Raise when a DAG's ID is already used by another DAG.""" @@ -246,17 +203,6 @@ class AirflowTimetableInvalid(AirflowException): """Raise when a DAG has an invalid timetable.""" -class DagIsPaused(AirflowException): - """Raise when a dag is paused and something tries to run it.""" - - def __init__(self, dag_id: str) -> None: - super().__init__(dag_id) - self.dag_id = dag_id - - def __str__(self) -> str: - return f"Dag {self.dag_id} is paused" - - class DagNotFound(AirflowNotFoundException): """Raise when a DAG is not available in the system.""" @@ -295,14 +241,6 @@ def serialize(self): ) -class DagFileExists(AirflowBadRequest): - """Raise when a DAG ID is still in DagBag i.e., DAG file is in DAG folder.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - warnings.warn("DagFileExists is deprecated and will be removed.", DeprecationWarning, stacklevel=2) - - class FailFastDagInvalidTriggerRule(AirflowException): """Raise when a dag has 'fail_fast' enabled yet has a non-default trigger rule.""" @@ -405,10 +343,18 @@ def __str__(self): return result +class AirflowUnsupportedFileTypeException(AirflowException): + """Raise when a file type is not supported.""" + + class ConnectionNotUnique(AirflowException): """Raise when multiple values are found for the same connection ID.""" +class VariableNotUnique(AirflowException): + """Raise when multiple values are found for the same variable name.""" + + class DownstreamTasksSkipped(AirflowException): """ Signal by an operator to skip its downstream 
tasks. @@ -569,3 +515,21 @@ def __str__(self): class UnknownExecutorException(ValueError): """Raised when an attempt is made to load an executor which is not configured.""" + + +def __getattr__(name: str): + """Provide backward compatibility for moved exceptions.""" + if name == "AirflowDagCycleException": + import warnings + + from airflow.sdk.exceptions import AirflowDagCycleException + + warnings.warn( + "airflow.exceptions.AirflowDagCycleException is deprecated. " + "Use airflow.sdk.exceptions.AirflowDagCycleException instead.", + DeprecationWarning, + stacklevel=2, + ) + return AirflowDagCycleException + + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") diff --git a/airflow-core/src/airflow/executors/base_executor.py b/airflow-core/src/airflow/executors/base_executor.py index 0601696877885..ebd781271012d 100644 --- a/airflow-core/src/airflow/executors/base_executor.py +++ b/airflow-core/src/airflow/executors/base_executor.py @@ -23,18 +23,19 @@ from collections.abc import Sequence from dataclasses import dataclass, field from functools import cached_property -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any import pendulum from airflow.cli.cli_config import DefaultHelpParser from airflow.configuration import conf +from airflow.executors import workloads from airflow.executors.executor_loader import ExecutorLoader from airflow.models import Log from airflow.stats import Stats from airflow.traces import NO_TRACE_ID -from airflow.traces.tracer import Trace, add_span, gen_context -from airflow.traces.utils import gen_span_id_from_ti_key, gen_trace_id +from airflow.traces.tracer import DebugTrace, Trace, add_debug_span, gen_context +from airflow.traces.utils import gen_span_id_from_ti_key from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.state import TaskInstanceState from airflow.utils.thread_safe_dict import ThreadSafeDict @@ -51,28 +52,14 @@ from airflow.callbacks.base_callback_sink import BaseCallbackSink from airflow.callbacks.callback_requests import CallbackRequest from airflow.cli.cli_config import GroupCommand - from airflow.executors import workloads from airflow.executors.executor_utils import ExecutorName from airflow.models.taskinstance import TaskInstance from airflow.models.taskinstancekey import TaskInstanceKey - # Command to execute - list of strings - # the first element is always "airflow". - # It should be result of TaskInstance.generate_command method. - CommandType = Sequence[str] - - # Task that is queued. It contains all the information that is - # needed to run the task. - # - # Tuple of: command, priority, queue name, TaskInstance - QueuedTaskInstanceType = tuple[CommandType, int, Optional[str], TaskInstance] - # Event_buffer dict value type # Tuple of: state, info - EventBufferValueType = tuple[Optional[str], Any] + EventBufferValueType = tuple[str | None, Any] - # Task tuple to send to be executed - TaskTuple = tuple[TaskInstanceKey, CommandType, Optional[str], Optional[Any]] log = logging.getLogger(__name__) @@ -112,7 +99,7 @@ def can_try_again(self): class BaseExecutor(LoggingMixin): """ - Base class to inherit for concrete executors such as Celery, Kubernetes, Local, Sequential, etc. + Base class to inherit for concrete executors such as Celery, Kubernetes, Local, etc. :param parallelism: how many jobs should run at one time. 
""" @@ -159,7 +146,7 @@ def __init__(self, parallelism: int = PARALLELISM, team_id: str | None = None): self.parallelism: int = parallelism self.team_id: str | None = team_id - self.queued_tasks: dict[TaskInstanceKey, QueuedTaskInstanceType] = {} + self.queued_tasks: dict[TaskInstanceKey, workloads.ExecuteTask] = {} self.running: set[TaskInstanceKey] = set() self.event_buffer: dict[TaskInstanceKey, EventBufferValueType] = {} self._task_event_logs: deque[Log] = deque() @@ -192,62 +179,23 @@ def log_task_event(self, *, event: str, extra: str, ti_key: TaskInstanceKey): """Add an event to the log table.""" self._task_event_logs.append(Log(event=event, task_instance=ti_key, extra=extra)) - def queue_command( - self, - task_instance: TaskInstance, - command: CommandType, - priority: int = 1, - queue: str | None = None, - ): - """Queues command to task.""" - if task_instance.key not in self.queued_tasks: - self.log.info("Adding to queue: %s", command) - self.queued_tasks[task_instance.key] = (command, priority, queue, task_instance) - else: - self.log.error("could not queue task %s", task_instance.key) - def queue_workload(self, workload: workloads.All, session: Session) -> None: - raise ValueError(f"Un-handled workload kind {type(workload).__name__!r} in {type(self).__name__}") - - def queue_task_instance( - self, - task_instance: TaskInstance, - mark_success: bool = False, - ignore_all_deps: bool = False, - ignore_depends_on_past: bool = False, - wait_for_past_depends_before_skipping: bool = False, - ignore_task_deps: bool = False, - ignore_ti_state: bool = False, - pool: str | None = None, - cfg_path: str | None = None, - ) -> None: - """Queues task instance.""" - if TYPE_CHECKING: - assert task_instance.task - - pool = pool or task_instance.pool - - command_list_to_run = task_instance.command_as_list( - local=True, - mark_success=mark_success, - ignore_all_deps=ignore_all_deps, - ignore_depends_on_past=ignore_depends_on_past, - wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, - ignore_task_deps=ignore_task_deps, - ignore_ti_state=ignore_ti_state, - pool=pool, - # cfg_path is needed to propagate the config values if using impersonation - # (run_as_user), given that there are different code paths running tasks. - # https://github.com/apache/airflow/pull/2991 - cfg_path=cfg_path, - ) - self.log.debug("created command %s", command_list_to_run) - self.queue_command( - task_instance, - command_list_to_run, - priority=task_instance.priority_weight, - queue=task_instance.task.queue, - ) + if not isinstance(workload, workloads.ExecuteTask): + raise ValueError(f"Un-handled workload kind {type(workload).__name__!r} in {type(self).__name__}") + ti = workload.ti + self.queued_tasks[ti.key] = workload + + def _process_workloads(self, workloads: Sequence[workloads.All]) -> None: + """ + Process the given workloads. + + This method must be implemented by subclasses to define how they handle + the execution of workloads (e.g., queuing them to workers, submitting to + external systems, etc.). + + :param workloads: List of workloads to process + """ + raise NotImplementedError(f"{type(self).__name__} must implement _process_workloads()") def has_task(self, task_instance: TaskInstance) -> bool: """ @@ -270,7 +218,7 @@ def sync(self) -> None: Executors should override this to perform gather statuses. 
""" - @add_span + @add_debug_span def heartbeat(self) -> None: """Heartbeat sent to trigger new jobs.""" open_slots = self.parallelism - len(self.running) @@ -347,33 +295,23 @@ def _emit_metrics(self, open_slots, num_running_tasks, num_queued_tasks): tags={"status": "running", "name": name}, ) - def order_queued_tasks_by_priority(self) -> list[tuple[TaskInstanceKey, QueuedTaskInstanceType]]: + def order_queued_tasks_by_priority(self) -> list[tuple[TaskInstanceKey, workloads.ExecuteTask]]: """ Orders the queued tasks by priority. - :return: List of tuples from the queued_tasks according to the priority. + :return: List of workloads from the queued_tasks according to the priority. """ - from airflow.executors import workloads - if not self.queued_tasks: return [] - kind = next(iter(self.queued_tasks.values())) - if isinstance(kind, workloads.BaseWorkload): - # V3 + new executor that supports workloads - return sorted( - self.queued_tasks.items(), - key=lambda x: x[1].ti.priority_weight, - reverse=True, - ) - + # V3 + new executor that supports workloads return sorted( self.queued_tasks.items(), - key=lambda x: x[1][1], + key=lambda x: x[1].ti.priority_weight, reverse=True, ) - @add_span + @add_debug_span def trigger_tasks(self, open_slots: int) -> None: """ Initiate async execution of the queued tasks, up to the number of available slots. @@ -381,7 +319,6 @@ def trigger_tasks(self, open_slots: int) -> None: :param open_slots: Number of open slots """ sorted_queue = self.order_queued_tasks_by_priority() - task_tuples = [] workload_list = [] for _ in range(min((open_slots, len(self.queued_tasks)))): @@ -397,103 +334,36 @@ def trigger_tasks(self, open_slots: int) -> None: # deferred task has completed. In this case and for this reason, # we make a small number of attempts to see if the task has been # removed from the running set in the meantime. - if key in self.running: - attempt = self.attempts[key] - if attempt.can_try_again(): - # if it hasn't been much time since first check, let it be checked again next time - self.log.info("queued but still running; attempt=%s task=%s", attempt.total_tries, key) - continue - - # Otherwise, we give up and remove the task from the queue. - self.log.error( - "could not queue task %s (still running after %d attempts).", - key, - attempt.total_tries, - ) - self.log_task_event( - event="task launch failure", - extra=( - "Task was in running set and could not be queued " - f"after {attempt.total_tries} attempts." - ), - ti_key=key, - ) + if key in self.attempts: del self.attempts[key] - del self.queued_tasks[key] - else: - if key in self.attempts: - del self.attempts[key] - # TODO: TaskSDK: Compat, remove when KubeExecutor is fully moved over to TaskSDK too. - # TODO: TaskSDK: We need to minimum version requirements on executors with Airflow 3. - # How/where do we do that? Executor loader? - from airflow.executors import workloads - - if isinstance(item, workloads.ExecuteTask) and hasattr(item, "ti"): - ti = item.ti - - # If it's None, then the span for the current TaskInstanceKey hasn't been started. - if self.active_spans is not None and self.active_spans.get(key) is None: - from airflow.models.taskinstance import SimpleTaskInstance - - if isinstance(ti, (SimpleTaskInstance, workloads.TaskInstance)): - parent_context = Trace.extract(ti.parent_context_carrier) - else: - parent_context = Trace.extract(ti.dag_run.context_carrier) - # Start a new span using the context from the parent. 
- # Attributes will be set once the task has finished so that all - # values will be available (end_time, duration, etc.). - - span = Trace.start_child_span( - span_name=f"{ti.task_id}", - parent_context=parent_context, - component="task", - start_as_current=False, - ) - self.active_spans.set(key, span) - # Inject the current context into the carrier. - carrier = Trace.inject() - ti.context_carrier = carrier - - if hasattr(self, "_process_workloads"): - workload_list.append(item) - else: - (command, _, queue, ti) = item - task_tuples.append((key, command, queue, getattr(ti, "executor_config", None))) - - if task_tuples: - self._process_tasks(task_tuples) - elif workload_list: - self._process_workloads(workload_list) # type: ignore[attr-defined] - - @add_span - def _process_tasks(self, task_tuples: list[TaskTuple]) -> None: - for key, command, queue, executor_config in task_tuples: - task_instance = self.queued_tasks[key][3] # TaskInstance in fourth element - trace_id = int(gen_trace_id(task_instance.dag_run, as_int=True)) - span_id = int(gen_span_id_from_ti_key(key, as_int=True)) - links = [{"trace_id": trace_id, "span_id": span_id}] - # assuming that the span_id will very likely be unique inside the trace - with Trace.start_span( - span_name=f"{key.dag_id}.{key.task_id}", - component="BaseExecutor", - span_id=span_id, - links=links, - ) as span: - span.set_attributes( - { - "dag_id": key.dag_id, - "run_id": key.run_id, - "task_id": key.task_id, - "try_number": key.try_number, - "command": str(command), - "queue": str(queue), - "executor_config": str(executor_config), - } - ) - del self.queued_tasks[key] - self.execute_async(key=key, command=command, queue=queue, executor_config=executor_config) - self.running.add(key) + if isinstance(item, workloads.ExecuteTask) and hasattr(item, "ti"): + ti = item.ti + + # If it's None, then the span for the current id hasn't been started. + if self.active_spans is not None and self.active_spans.get("ti:" + str(ti.id)) is None: + if isinstance(ti, workloads.TaskInstance): + parent_context = Trace.extract(ti.parent_context_carrier) + else: + parent_context = Trace.extract(ti.dag_run.context_carrier) + # Start a new span using the context from the parent. + # Attributes will be set once the task has finished so that all + # values will be available (end_time, duration, etc.). + + span = Trace.start_child_span( + span_name=f"{ti.task_id}", + parent_context=parent_context, + component="task", + start_as_current=False, + ) + self.active_spans.set("ti:" + str(ti.id), span) + # Inject the current context into the carrier. + carrier = Trace.inject() + ti.context_carrier = carrier + + workload_list.append(item) + if workload_list: + self._process_workloads(workload_list) # TODO: This should not be using `TaskInstanceState` here, this is just "did the process complete, or did # it die". 
It is possible for the task itself to finish with success, but the state of the task to be set @@ -527,7 +397,7 @@ def fail(self, key: TaskInstanceKey, info=None) -> None: trace_id = Trace.get_current_span().get_span_context().trace_id if trace_id != NO_TRACE_ID: span_id = int(gen_span_id_from_ti_key(key, as_int=True)) - with Trace.start_span( + with DebugTrace.start_span( span_name="fail", component="BaseExecutor", parent_sc=gen_context(trace_id=trace_id, span_id=span_id), @@ -554,7 +424,7 @@ def success(self, key: TaskInstanceKey, info=None) -> None: trace_id = Trace.get_current_span().get_span_context().trace_id if trace_id != NO_TRACE_ID: span_id = int(gen_span_id_from_ti_key(key, as_int=True)) - with Trace.start_span( + with DebugTrace.start_span( span_name="success", component="BaseExecutor", parent_sc=gen_context(trace_id=trace_id, span_id=span_id), @@ -609,23 +479,6 @@ def get_event_buffer(self, dag_ids=None) -> dict[TaskInstanceKey, EventBufferVal return cleared_events - def execute_async( - self, - key: TaskInstanceKey, - command: CommandType, - queue: str | None = None, - executor_config: Any | None = None, - ) -> None: # pragma: no cover - """ - Execute the command asynchronously. - - :param key: Unique key for the task instance - :param command: Command to run - :param queue: name of the queue - :param executor_config: Configuration passed to the executor. - """ - raise NotImplementedError() - def get_task_log(self, ti: TaskInstance, try_number: int) -> tuple[list[str], list[str]]: """ Return the task logs. @@ -683,28 +536,6 @@ def slots_occupied(self): """Number of tasks this executor instance is currently managing.""" return len(self.running) + len(self.queued_tasks) - @staticmethod - def validate_airflow_tasks_run_command(command: Sequence[str]) -> tuple[str | None, str | None]: - """ - Check if the command to execute is airflow command. 
- - Returns tuple (dag_id,task_id) retrieved from the command (replaced with None values if missing) - """ - if command[0:3] != ["airflow", "tasks", "run"]: - raise ValueError('The command must start with ["airflow", "tasks", "run"].') - if len(command) > 3 and "--help" not in command: - dag_id: str | None = None - task_id: str | None = None - for arg in command[3:]: - if not arg.startswith("--"): - if dag_id is None: - dag_id = arg - else: - task_id = arg - break - return dag_id, task_id - return None, None - def debug_dump(self): """Get called in response to SIGUSR2 by the scheduler.""" self.log.info( diff --git a/airflow-core/src/airflow/executors/local_executor.py b/airflow-core/src/airflow/executors/local_executor.py index 87a8e71d3589d..6ab00cced3238 100644 --- a/airflow-core/src/airflow/executors/local_executor.py +++ b/airflow-core/src/airflow/executors/local_executor.py @@ -31,7 +31,7 @@ import multiprocessing.sharedctypes import os from multiprocessing import Queue, SimpleQueue -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from setproctitle import setproctitle @@ -43,7 +43,7 @@ if TYPE_CHECKING: from sqlalchemy.orm import Session - TaskInstanceStateType = tuple[workloads.TaskInstance, TaskInstanceState, Optional[Exception]] + TaskInstanceStateType = tuple[workloads.TaskInstance, TaskInstanceState, Exception | None] def _run_worker( @@ -108,6 +108,13 @@ def _execute_work(log: logging.Logger, workload: workloads.ExecuteTask) -> None: from airflow.sdk.execution_time.supervisor import supervise setproctitle(f"airflow worker -- LocalExecutor: {workload.ti.id}") + + base_url = conf.get("api", "base_url", fallback="/") + # If it's a relative URL, use localhost:8080 as the default + if base_url.startswith("/"): + base_url = f"http://localhost:8080{base_url}" + default_execution_api_server = f"{base_url.rstrip('/')}/execution/" + # This will return the exit code of the task process, but we don't care about that, just if the # _supervisor_ had an error reporting the state back (which will result in an exception.) 
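    # Worked example of the fallback computed above (values illustrative): with
    # the default base_url="/", server becomes "http://localhost:8080/execution/";
    # with base_url="https://airflow.example.com/af", it becomes
    # "https://airflow.example.com/af/execution/". An explicitly configured
    # [core] execution_api_server_url still takes precedence over this fallback.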
supervise( @@ -116,7 +123,7 @@ def _execute_work(log: logging.Logger, workload: workloads.ExecuteTask) -> None: dag_rel_path=workload.dag_rel_path, bundle_info=workload.bundle_info, token=workload.token, - server=conf.get("core", "execution_api_server_url"), + server=conf.get("core", "execution_api_server_url", fallback=default_execution_api_server), log_path=workload.log_path, ) @@ -155,7 +162,7 @@ def start(self) -> None: # Mypy sees this value as `SynchronizedBase[c_uint]`, but that isn't the right runtime type behaviour # (it looks like an int to python) - self._unread_messages = multiprocessing.Value(ctypes.c_uint) # type: ignore[assignment] + self._unread_messages = multiprocessing.Value(ctypes.c_uint) def _check_workers(self): # Reap any dead workers diff --git a/airflow-core/src/airflow/executors/workloads.py b/airflow-core/src/airflow/executors/workloads.py index 3da9fe3cede39..43a4aab1dbc47 100644 --- a/airflow-core/src/airflow/executors/workloads.py +++ b/airflow-core/src/airflow/executors/workloads.py @@ -20,7 +20,7 @@ import uuid from datetime import datetime from pathlib import Path -from typing import TYPE_CHECKING, Annotated, Literal, Union +from typing import TYPE_CHECKING, Annotated, Literal import structlog from pydantic import BaseModel, Field @@ -55,7 +55,7 @@ class TaskInstance(BaseModel): """Schema for TaskInstance with minimal required fields needed for Executors and Task SDK.""" id: uuid.UUID - + dag_version_id: uuid.UUID task_id: str dag_id: str run_id: str @@ -69,7 +69,6 @@ class TaskInstance(BaseModel): parent_context_carrier: dict | None = None context_carrier: dict | None = None - queued_dttm: datetime | None = None # TODO: Task-SDK: Can we replace TastInstanceKey with just the uuid across the codebase? @property @@ -160,6 +159,6 @@ class RunTrigger(BaseModel): All = Annotated[ - Union[ExecuteTask, RunTrigger], + ExecuteTask | RunTrigger, Field(discriminator="type"), ] diff --git a/airflow-core/src/airflow/hooks/__init__.py b/airflow-core/src/airflow/hooks/__init__.py index 9b9ef41aa89b9..6bd03fa282b09 100644 --- a/airflow-core/src/airflow/hooks/__init__.py +++ b/airflow-core/src/airflow/hooks/__init__.py @@ -31,5 +31,8 @@ "subprocess": { "SubprocessHook": "airflow.providers.standard.hooks.subprocess.SubprocessHook", }, + "base": { + "BaseHook": "airflow.sdk.bases.hook.BaseHook", + }, } add_deprecated_classes(__deprecated_classes, __name__) diff --git a/airflow-core/src/airflow/hooks/base.py b/airflow-core/src/airflow/hooks/base.py index 88a72ddf6e2bf..41a9b26ac9f8e 100644 --- a/airflow-core/src/airflow/hooks/base.py +++ b/airflow-core/src/airflow/hooks/base.py @@ -19,75 +19,7 @@ from __future__ import annotations -import logging -from typing import TYPE_CHECKING, Any, Protocol - -from airflow.utils.log.logging_mixin import LoggingMixin - -if TYPE_CHECKING: - from airflow.models.connection import Connection # Avoid circular imports. - -log = logging.getLogger(__name__) - - -class BaseHook(LoggingMixin): - """ - Abstract base class for hooks. - - Hooks are meant as an interface to - interact with external systems. MySqlHook, HiveHook, PigHook return - object that can handle the connection and interaction to specific - instances of these systems, and expose consistent methods to interact - with them. - - :param logger_name: Name of the logger used by the Hook to emit logs. - If set to `None` (default), the logger name will fall back to - `airflow.task.hooks.{class.__module__}.{class.__name__}` (e.g. 
DbApiHook will have - *airflow.task.hooks.airflow.providers.common.sql.hooks.sql.DbApiHook* as logger). - """ - - def __init__(self, logger_name: str | None = None): - super().__init__() - self._log_config_logger_name = "airflow.task.hooks" - self._logger_name = logger_name - - @classmethod - def get_connection(cls, conn_id: str) -> Connection: - """ - Get connection, given connection id. - - :param conn_id: connection id - :return: connection - """ - from airflow.models.connection import Connection - - conn = Connection.get_connection_from_secrets(conn_id) - log.info("Connection Retrieved '%s'", conn.conn_id) - return conn - - @classmethod - def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> BaseHook: - """ - Return default hook for this connection id. - - :param conn_id: connection id - :param hook_params: hook parameters - :return: default hook for this connection - """ - connection = cls.get_connection(conn_id) - return connection.get_hook(hook_params=hook_params) - - def get_conn(self) -> Any: - """Return connection for the hook.""" - raise NotImplementedError() - - @classmethod - def get_connection_form_widgets(cls) -> dict[str, Any]: - return {} - - @classmethod - def get_ui_field_behaviour(cls) -> dict[str, Any]: - return {} +from typing import Any, Protocol class DiscoverableHook(Protocol): diff --git a/airflow-core/src/airflow/io/__init__.py b/airflow-core/src/airflow/io/__init__.py index 01e0cfaabe2d5..3b255aacdf82a 100644 --- a/airflow-core/src/airflow/io/__init__.py +++ b/airflow-core/src/airflow/io/__init__.py @@ -16,103 +16,26 @@ # under the License. from __future__ import annotations -import inspect -import logging -from collections.abc import Mapping -from functools import cache -from typing import ( - TYPE_CHECKING, - Callable, +from airflow.utils.deprecation_tools import add_deprecated_classes + +add_deprecated_classes( + { + __name__: { + "get_fs": "airflow.sdk.io.get_fs", + "has_fs": "airflow.sdk.io.has_fs", + "attach": "airflow.sdk.io.attach", + "Properties": "airflow.sdk.io.Properties", + "_BUILTIN_SCHEME_TO_FS": "airflow.sdk.io.fs._BUILTIN_SCHEME_TO_FS", + }, + "path": { + "ObjectStoragePath": "airflow.sdk.ObjectStoragePath", + }, + "storage": { + "attach": "airflow.sdk.io.attach", + }, + "typedef": { + "Properties": "airflow.sdk.io.typedef.Properties", + }, + }, + package=__name__, ) - -from fsspec.implementations.local import LocalFileSystem - -from airflow.providers_manager import ProvidersManager -from airflow.stats import Stats -from airflow.utils.module_loading import import_string - -if TYPE_CHECKING: - from fsspec import AbstractFileSystem - - from airflow.io.typedef import Properties - - -log = logging.getLogger(__name__) - - -def _file(_: str | None, storage_options: Properties) -> LocalFileSystem: - return LocalFileSystem(**storage_options) - - -# builtin supported filesystems -_BUILTIN_SCHEME_TO_FS: dict[str, Callable[[str | None, Properties], AbstractFileSystem]] = { - "file": _file, - "local": _file, -} - - -@cache -def _register_filesystems() -> Mapping[ - str, - Callable[[str | None, Properties], AbstractFileSystem] | Callable[[str | None], AbstractFileSystem], -]: - scheme_to_fs = _BUILTIN_SCHEME_TO_FS.copy() - with Stats.timer("airflow.io.load_filesystems") as timer: - manager = ProvidersManager() - for fs_module_name in manager.filesystem_module_names: - fs_module = import_string(fs_module_name) - for scheme in getattr(fs_module, "schemes", []): - if scheme in scheme_to_fs: - log.warning("Overriding scheme %s for %s", scheme, 
fs_module_name) - - method = getattr(fs_module, "get_fs", None) - if method is None: - raise ImportError(f"Filesystem {fs_module_name} does not have a get_fs method") - scheme_to_fs[scheme] = method - - log.debug("loading filesystems from providers took %.3f seconds", timer.duration) - return scheme_to_fs - - -def get_fs( - scheme: str, conn_id: str | None = None, storage_options: Properties | None = None -) -> AbstractFileSystem: - """ - Get a filesystem by scheme. - - :param scheme: the scheme to get the filesystem for - :return: the filesystem method - :param conn_id: the airflow connection id to use - :param storage_options: the storage options to pass to the filesystem - """ - filesystems = _register_filesystems() - try: - fs = filesystems[scheme] - except KeyError: - raise ValueError(f"No filesystem registered for scheme {scheme}") from None - - options = storage_options or {} - - # MyPy does not recognize dynamic parameters inspection when we call the method, and we have to do - # it for compatibility reasons with already released providers, that's why we need to ignore - # mypy errors here - parameters = inspect.signature(fs).parameters - if len(parameters) == 1: - if options: - raise AttributeError( - f"Filesystem {scheme} does not support storage options, but options were passed." - f"This most likely means that you are using an old version of the provider that does not " - f"support storage options. Please upgrade the provider if possible." - ) - return fs(conn_id) # type: ignore[call-arg] - return fs(conn_id, options) # type: ignore[call-arg] - - -def has_fs(scheme: str) -> bool: - """ - Check if a filesystem is available for a scheme. - - :param scheme: the scheme to check - :return: True if a filesystem is available for the scheme - """ - return scheme in _register_filesystems() diff --git a/airflow-core/src/airflow/io/path.py b/airflow-core/src/airflow/io/path.py deleted file mode 100644 index bc323d0030bc5..0000000000000 --- a/airflow-core/src/airflow/io/path.py +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -from airflow.sdk import ObjectStoragePath - -__all__ = ["ObjectStoragePath"] diff --git a/airflow-core/src/airflow/io/storage.py b/airflow-core/src/airflow/io/storage.py deleted file mode 100644 index 4723e8a15f65a..0000000000000 --- a/airflow-core/src/airflow/io/storage.py +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -from airflow.sdk.io import attach - -__all__ = ["attach"] diff --git a/airflow-core/src/airflow/jobs/job.py b/airflow-core/src/airflow/jobs/job.py index 8697c4d2be259..67fb3be019293 100644 --- a/airflow-core/src/airflow/jobs/job.py +++ b/airflow-core/src/airflow/jobs/job.py @@ -17,23 +17,24 @@ # under the License. from __future__ import annotations +from collections.abc import Callable from functools import cached_property, lru_cache from time import sleep -from typing import TYPE_CHECKING, Callable, NoReturn +from typing import TYPE_CHECKING, NoReturn from sqlalchemy import Column, Index, Integer, String, case, select from sqlalchemy.exc import OperationalError from sqlalchemy.orm import backref, foreign, relationship from sqlalchemy.orm.session import make_transient +from airflow._shared.timezones import timezone from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.executors.executor_loader import ExecutorLoader from airflow.listeners.listener import get_listener_manager from airflow.models.base import ID_LEN, Base from airflow.stats import Stats -from airflow.traces.tracer import Trace, add_span -from airflow.utils import timezone +from airflow.traces.tracer import DebugTrace, add_debug_span from airflow.utils.helpers import convert_camel_to_snake from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname @@ -109,6 +110,13 @@ class Job(Base, LoggingMixin): backref="creating_job", ) + dag_model = relationship( + "DagModel", + primaryjoin="Job.dag_id == DagModel.dag_id", + viewonly=True, + foreign_keys=[dag_id], + ) + """ TaskInstances which have been enqueued by this Job. 
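# A self-contained sketch of the relationship pattern added above: when no
# ForeignKey constraint links the columns, SQLAlchemy needs an explicit
# primaryjoin plus foreign_keys, and viewonly=True keeps the ORM from trying to
# persist changes through the relationship. Model/table names are illustrative.
from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class DagModelSketch(Base):
    __tablename__ = "dag_sketch"
    dag_id = Column(String(250), primary_key=True)


class JobSketch(Base):
    __tablename__ = "job_sketch"
    id = Column(String(36), primary_key=True)
    dag_id = Column(String(250))  # deliberately no ForeignKey() constraint

    dag_model = relationship(
        DagModelSketch,
        primaryjoin=dag_id == DagModelSketch.dag_id,
        foreign_keys=[dag_id],
        viewonly=True,
    )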
@@ -200,7 +208,7 @@ def heartbeat( :param session to use for saving the job """ previous_heartbeat = self.latest_heartbeat - with Trace.start_span(span_name="heartbeat", component="Job") as span: + with DebugTrace.start_span(span_name="heartbeat", component="Job") as span: try: span.set_attribute("heartbeat", str(self.latest_heartbeat)) # This will cause it to load from the db @@ -385,7 +393,7 @@ def execute_job(job: Job, execute_callable: Callable[[], int | None]) -> int | N return ret -@add_span +@add_debug_span def perform_heartbeat( job: Job, heartbeat_callback: Callable[[Session], None], only_if_necessary: bool ) -> None: diff --git a/airflow-core/src/airflow/jobs/scheduler_job_runner.py b/airflow-core/src/airflow/jobs/scheduler_job_runner.py index 586a2053b83eb..85d743a12301c 100644 --- a/airflow-core/src/airflow/jobs/scheduler_job_runner.py +++ b/airflow-core/src/airflow/jobs/scheduler_job_runner.py @@ -25,39 +25,42 @@ import sys import time from collections import Counter, defaultdict, deque -from collections.abc import Collection, Iterable, Iterator +from collections.abc import Callable, Collection, Iterable, Iterator from contextlib import ExitStack -from datetime import date, timedelta +from datetime import date, datetime, timedelta from functools import lru_cache, partial from itertools import groupby -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any, cast -from sqlalchemy import and_, delete, exists, func, select, text, tuple_, update +from sqlalchemy import and_, delete, desc, exists, func, inspect, or_, select, text, tuple_, update from sqlalchemy.exc import OperationalError from sqlalchemy.orm import joinedload, lazyload, load_only, make_transient, selectinload from sqlalchemy.sql import expression from airflow import settings -from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest +from airflow._shared.timezones import timezone +from airflow.api_fastapi.execution_api.datamodels.taskinstance import DagRun as DRDataModel, TIRunContext +from airflow.callbacks.callback_requests import DagCallbackRequest, DagRunContext, TaskCallbackRequest from airflow.configuration import conf from airflow.dag_processing.bundles.base import BundleUsageTrackingManager from airflow.executors import workloads -from airflow.executors.base_executor import BaseExecutor -from airflow.executors.executor_loader import ExecutorLoader from airflow.jobs.base_job_runner import BaseJobRunner from airflow.jobs.job import Job, perform_heartbeat -from airflow.models import Log +from airflow.models import Deadline, Log from airflow.models.asset import ( AssetActive, + AssetAliasModel, AssetDagRunQueue, AssetEvent, AssetModel, + DagScheduleAssetAliasReference, DagScheduleAssetReference, TaskOutletAssetReference, ) from airflow.models.backfill import Backfill from airflow.models.dag import DAG, DagModel from airflow.models.dag_version import DagVersion +from airflow.models.dagbag import DBDagBag from airflow.models.dagrun import DagRun from airflow.models.dagwarning import DagWarning, DagWarningType from airflow.models.serialized_dag import SerializedDagModel @@ -67,8 +70,7 @@ from airflow.ti_deps.dependencies_states import EXECUTION_STATES from airflow.timetables.simple import AssetTriggeredTimetable from airflow.traces import utils as trace_utils -from airflow.traces.tracer import Trace, add_span -from airflow.utils import timezone +from airflow.traces.tracer import DebugTrace, Trace, add_debug_span from airflow.utils.dates import 
datetime_to_nano from airflow.utils.event_scheduler import EventScheduler from airflow.utils.log.logging_mixin import LoggingMixin @@ -82,17 +84,17 @@ if TYPE_CHECKING: import logging - from datetime import datetime from types import FrameType from pendulum.datetime import DateTime from sqlalchemy.orm import Query, Session + from airflow.executors.base_executor import BaseExecutor from airflow.executors.executor_utils import ExecutorName + from airflow.models.mappedoperator import MappedOperator from airflow.models.taskinstance import TaskInstanceKey - from airflow.utils.sqlalchemy import ( - CommitProhibitorGuard, - ) + from airflow.serialization.serialized_objects import SerializedBaseOperator + from airflow.utils.sqlalchemy import CommitProhibitorGuard TI = TaskInstance DR = DagRun @@ -102,47 +104,6 @@ """:meta private:""" -class SchedulerDagBag: - """ - Internal class for retrieving and caching dags in the scheduler. - - :meta private: - """ - - def __init__(self): - self._dags: dict[str, DAG] = {} # dag_version_id to dag - - def _get_dag(self, version_id: str, session: Session) -> DAG | None: - if dag := self._dags.get(version_id): - return dag - dag_version = session.get(DagVersion, version_id, options=[joinedload(DagVersion.serialized_dag)]) - if not dag_version: - return None - serdag = dag_version.serialized_dag - if not serdag: - return None - serdag.load_op_links = False - dag = serdag.dag - if not dag: - return None - self._dags[version_id] = dag - return dag - - @staticmethod - def _version_from_dag_run(dag_run, session): - if dag_run.bundle_version: - dag_version = dag_run.created_dag_version - else: - dag_version = DagVersion.get_latest_version(dag_id=dag_run.dag_id, session=session) - return dag_version - - def get_dag(self, dag_run: DagRun, session: Session) -> DAG | None: - version = self._version_from_dag_run(dag_run=dag_run, session=session) - if not version: - return None - return self._get_dag(version_id=version.id, session=session) - - def _get_current_dag(dag_id: str, session: Session) -> DAG | None: serdag = SerializedDagModel.get(dag_id=dag_id, session=session) # grabs the latest version if not serdag: @@ -211,8 +172,10 @@ class SchedulerJobRunner(BaseJobRunner, LoggingMixin): # For a dagrun span # - key: dag_run.run_id | value: span + # - dagrun keys will be prefixed with 'dr:'. # For a ti span - # - key: ti.key | value: span + # - key: ti.id | value: span + # - taskinstance keys will be prefixed with 'ti:'. active_spans = ThreadSafeDict() def __init__( @@ -248,7 +211,7 @@ def __init__( if log: self._log = log - self.scheduler_dag_bag = SchedulerDagBag() + self.scheduler_dag_bag = DBDagBag(load_op_links=False) @provide_session def heartbeat_callback(self, session: Session = NEW_SESSION) -> None: @@ -539,7 +502,7 @@ def _executable_task_instances_to_queued(self, max_tis: int, session: Session) - if task_instance.dag_model.has_task_concurrency_limits: # Many dags don't have a task_concurrency, so where we can avoid loading the full # serialized DAG the better. - serialized_dag = self.scheduler_dag_bag.get_dag( + serialized_dag = self.scheduler_dag_bag.get_dag_for_run( dag_run=task_instance.dag_run, session=session ) # If the dag is missing, fail the task and continue to the next task. @@ -698,29 +661,8 @@ def _enqueue_task_instances_with_queued_state( ti.set_state(None, session=session) continue - # TODO: Task-SDK: This check is transitionary. Remove once all executors are ported over. 
- # Has a real queue_activity implemented - if executor.queue_workload.__func__ is not BaseExecutor.queue_workload: # type: ignore[attr-defined] - workload = workloads.ExecuteTask.make(ti, generator=executor.jwt_generator) - executor.queue_workload(workload, session=session) - continue - - command = ti.command_as_list( - local=True, - ) - - priority = ti.priority_weight - queue = ti.queue - self.log.info( - "Sending %s to %s with priority %s and queue %s", ti.key, executor.name, priority, queue - ) - - executor.queue_command( - ti, - command, - priority=priority, - queue=queue, - ) + workload = workloads.ExecuteTask.make(ti, generator=executor.jwt_generator) + executor.queue_workload(workload, session=session) def _critical_section_enqueue_task_instances(self, session: Session) -> int: """ @@ -779,6 +721,20 @@ def _process_task_event_logs(log_records: deque[Log], session: Session): objects = (log_records.popleft() for _ in range(len(log_records))) session.bulk_save_objects(objects=objects, preserve_order=False) + @staticmethod + def _is_metrics_enabled(): + return any( + [ + conf.getboolean("metrics", "statsd_datadog_enabled", fallback=False), + conf.getboolean("metrics", "statsd_on", fallback=False), + conf.getboolean("metrics", "otel_on", fallback=False), + ] + ) + + @staticmethod + def _is_tracing_enabled(): + return conf.getboolean("traces", "otel_on") + def _process_executor_events(self, executor: BaseExecutor, session: Session) -> int: return SchedulerJobRunner.process_executor_events( executor=executor, @@ -789,7 +745,7 @@ def _process_executor_events(self, executor: BaseExecutor, session: Session) -> @classmethod def process_executor_events( - cls, executor: BaseExecutor, job_id: str | None, scheduler_dag_bag: SchedulerDagBag, session: Session + cls, executor: BaseExecutor, job_id: str | None, scheduler_dag_bag: DBDagBag, session: Session ) -> int: """ Respond to executor events. @@ -826,6 +782,8 @@ def process_executor_events( select(TI) .where(filter_for_tis) .options(selectinload(TI.dag_model)) + .options(joinedload(TI.dag_run).selectinload(DagRun.consumed_asset_events)) + .options(joinedload(TI.dag_run).selectinload(DagRun.created_dag_version)) .options(joinedload(TI.dag_version)) ) # row lock this entire set of taskinstances to make sure the scheduler doesn't fail when we have @@ -873,11 +831,11 @@ def process_executor_events( ti.pid, ) - if (active_ti_span := cls.active_spans.get(ti.key)) is not None: + if (active_ti_span := cls.active_spans.get("ti:" + str(ti.id))) is not None: cls.set_ti_span_attrs(span=active_ti_span, state=state, ti=ti) # End the span and remove it from the active_spans dict. active_ti_span.end(end_time=datetime_to_nano(ti.end_date)) - cls.active_spans.delete(ti.key) + cls.active_spans.delete("ti:" + str(ti.id)) ti.span_status = SpanStatus.ENDED else: if ti.span_status == SpanStatus.ACTIVE: @@ -921,7 +879,7 @@ def process_executor_events( # Get task from the Serialized DAG try: - dag = scheduler_dag_bag.get_dag(dag_run=ti.dag_run, session=session) + dag = scheduler_dag_bag.get_dag_for_run(dag_run=ti.dag_run, session=session) cls.logger().error( "DAG '%s' for task instance %s not found in serialized_dag table", ti.dag_id, @@ -929,7 +887,9 @@ def process_executor_events( ) if TYPE_CHECKING: assert dag - task = dag.get_task(ti.task_id) + # TODO (GH-52141): get_task in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. 
+ task = cast("MappedOperator | SerializedBaseOperator", dag.get_task(ti.task_id)) except Exception: cls.logger().exception("Marking task instance %s as %s", ti, state) ti.set_state(state) @@ -945,10 +905,16 @@ def process_executor_events( bundle_version=ti.dag_version.bundle_version, ti=ti, msg=msg, + context_from_server=TIRunContext( + dag_run=DRDataModel.model_validate(ti.dag_run, from_attributes=True), + max_tries=ti.max_tries, + variables=[], + connections=[], + xcom_keys_to_clear=[], + ), ) executor.send_callback(request) - else: - ti.handle_failure(error=msg, session=session) + ti.handle_failure(error=msg, session=session) return len(event_buffer) @@ -988,8 +954,6 @@ def set_ti_span_attrs(cls, span, state, ti): def _execute(self) -> int | None: self.log.info("Starting the scheduler") - executor_class, _ = ExecutorLoader.import_default_executor_cls() - reset_signals = self.register_signals() try: callback_sink: DatabaseCallbackSink @@ -1014,7 +978,7 @@ def _execute(self) -> int | None: self._run_scheduler_loop() - settings.Session.remove() # type: ignore + settings.Session.remove() except Exception: self.log.exception("Exception when executing SchedulerJob._run_scheduler_loop") raise @@ -1046,7 +1010,7 @@ def _update_dag_run_state_for_paused_dags(self, session: Session = NEW_SESSION) .group_by(DagRun) ) for dag_run in paused_runs: - dag = self.scheduler_dag_bag.get_dag(dag_run=dag_run, session=session) + dag = self.scheduler_dag_bag.get_dag_for_run(dag_run=dag_run, session=session) if dag is not None: dag_run.dag = dag _, callback_to_run = dag_run.update_state(execute_callbacks=False, session=session) @@ -1058,27 +1022,23 @@ def _update_dag_run_state_for_paused_dags(self, session: Session = NEW_SESSION) @provide_session def _end_active_spans(self, session: Session = NEW_SESSION): # No need to do a commit for every update. The annotation will commit all of them once at the end. - for key, span in self.active_spans.get_all().items(): - from airflow.models.taskinstance import TaskInstanceKey - - if isinstance(key, TaskInstanceKey): # ti span. - # Can't compare the key directly because the try_number or the map_index might not be the same. - ti: TaskInstance = session.scalars( - select(TaskInstance).where( - TaskInstance.dag_id == key.dag_id, - TaskInstance.task_id == key.task_id, - TaskInstance.run_id == key.run_id, - ) - ).one() - if ti.state in State.finished: - self.set_ti_span_attrs(span=span, state=ti.state, ti=ti) - span.end(end_time=datetime_to_nano(ti.end_date)) - ti.span_status = SpanStatus.ENDED - else: - span.end() - ti.span_status = SpanStatus.NEEDS_CONTINUANCE - else: - dag_run: DagRun = session.scalars(select(DagRun).where(DagRun.run_id == key)).one() + for prefixed_key, span in self.active_spans.get_all().items(): + # Use partition to split on the first occurrence of ':'. 
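            # e.g. "ti:01963f3c-..." -> ("ti", ":", "01963f3c-...") and
            # "dr:42" -> ("dr", ":", "42"); the id values are illustrative.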
+ prefix, sep, key = prefixed_key.partition(":") + + if prefix == "ti": + ti: TaskInstance | None = session.get(TaskInstance, key) + + if ti is not None: + if ti.state in State.finished: + self.set_ti_span_attrs(span=span, state=ti.state, ti=ti) + span.end(end_time=datetime_to_nano(ti.end_date)) + ti.span_status = SpanStatus.ENDED + else: + span.end() + ti.span_status = SpanStatus.NEEDS_CONTINUANCE + elif prefix == "dr": + dag_run: DagRun = session.scalars(select(DagRun).where(DagRun.id == int(key))).one() if dag_run.state in State.finished_dr_states: dag_run.set_dagrun_span_attrs(span=span) @@ -1092,7 +1052,11 @@ def _end_active_spans(self, session: Session = NEW_SESSION): span_name="current_scheduler_exited", parent_context=initial_dag_run_context ) as s: s.set_attribute("trace_status", "needs continuance") + else: + self.log.error("Found key with unknown prefix: '%s'", prefixed_key) + # Even if there is a key with an unknown prefix, clear the dict. + # If this method has been called, the scheduler is exiting. self.active_spans.clear() def _end_spans_of_externally_ended_ops(self, session: Session): @@ -1114,7 +1078,7 @@ def _end_spans_of_externally_ended_ops(self, session: Session): ).all() for dag_run in dag_runs_should_end: - active_dagrun_span = self.active_spans.get(dag_run.run_id) + active_dagrun_span = self.active_spans.get("dr:" + str(dag_run.id)) if active_dagrun_span is not None: if dag_run.state in State.finished_dr_states: dag_run.set_dagrun_span_attrs(span=active_dagrun_span) @@ -1122,18 +1086,18 @@ def _end_spans_of_externally_ended_ops(self, session: Session): active_dagrun_span.end(end_time=datetime_to_nano(dag_run.end_date)) else: active_dagrun_span.end() - self.active_spans.delete(dag_run.run_id) + self.active_spans.delete("dr:" + str(dag_run.id)) dag_run.span_status = SpanStatus.ENDED for ti in tis_should_end: - active_ti_span = self.active_spans.get(ti.key) + active_ti_span = self.active_spans.get("ti:" + ti.id) if active_ti_span is not None: if ti.state in State.finished: self.set_ti_span_attrs(span=active_ti_span, state=ti.state, ti=ti) active_ti_span.end(end_time=datetime_to_nano(ti.end_date)) else: active_ti_span.end() - self.active_spans.delete(ti.key) + self.active_spans.delete("ti:" + ti.id) ti.span_status = SpanStatus.ENDED def _recreate_unhealthy_scheduler_spans_if_needed(self, dag_run: DagRun, session: Session): @@ -1168,7 +1132,7 @@ def _recreate_unhealthy_scheduler_spans_if_needed(self, dag_run: DagRun, session carrier = Trace.inject() # Update the context_carrier and leave the SpanStatus as ACTIVE. dag_run.context_carrier = carrier - self.active_spans.set(dag_run.run_id, dr_span) + self.active_spans.set("dr:" + str(dag_run.id), dr_span) tis = dag_run.get_task_instances(session=session) @@ -1181,7 +1145,7 @@ def _recreate_unhealthy_scheduler_spans_if_needed(self, dag_run: DagRun, session for ti in tis # If it has started and there is a reference on the active_spans dict, # then it was started by the current scheduler. 
- if ti.start_date is not None and self.active_spans.get(ti.key) is None + if ti.start_date is not None and self.active_spans.get("ti:" + ti.id) is None ] dr_context = Trace.extract(dag_run.context_carrier) @@ -1201,7 +1165,7 @@ def _recreate_unhealthy_scheduler_spans_if_needed(self, dag_run: DagRun, session ti.span_status = SpanStatus.ENDED else: ti.span_status = SpanStatus.ACTIVE - self.active_spans.set(ti.key, ti_span) + self.active_spans.set("ti:" + ti.id, ti_span) def _run_scheduler_loop(self) -> None: """ @@ -1215,6 +1179,8 @@ def _run_scheduler_loop(self) -> None: #. Heartbeat executor #. Execute queued tasks in executor asynchronously #. Sync on the states of running tasks + #. Check for expired Deadlines + #. Hand off processing the expired Deadlines if any are found """ is_unit_test: bool = conf.getboolean("core", "unit_test_mode") @@ -1238,15 +1204,17 @@ def _run_scheduler_loop(self) -> None: self._mark_backfills_complete, ) - timers.call_regular_interval( - conf.getfloat("scheduler", "pool_metrics_interval", fallback=5.0), - self._emit_pool_metrics, - ) + if self._is_metrics_enabled() or self._is_tracing_enabled(): + timers.call_regular_interval( + conf.getfloat("scheduler", "pool_metrics_interval", fallback=5.0), + self._emit_pool_metrics, + ) - timers.call_regular_interval( - conf.getfloat("scheduler", "running_metrics_interval", fallback=30.0), - self._emit_running_ti_metrics, - ) + if self._is_metrics_enabled(): + timers.call_regular_interval( + conf.getfloat("scheduler", "running_metrics_interval", fallback=30.0), + self._emit_running_ti_metrics, + ) timers.call_regular_interval( conf.getfloat("scheduler", "task_instance_heartbeat_timeout_detection_interval", fallback=10.0), @@ -1279,7 +1247,7 @@ def _run_scheduler_loop(self) -> None: for loop_count in itertools.count(start=1): with ( - Trace.start_span(span_name="scheduler_job_loop", component="SchedulerJobRunner") as span, + DebugTrace.start_span(span_name="scheduler_job_loop", component="SchedulerJobRunner") as span, Stats.timer("scheduler.scheduler_loop_duration") as timer, ): span.set_attributes( @@ -1290,7 +1258,8 @@ def _run_scheduler_loop(self) -> None: ) with create_session() as session: - self._end_spans_of_externally_ended_ops(session) + if self._is_tracing_enabled(): + self._end_spans_of_externally_ended_ops(session) # This will schedule for as many executors as possible. num_queued_tis = self._do_scheduling(session) @@ -1317,6 +1286,16 @@ def _run_scheduler_loop(self) -> None: except Exception: self.log.exception("Something went wrong when trying to save task event logs.") + with create_session() as session: + # Only retrieve expired deadlines that haven't been processed yet. + # `callback_state` is null/None by default until the handler set it. 
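            # Each expired row is handed to Deadline.handle_miss(), which is
            # expected to run the configured callback and record a callback_state
            # so the same deadline is not re-processed on the next loop.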
+ for deadline in session.scalars( + select(Deadline) + .where(Deadline.deadline_time < datetime.now(timezone.utc)) + .where(Deadline.callback_state.is_(None)) + ): + deadline.handle_miss(session) + # Heartbeat the scheduler periodically perform_heartbeat( job=self.job, heartbeat_callback=self.heartbeat_callback, only_if_necessary=True @@ -1398,7 +1377,7 @@ def _do_scheduling(self, session: Session) -> int: # Send the callbacks after we commit to ensure the context is up to date when it gets run # cache saves time during scheduling of many dag_runs for same dag cached_get_dag: Callable[[DagRun], DAG | None] = lru_cache()( - partial(self.scheduler_dag_bag.get_dag, session=session) + partial(self.scheduler_dag_bag.get_dag_for_run, session=session) ) for dag_run, callback_to_run in callback_tuples: dag = cached_get_dag(dag_run) @@ -1487,7 +1466,7 @@ def _mark_backfills_complete(self, session: Session = NEW_SESSION) -> None: for b in backfills: b.completed_at = now - @add_span + @add_debug_span def _create_dag_runs(self, dag_models: Collection[DagModel], session: Session) -> None: """Create a DAG run and update the dag_model to control if/when the next DAGRun should be created.""" # Bulk Fetch DagRuns with dag_id and logical_date same @@ -1603,17 +1582,26 @@ def _create_dag_runs_asset_triggered( ) .cte() ) + asset_events = session.scalars( select(AssetEvent) - .join( - DagScheduleAssetReference, - AssetEvent.asset_id == DagScheduleAssetReference.asset_id, - ) .where( - DagScheduleAssetReference.dag_id == dag.dag_id, + or_( + AssetEvent.asset_id.in_( + select(DagScheduleAssetReference.asset_id).where( + DagScheduleAssetReference.dag_id == dag.dag_id + ) + ), + AssetEvent.source_aliases.any( + AssetAliasModel.scheduled_dags.any( + DagScheduleAssetAliasReference.dag_id == dag.dag_id + ) + ), + ), AssetEvent.timestamp <= triggered_date, AssetEvent.timestamp > func.coalesce(cte.c.previous_dag_run_run_after, date.min), ) + .order_by(AssetEvent.timestamp.asc(), AssetEvent.id.asc()) ).all() dag_run = dag.create_dagrun( @@ -1672,7 +1660,7 @@ def _should_update_dag_next_dagruns( return False return True - @add_span + @add_debug_span def _start_queued_dagruns(self, session: Session) -> None: """Find DagRuns in queued state and decide moving them to running state.""" # added all() to save runtime, otherwise query is executed more than once @@ -1689,7 +1677,7 @@ def _start_queued_dagruns(self, session: Session) -> None: ) active_runs_of_dags = Counter({(dag_id, br_id): num for dag_id, br_id, num in session.execute(query)}) - @add_span + @add_debug_span def _update_state(dag: DAG, dag_run: DagRun): span = Trace.get_current_span() span.set_attributes( @@ -1733,7 +1721,7 @@ def _update_state(dag: DAG, dag_run: DagRun): # cache saves time during scheduling of many dag_runs for same dag cached_get_dag: Callable[[DagRun], DAG | None] = lru_cache()( - partial(self.scheduler_dag_bag.get_dag, session=session) + partial(self.scheduler_dag_bag.get_dag_for_run, session=session) ) span = Trace.get_current_span() @@ -1811,7 +1799,7 @@ def _schedule_dag_run( span_id = int(trace_utils.gen_dag_span_id(dag_run=dag_run, as_int=True)) links = [{"trace_id": trace_id, "span_id": span_id}] - with Trace.start_span( + with DebugTrace.start_span( span_name="_schedule_dag_run", component="SchedulerJobRunner", links=links ) as span: span.set_attributes( @@ -1823,7 +1811,7 @@ def _schedule_dag_run( ) callback: DagCallbackRequest | None = None - dag = dag_run.dag = self.scheduler_dag_bag.get_dag(dag_run=dag_run, session=session) + dag 
= dag_run.dag = self.scheduler_dag_bag.get_dag_for_run(dag_run=dag_run, session=session) dag_model = DM.get_dagmodel(dag_run.dag_id, session) if not dag or not dag_model: @@ -1859,6 +1847,10 @@ def _schedule_dag_run( run_id=dag_run.run_id, bundle_name=dag_model.bundle_name, bundle_version=dag_run.bundle_version, + context_from_server=DagRunContext( + dag_run=dag_run, + last_ti=dag_run.get_last_ti(dag=dag, session=session), + ), is_failure_callback=True, msg="timed_out", ) @@ -1893,7 +1885,7 @@ def _schedule_dag_run( if ( dag_run.scheduled_by_job_id is not None and dag_run.scheduled_by_job_id != self.job.id - and self.active_spans.get(dag_run.run_id) is None + and self.active_spans.get("dr:" + str(dag_run.id)) is None ): # If the dag_run has been previously scheduled by another job and there is no active span, # then check if the job is still healthy. @@ -1937,7 +1929,7 @@ def _verify_integrity_if_dag_changed(self, dag_run: DagRun, session: Session) -> self.log.debug("DAG %s not changed structure, skipping dagrun.verify_integrity", dag_run.dag_id) return True # Refresh the DAG - dag_run.dag = self.scheduler_dag_bag.get_dag(dag_run=dag_run, session=session) + dag_run.dag = self.scheduler_dag_bag.get_dag_for_run(dag_run=dag_run, session=session) if not dag_run.dag: return False # Select all TIs in State.unfinished and update the dag_version_id @@ -1975,6 +1967,7 @@ def _handle_tasks_stuck_in_queued(self, session: Session = NEW_SESSION) -> None: self._maybe_requeue_stuck_ti( ti=ti, session=session, + executor=executor, ) session.commit() except NotImplementedError: @@ -1990,7 +1983,7 @@ def _get_tis_stuck_in_queued(self, session) -> Iterable[TaskInstance]: ) ) - def _maybe_requeue_stuck_ti(self, *, ti, session): + def _maybe_requeue_stuck_ti(self, *, ti, session, executor): """ Requeue task if it has not been attempted too many times. @@ -1998,7 +1991,7 @@ def _maybe_requeue_stuck_ti(self, *, ti, session): """ num_times_stuck = self._get_num_times_stuck_in_queued(ti, session) if num_times_stuck < self._num_stuck_queued_retries: - self.log.info("Task stuck in queued; will try to requeue. task_id=%s", ti.task_id) + self.log.info("Task stuck in queued; will try to requeue. task_instance=%s", ti) session.add( Log( event=TASK_STUCK_IN_QUEUED_RESCHEDULE_EVENT, @@ -2015,14 +2008,45 @@ def _maybe_requeue_stuck_ti(self, *, ti, session): "Task requeue attempts exceeded max; marking failed. task_instance=%s", ti, ) + msg = f"Task was requeued more than {self._num_stuck_queued_retries} times and will be failed." session.add( Log( event="stuck in queued tries exceeded", task_instance=ti.key, - extra=f"Task was requeued more than {self._num_stuck_queued_retries} times and will be failed.", + extra=msg, ) ) - ti.set_state(TaskInstanceState.FAILED, session=session) + + try: + dag = self.scheduler_dag_bag.get_dag_for_run(dag_run=ti.dag_run, session=session) + task = dag.get_task(ti.task_id) + except Exception: + self.log.warning( + "The DAG or task could not be found. 
If a failure callback exists, it will not be run.", + exc_info=True, + ) + else: + if task.on_failure_callback: + if inspect(ti).detached: + ti = session.merge(ti) + request = TaskCallbackRequest( + filepath=ti.dag_model.relative_fileloc, + bundle_name=ti.dag_version.bundle_name, + bundle_version=ti.dag_version.bundle_version, + ti=ti, + msg=msg, + context_from_server=TIRunContext( + dag_run=ti.dag_run, + max_tries=ti.max_tries, + variables=[], + connections=[], + xcom_keys_to_clear=[], + ), + ) + executor.send_callback(request) + finally: + ti.set_state(TaskInstanceState.FAILED, session=session) + executor.fail(ti.key) def _reschedule_stuck_task(self, ti: TaskInstance, session: Session): session.execute( @@ -2043,19 +2067,34 @@ def _get_num_times_stuck_in_queued(self, ti: TaskInstance, session: Session = NE We can then use this information to determine whether to reschedule a task or fail it. """ - return ( - session.query(Log) + last_running_time = session.scalar( + select(Log.dttm) .where( - Log.task_id == ti.task_id, Log.dag_id == ti.dag_id, + Log.task_id == ti.task_id, Log.run_id == ti.run_id, Log.map_index == ti.map_index, Log.try_number == ti.try_number, - Log.event == TASK_STUCK_IN_QUEUED_RESCHEDULE_EVENT, + Log.event == "running", ) - .count() + .order_by(desc(Log.dttm)) + .limit(1) + ) + + query = session.query(Log).where( + Log.task_id == ti.task_id, + Log.dag_id == ti.dag_id, + Log.run_id == ti.run_id, + Log.map_index == ti.map_index, + Log.try_number == ti.try_number, + Log.event == TASK_STUCK_IN_QUEUED_RESCHEDULE_EVENT, ) + if last_running_time: + query = query.where(Log.dttm > last_running_time) + + return query.count() + previous_ti_running_metrics: dict[tuple[str, str, str], int] = {} @provide_session @@ -2091,7 +2130,7 @@ def _emit_running_ti_metrics(self, session: Session = NEW_SESSION) -> None: def _emit_pool_metrics(self, session: Session = NEW_SESSION) -> None: from airflow.models.pool import Pool - with Trace.start_span(span_name="emit_pool_metrics", component="SchedulerJobRunner") as span: + with DebugTrace.start_span(span_name="emit_pool_metrics", component="SchedulerJobRunner") as span: pools = Pool.slots_stats(session=session) for pool_name, slot_stats in pools.items(): Stats.gauge(f"pool.open_slots.{pool_name}", slot_stats["open"]) @@ -2250,7 +2289,7 @@ def _find_task_instances_without_heartbeats(self, *, session: Session) -> list[T task_instances_without_heartbeats = session.scalars( select(TI) .options(selectinload(TI.dag_model)) - .options(selectinload(TI.dag_run)) + .options(selectinload(TI.dag_run).selectinload(DagRun.consumed_asset_events)) .options(selectinload(TI.dag_version)) .with_hint(TI, "USE INDEX (ti_state)", dialect_name="mysql") .join(DM, TI.dag_id == DM.dag_id) @@ -2281,6 +2320,13 @@ def _purge_task_instances_without_heartbeats( bundle_version=ti.dag_run.bundle_version, ti=ti, msg=str(task_instance_heartbeat_timeout_message_details), + context_from_server=TIRunContext( + dag_run=DRDataModel.model_validate(ti.dag_run, from_attributes=True), + max_tries=ti.max_tries, + variables=[], + connections=[], + xcom_keys_to_clear=[], + ), ) session.add( Log( @@ -2390,7 +2436,12 @@ def _activate_referenced_assets(assets: Collection[AssetModel], *, session: Sess def _generate_warning_message( offending: AssetModel, attr: str, value: str ) -> Iterator[tuple[str, str]]: - for ref in itertools.chain(offending.consuming_dags, offending.producing_tasks): + offending_references = itertools.chain( + offending.scheduled_dags, + offending.producing_tasks, + 
offending.consuming_tasks, + ) + for ref in offending_references: yield ( ref.dag_id, ( @@ -2480,3 +2531,7 @@ def _try_to_load_executor(self, executor_name: str | None) -> BaseExecutor | Non # ourselves here and the user should get some feedback about that. self.log.warning("Executor, %s, was not found but a Task was configured to use it", executor_name) return None + + +# Backcompat for older versions of task sdk import SchedulerDagBag from here +SchedulerDagBag = DBDagBag diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py index 05ba3dd28adcf..b88f65868a037 100644 --- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py +++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py @@ -28,8 +28,9 @@ from collections.abc import Generator, Iterable from contextlib import suppress from datetime import datetime +from socket import socket from traceback import format_exception -from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal, TypedDict, Union +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal, TypedDict import attrs import structlog @@ -37,41 +38,49 @@ from sqlalchemy import func, select from structlog.contextvars import bind_contextvars as bind_log_contextvars +from airflow._shared.timezones import timezone from airflow.configuration import conf from airflow.executors import workloads from airflow.jobs.base_job_runner import BaseJobRunner from airflow.jobs.job import perform_heartbeat from airflow.models.trigger import Trigger +from airflow.sdk.api.datamodels._generated import HITLDetailResponse from airflow.sdk.execution_time.comms import ( + CommsDecoder, ConnectionResult, DagRunStateResult, + DeleteVariable, + DeleteXCom, DRCount, ErrorResponse, GetConnection, GetDagRunState, GetDRCount, + GetHITLDetailResponse, GetTaskStates, GetTICount, GetVariable, GetXCom, + OKResponse, + PutVariable, + SetXCom, TaskStatesResult, TICount, + UpdateHITLDetail, VariableResult, XComResult, + _RequestFrame, ) from airflow.sdk.execution_time.supervisor import WatchedSubprocess, make_buffered_socket_reader from airflow.stats import Stats -from airflow.traces.tracer import Trace, add_span +from airflow.traces.tracer import DebugTrace, Trace, add_debug_span from airflow.triggers import base as events -from airflow.utils import timezone from airflow.utils.helpers import log_filename_template_renderer from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.module_loading import import_string from airflow.utils.session import provide_session if TYPE_CHECKING: - from socket import socket - from sqlalchemy.orm import Session from structlog.typing import FilteringBoundLogger, WrappedLogger @@ -81,14 +90,6 @@ from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI from airflow.triggers.base import BaseTrigger -HANDLER_SUPPORTS_TRIGGERER = False -""" -If this value is true, root handler is configured to log individual trigger messages -visible in task logs. 
- -:meta private: -""" - logger = logging.getLogger(__name__) __all__ = [ @@ -189,7 +190,6 @@ class messages: class StartTriggerer(BaseModel): """Tell the async trigger runner process to start, and where to send status update messages.""" - requests_fd: int type: Literal["StartTriggerer"] = "StartTriggerer" class TriggerStateChanges(BaseModel): @@ -217,19 +217,36 @@ class TriggerStateSync(BaseModel): to_cancel: set[int] +class HITLDetailResponseResult(HITLDetailResponse): + """Response to GetHITLDetailResponse request.""" + + type: Literal["HITLDetailResponseResult"] = "HITLDetailResponseResult" + + @classmethod + def from_api_response(cls, response: HITLDetailResponse) -> HITLDetailResponseResult: + """ + Create result class from API Response. + + API Response is autogenerated from the API schema, so we need to convert it to Result + for communication between the Supervisor and the task process since it needs a + discriminator field. + """ + return cls(**response.model_dump(exclude_defaults=True), type="HITLDetailResponseResult") + + ToTriggerRunner = Annotated[ - Union[ - messages.StartTriggerer, - messages.TriggerStateSync, - ConnectionResult, - VariableResult, - XComResult, - DagRunStateResult, - DRCount, - TICount, - TaskStatesResult, - ErrorResponse, - ], + messages.StartTriggerer + | messages.TriggerStateSync + | ConnectionResult + | VariableResult + | XComResult + | DagRunStateResult + | DRCount + | TICount + | TaskStatesResult + | HITLDetailResponseResult + | ErrorResponse + | OKResponse, Field(discriminator="type"), ] """ @@ -239,16 +256,20 @@ class TriggerStateSync(BaseModel): ToTriggerSupervisor = Annotated[ - Union[ - messages.TriggerStateChanges, - GetConnection, - GetVariable, - GetXCom, - GetTICount, - GetTaskStates, - GetDagRunState, - GetDRCount, - ], + messages.TriggerStateChanges + | GetConnection + | DeleteVariable + | GetVariable + | PutVariable + | DeleteXCom + | GetXCom + | SetXCom + | GetTICount + | GetTaskStates + | GetDagRunState + | GetDRCount + | GetHITLDetailResponse + | UpdateHITLDetail, Field(discriminator="type"), ] """ @@ -303,7 +324,7 @@ class TriggerRunnerSupervisor(WatchedSubprocess): """ TriggerRunnerSupervisor is responsible for monitoring the subprocess and marshalling DB access. - This class (which runs in the main process) is responsible for querying the DB, sending RunTrigger + This class (which runs in the main/sync process) is responsible for querying the DB, sending RunTrigger workload messages to the subprocess, and collecting results and updating them in the DB. """ @@ -350,8 +371,8 @@ def start( # type: ignore[override] ): proc = super().start(id=job.id, job=job, target=cls.run_in_process, logger=logger, **kwargs) - msg = messages.StartTriggerer(requests_fd=proc._requests_fd) - proc.send_msg(msg) + msg = messages.StartTriggerer() + proc.send_msg(msg, request_id=0) return proc @functools.cached_property @@ -360,10 +381,10 @@ def client(self) -> Client: client = Client(base_url=None, token="", dry_run=True, transport=in_process_api_server().transport) # Mypy is wrong -- the setter accepts a string on the property setter! 
`URLType = URL | str` - client.base_url = "http://in-process.invalid./" # type: ignore[assignment] + client.base_url = "http://in-process.invalid./" return client - def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger) -> None: # type: ignore[override] + def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, req_id: int) -> None: from airflow.sdk.api.datamodels._generated import ( ConnectionResponse, TaskStatesResponse, @@ -404,9 +425,12 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger) - if isinstance(conn, ConnectionResponse): conn_result = ConnectionResult.from_conn_response(conn) resp = conn_result - dump_opts = {"exclude_unset": True} + # `by_alias=True` is used to convert the `schema` field to `schema_` in the Connection model + dump_opts = {"exclude_unset": True, "by_alias": True} else: resp = conn + elif isinstance(msg, DeleteVariable): + resp = self.client.variables.delete(msg.key) elif isinstance(msg, GetVariable): var = self.client.variables.get(msg.key) if isinstance(var, VariableResponse): @@ -415,6 +439,10 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger) - dump_opts = {"exclude_unset": True} else: resp = var + elif isinstance(msg, PutVariable): + self.client.variables.set(msg.key, msg.value, msg.description) + elif isinstance(msg, DeleteXCom): + self.client.xcoms.delete(msg.dag_id, msg.run_id, msg.task_id, msg.key, msg.map_index) elif isinstance(msg, GetXCom): xcom = self.client.xcoms.get(msg.dag_id, msg.run_id, msg.task_id, msg.key, msg.map_index) if isinstance(xcom, XComResponse): @@ -423,6 +451,10 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger) - dump_opts = {"exclude_unset": True} else: resp = xcom + elif isinstance(msg, SetXCom): + self.client.xcoms.set( + msg.dag_id, msg.run_id, msg.task_id, msg.key, msg.value, msg.map_index, msg.mapped_length + ) elif isinstance(msg, GetDRCount): dr_count = self.client.dag_runs.get_count( dag_id=msg.dag_id, @@ -438,6 +470,7 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger) - elif isinstance(msg, GetTICount): resp = self.client.task_instances.get_count( dag_id=msg.dag_id, + map_index=msg.map_index, task_ids=msg.task_ids, task_group_id=msg.task_group_id, logical_dates=msg.logical_dates, @@ -448,6 +481,7 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger) - elif isinstance(msg, GetTaskStates): run_id_task_state_map = self.client.task_instances.get_task_states( dag_id=msg.dag_id, + map_index=msg.map_index, task_ids=msg.task_ids, task_group_id=msg.task_group_id, logical_dates=msg.logical_dates, @@ -457,11 +491,20 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger) - resp = TaskStatesResult.from_api_response(run_id_task_state_map) else: resp = run_id_task_state_map + elif isinstance(msg, UpdateHITLDetail): + api_resp = self.client.hitl.update_response( + ti_id=msg.ti_id, + chosen_options=msg.chosen_options, + params_input=msg.params_input, + ) + resp = HITLDetailResponseResult.from_api_response(response=api_resp) + elif isinstance(msg, GetHITLDetailResponse): + api_resp = self.client.hitl.get_detail_response(ti_id=msg.ti_id) + resp = HITLDetailResponseResult.from_api_response(response=api_resp) else: raise ValueError(f"Unknown message type {type(msg)}") - if resp: - self.send_msg(resp, **dump_opts) + self.send_msg(resp, request_id=req_id, error=None, **dump_opts) def run(self) -> None: """Run synchronously 
and handle all database reads/writes.""" @@ -473,7 +516,7 @@ def run(self) -> None: if not self.is_alive(): log.error("Trigger runner process has died! Exiting.") break - with Trace.start_span(span_name="triggerer_job_loop", component="TriggererJobRunner"): + with DebugTrace.start_span(span_name="triggerer_job_loop", component="TriggererJobRunner"): self.load_triggers() # Wait for up to 1 second for activity @@ -492,14 +535,14 @@ def heartbeat(self): def heartbeat_callback(self, session: Session | None = None) -> None: Stats.incr("triggerer_heartbeat", 1, 1) - @add_span + @add_debug_span def load_triggers(self): """Query the database for the triggers we're supposed to be running and update the runner.""" Trigger.assign_unassigned(self.job.id, self.capacity, self.health_check_threshold) ids = Trigger.ids_for_triggerer(self.job.id) self.update_triggers(set(ids)) - @add_span + @add_debug_span def handle_events(self): """Dispatch outbound events to the Trigger model which pushes them to the relevant task instances.""" while self.events: @@ -510,12 +553,12 @@ def handle_events(self): # Emit stat event Stats.incr("triggers.succeeded") - @add_span + @add_debug_span def clean_unused(self): """Clean out unused or finished triggers.""" Trigger.clean_unused() - @add_span + @add_debug_span def handle_failed_triggers(self): """ Handle "failed" triggers. - ones that errored or exited before they sent an event. @@ -560,13 +603,14 @@ def update_triggers(self, requested_trigger_ids: set[int]): self.running_triggers.union(x[0] for x in self.events) .union(self.cancelling_triggers) .union(trigger[0] for trigger in self.failed_triggers) + .union(trigger.id for trigger in self.creating_triggers) ) # Work out the two difference sets new_trigger_ids = requested_trigger_ids - known_trigger_ids cancel_trigger_ids = self.running_triggers - requested_trigger_ids # Bulk-fetch new trigger records new_triggers = Trigger.bulk_fetch(new_trigger_ids) - triggers_with_assets = Trigger.fetch_trigger_ids_with_asset() + trigger_ids_with_non_task_associations = Trigger.fetch_trigger_ids_with_non_task_associations() to_create: list[workloads.RunTrigger] = [] # Add in new triggers for new_id in new_trigger_ids: @@ -577,11 +621,11 @@ def update_triggers(self, requested_trigger_ids: set[int]): new_trigger_orm = new_triggers[new_id] - # If the trigger is not associated to a task or an asset, this means the TaskInstance + # If the trigger is not associated to a task, an asset, or a deadline, this means the TaskInstance # row was updated by either Trigger.submit_event or Trigger.submit_failure # and can happen when a single trigger Job is being run on multiple TriggerRunners # in a High-Availability setup. - if new_trigger_orm.task_instance is None and new_id not in triggers_with_assets: + if new_trigger_orm.task_instance is None and new_id not in trigger_ids_with_non_task_associations: log.info( ( "TaskInstance Trigger is None. It was likely updated by another trigger job. 
" @@ -634,7 +678,7 @@ def _register_pipe_readers(self, stdout: socket, stderr: socket, requests: socke ), ) - def _process_log_messages_from_subprocess(self) -> Generator[None, bytes, None]: + def _process_log_messages_from_subprocess(self) -> Generator[None, bytes | bytearray, None]: import msgspec from structlog.stdlib import NAME_TO_LEVEL @@ -697,14 +741,60 @@ class TriggerDetails(TypedDict): events: int +@attrs.define(kw_only=True) +class TriggerCommsDecoder(CommsDecoder[ToTriggerRunner, ToTriggerSupervisor]): + _async_writer: asyncio.StreamWriter = attrs.field(alias="async_writer") + _async_reader: asyncio.StreamReader = attrs.field(alias="async_reader") + + body_decoder: TypeAdapter[ToTriggerRunner] = attrs.field( + factory=lambda: TypeAdapter(ToTriggerRunner), repr=False + ) + + _lock: asyncio.Lock = attrs.field(factory=asyncio.Lock, repr=False) + + def _read_frame(self): + from asgiref.sync import async_to_sync + + return async_to_sync(self._aread_frame)() + + def send(self, msg: ToTriggerSupervisor) -> ToTriggerRunner | None: + from asgiref.sync import async_to_sync + + return async_to_sync(self.asend)(msg) + + async def _aread_frame(self): + len_bytes = await self._async_reader.readexactly(4) + length = int.from_bytes(len_bytes, byteorder="big") + if length >= 2**32: + raise OverflowError(f"Refusing to receive messages larger than 4GiB {length=}") + + buffer = await self._async_reader.readexactly(length) + return self.resp_decoder.decode(buffer) + + async def _aget_response(self, expect_id: int) -> ToTriggerRunner | None: + frame = await self._aread_frame() + if frame.id != expect_id: + # Given the lock we take out in `asend`, this _shouldn't_ be possible, but I'd rather fail with + # this explicit error return the wrong type of message back to a Trigger + raise RuntimeError(f"Response read out of order! Got {frame.id=}, {expect_id=}") + return self._from_frame(frame) + + async def asend(self, msg: ToTriggerSupervisor) -> ToTriggerRunner | None: + frame = _RequestFrame(id=next(self.id_counter), body=msg.model_dump()) + bytes = frame.as_bytes() + + async with self._lock: + self._async_writer.write(bytes) + + return await self._aget_response(frame.id) + + class TriggerRunner: """ Runtime environment for all triggers. - Mainly runs inside its own thread, where it hands control off to an asyncio - event loop, but is also sometimes interacted with from the main thread - (where all the DB queries are done). All communication between threads is - done via Deques. + Mainly runs inside its own process, where it hands control off to an asyncio + event loop. All communication between this and it's (sync) supervisor is done via sockets """ # Maps trigger IDs to their running tasks and other info @@ -732,10 +822,7 @@ class TriggerRunner: # TODO: connect this to the parent process log: FilteringBoundLogger = structlog.get_logger() - requests_sock: asyncio.StreamWriter - response_sock: asyncio.StreamReader - - decoder: TypeAdapter[ToTriggerRunner] + comms_decoder: TriggerCommsDecoder def __init__(self): super().__init__() @@ -746,7 +833,6 @@ def __init__(self): self.events = deque() self.failed_triggers = deque() self.job_id = None - self.decoder = TypeAdapter(ToTriggerRunner) def run(self): """Sync entrypoint - just run a run in an async loop.""" @@ -802,36 +888,21 @@ async def init_comms(self): """ from airflow.sdk.execution_time import task_runner - loop = asyncio.get_event_loop() + # Yes, we read and write to stdin! It's a socket, not a normal stdin. 
+ reader, writer = await asyncio.open_connection(sock=socket(fileno=0)) - comms_decoder = task_runner.CommsDecoder[ToTriggerRunner, ToTriggerSupervisor]( - input=sys.stdin, - decoder=self.decoder, + self.comms_decoder = TriggerCommsDecoder( + async_writer=writer, + async_reader=reader, ) - task_runner.SUPERVISOR_COMMS = comms_decoder - - async def connect_stdin() -> asyncio.StreamReader: - reader = asyncio.StreamReader() - protocol = asyncio.StreamReaderProtocol(reader) - await loop.connect_read_pipe(lambda: protocol, sys.stdin) - return reader + task_runner.SUPERVISOR_COMMS = self.comms_decoder - self.response_sock = await connect_stdin() + msg = await self.comms_decoder._aget_response(expect_id=0) - line = await self.response_sock.readline() - - msg = self.decoder.validate_json(line) if not isinstance(msg, messages.StartTriggerer): raise RuntimeError(f"Required first message to be a messages.StartTriggerer, it was {msg}") - comms_decoder.request_socket = os.fdopen(msg.requests_fd, "wb", buffering=0) - writer_transport, writer_protocol = await loop.connect_write_pipe( - lambda: asyncio.streams.FlowControlMixin(loop=loop), - comms_decoder.request_socket, - ) - self.requests_sock = asyncio.streams.StreamWriter(writer_transport, writer_protocol, None, loop) - async def create_triggers(self): """Drain the to_create queue and create all new triggers that have been requested in the DB.""" while self.to_create: @@ -854,8 +925,16 @@ async def create_triggers(self): await asyncio.sleep(0) try: - kwargs = Trigger._decrypt_kwargs(workload.encrypted_kwargs) - trigger_instance = trigger_class(**kwargs) + from airflow.serialization.serialized_objects import smart_decode_trigger_kwargs + + # Decrypt and clean trigger kwargs before execution + # Note: We only clean up serialization artifacts (__var, __type keys) here, + # not in `_decrypt_kwargs`, because that method is used during hash comparison in + # add_asset_trigger_references; cleaning there could lead to adverse effects like hash + # mismatches that could cause None values in collections. + kw = Trigger._decrypt_kwargs(workload.encrypted_kwargs) + deserialised_kwargs = {k: smart_decode_trigger_kwargs(v) for k, v in kw.items()} + trigger_instance = trigger_class(**deserialised_kwargs) except TypeError as err: self.log.error("Trigger failed to inflate", error=err) self.failed_triggers.append((trigger_id, err)) @@ -940,8 +1019,6 @@ async def cleanup_finished_triggers(self) -> list[int]: return finished_ids async def sync_state_to_supervisor(self, finished_ids: list[int]): - from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS - # Copy out of our deques in threadsafe manner to sync state with parent events_to_send = [] while self.events: @@ -967,19 +1044,17 @@ async def sync_state_to_supervisor(self, finished_ids: list[int]): if not finished_ids: msg.finished = None - # Block triggers from making any requests for the duration of this - async with SUPERVISOR_COMMS.lock: - # Tell the monitor that we've finished triggers so it can update things - self.requests_sock.write(msg.model_dump_json(exclude_none=True).encode() + b"\n") - line = await self.response_sock.readline() - - if line == b"": # EoF received!
+ # Tell the monitor that we've finished triggers so it can update things + try: + resp = await self.comms_decoder.asend(msg) + except asyncio.IncompleteReadError: if task := asyncio.current_task(): task.cancel("EOF - shutting down") + return + raise - resp = self.decoder.validate_json(line) if not isinstance(resp, messages.TriggerStateSync): - raise RuntimeError(f"Expected to get a TriggerStateSync message, instead we got f{type(msg)}") + raise RuntimeError(f"Expected to get a TriggerStateSync message, instead we got {type(resp)}") self.to_create.extend(resp.to_create) self.to_cancel.extend(resp.to_cancel) diff --git a/airflow-core/src/airflow/lineage/hook.py b/airflow-core/src/airflow/lineage/hook.py index 9aeb65c277147..b69f12484dc1b 100644 --- a/airflow-core/src/airflow/lineage/hook.py +++ b/airflow-core/src/airflow/lineage/hook.py @@ -20,7 +20,7 @@ import hashlib import json from collections import defaultdict -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING, TypeAlias import attr @@ -29,11 +29,10 @@ from airflow.utils.log.logging_mixin import LoggingMixin if TYPE_CHECKING: - from airflow.hooks.base import BaseHook - from airflow.sdk import ObjectStoragePath + from airflow.sdk import BaseHook, ObjectStoragePath # Store context what sent lineage. - LineageContext = Union[BaseHook, ObjectStoragePath] + LineageContext: TypeAlias = BaseHook | ObjectStoragePath _hook_lineage_collector: HookLineageCollector | None = None diff --git a/airflow-core/src/airflow/logging_config.py b/airflow-core/src/airflow/logging_config.py index b0c0b35515599..e6d837bc22077 100644 --- a/airflow-core/src/airflow/logging_config.py +++ b/airflow-core/src/airflow/logging_config.py @@ -33,6 +33,7 @@ REMOTE_TASK_LOG: RemoteLogIO | None +DEFAULT_REMOTE_CONN_ID: str | None = None def __getattr__(name: str): @@ -44,7 +45,7 @@ def __getattr__(name: str): def load_logging_config() -> tuple[dict[str, Any], str]: """Configure & Validate Airflow Logging.""" - global REMOTE_TASK_LOG + global REMOTE_TASK_LOG, DEFAULT_REMOTE_CONN_ID fallback = "airflow.config_templates.airflow_local_settings.DEFAULT_LOGGING_CONFIG" logging_class_path = conf.get("logging", "logging_config_class", fallback=fallback) @@ -70,10 +71,11 @@ def load_logging_config() -> tuple[dict[str, Any], str]: f"to: {type(err).__name__}:{err}" ) else: - mod = logging_class_path.rsplit(".", 1)[0] + modpath = logging_class_path.rsplit(".", 1)[0] try: - remote_task_log = import_string(f"{mod}.REMOTE_TASK_LOG") - REMOTE_TASK_LOG = remote_task_log + mod = import_string(modpath) + REMOTE_TASK_LOG = getattr(mod, "REMOTE_TASK_LOG") + DEFAULT_REMOTE_CONN_ID = getattr(mod, "DEFAULT_REMOTE_CONN_ID", None) except Exception as err: log.info("Remote task logs will not be available due to an error: %s", err) diff --git a/airflow-core/src/airflow/macros/__init__.py b/airflow-core/src/airflow/macros/__init__.py index 4d858a1c751d3..7dc1ae44e1dff 100644 --- a/airflow-core/src/airflow/macros/__init__.py +++ b/airflow-core/src/airflow/macros/__init__.py @@ -17,17 +17,9 @@ # under the License.
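# A hedged sketch of a module referenced by [logging] logging_config_class,
# showing the two attributes load_logging_config now reads via getattr; the
# module path and connection id below are illustrative assumptions.
# my_company/log_config.py
from copy import deepcopy

from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)
REMOTE_TASK_LOG = None  # or a provider-supplied RemoteLogIO instance
DEFAULT_REMOTE_CONN_ID = "my_remote_logging_conn"  # optional; None when absent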
from __future__ import annotations -from airflow.sdk.definitions.macros import ( # noqa: F401 - datetime, - datetime_diff_for_humans, - dateutil, - ds_add, - ds_format, - ds_format_locale, - json, - random, - time, - timedelta, - uuid, - yaml, +from airflow.utils.deprecation_tools import add_deprecated_classes + +add_deprecated_classes( + {__name__: {"*": "airflow.sdk.execution_time.macros"}}, + package=__name__, ) diff --git a/airflow-core/src/airflow/metrics/otel_logger.py b/airflow-core/src/airflow/metrics/otel_logger.py index a8ad9d8ce2eb1..317b70a5ca29d 100644 --- a/airflow-core/src/airflow/metrics/otel_logger.py +++ b/airflow-core/src/airflow/metrics/otel_logger.py @@ -20,7 +20,8 @@ import logging import random import warnings -from typing import TYPE_CHECKING, Callable, Union +from collections.abc import Callable +from typing import TYPE_CHECKING from opentelemetry import metrics from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter @@ -47,7 +48,7 @@ log = logging.getLogger(__name__) -GaugeValues = Union[int, float] +GaugeValues = int | float DEFAULT_GAUGE_VALUE = 0.0 diff --git a/airflow-core/src/airflow/metrics/protocols.py b/airflow-core/src/airflow/metrics/protocols.py index 3c405aef37e7a..d170fe29ad9ba 100644 --- a/airflow-core/src/airflow/metrics/protocols.py +++ b/airflow-core/src/airflow/metrics/protocols.py @@ -19,12 +19,12 @@ import datetime import time -from typing import TYPE_CHECKING, Protocol, Union +from typing import TYPE_CHECKING, Protocol if TYPE_CHECKING: from airflow.typing_compat import Self -DeltaType = Union[int, float, datetime.timedelta] +DeltaType = int | float | datetime.timedelta class TimerProtocol(Protocol): diff --git a/airflow-core/src/airflow/metrics/statsd_logger.py b/airflow-core/src/airflow/metrics/statsd_logger.py index 8d47bc9ae132a..d952693eb230d 100644 --- a/airflow-core/src/airflow/metrics/statsd_logger.py +++ b/airflow-core/src/airflow/metrics/statsd_logger.py @@ -18,8 +18,9 @@ from __future__ import annotations import logging +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Callable, TypeVar, cast +from typing import TYPE_CHECKING, TypeVar, cast from airflow.configuration import conf from airflow.exceptions import AirflowConfigException diff --git a/airflow-core/src/airflow/metrics/validators.py b/airflow-core/src/airflow/metrics/validators.py index 85152426fdd17..252524455e022 100644 --- a/airflow-core/src/airflow/metrics/validators.py +++ b/airflow-core/src/airflow/metrics/validators.py @@ -24,10 +24,10 @@ import re import string import warnings -from collections.abc import Iterable +from collections.abc import Callable, Iterable from functools import partial, wraps from re import Pattern -from typing import Callable, cast +from typing import cast from airflow.configuration import conf from airflow.exceptions import InvalidStatsNameException diff --git a/airflow-core/src/airflow/migrations/utils.py b/airflow-core/src/airflow/migrations/utils.py index 9305606873549..2dbbbece01a57 100644 --- a/airflow-core/src/airflow/migrations/utils.py +++ b/airflow-core/src/airflow/migrations/utils.py @@ -16,6 +16,7 @@ # under the License. 
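# A short sketch of what the add_deprecated_classes redirect above means for
# callers, assuming the "*" mapping proxies attribute access the way it does
# for other deprecated modules: old imports still resolve, with a
# DeprecationWarning, to the SDK implementations.
import warnings

with warnings.catch_warnings(record=True):
    warnings.simplefilter("always")
    from airflow.macros import ds_add  # proxied to airflow.sdk.execution_time.macros

print(ds_add("2025-01-01", 5))  # "2025-01-06"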
from __future__ import annotations +import contextlib from collections import defaultdict from contextlib import contextmanager @@ -103,3 +104,11 @@ def mysql_drop_index_if_exists(index_name, table_name, op): SELECT 1; END IF; """) + + +def ignore_sqlite_value_error(): + from alembic import op + + if op.get_bind().dialect.name == "sqlite": + return contextlib.suppress(ValueError) + return contextlib.nullcontext() diff --git a/airflow-core/src/airflow/migrations/versions/0017_2_9_2_fix_inconsistency_between_ORM_and_migration_files.py b/airflow-core/src/airflow/migrations/versions/0017_2_9_2_fix_inconsistency_between_ORM_and_migration_files.py index 0a62b550d40b9..fa24916df6faa 100644 --- a/airflow-core/src/airflow/migrations/versions/0017_2_9_2_fix_inconsistency_between_ORM_and_migration_files.py +++ b/airflow-core/src/airflow/migrations/versions/0017_2_9_2_fix_inconsistency_between_ORM_and_migration_files.py @@ -243,8 +243,12 @@ def upgrade(): ) """) ) - - conn.execute(sa.text("INSERT INTO dag_run_new SELECT * FROM dag_run")) + headers = ( + "id, dag_id, queued_at, execution_date, start_date, end_date, state, run_id, creating_job_id, " + "external_trigger, run_type, conf, data_interval_start, data_interval_end, " + "last_scheduling_decision, dag_hash, log_template_id, updated_at, clear_number" + ) + conn.execute(sa.text(f"INSERT INTO dag_run_new ({headers}) SELECT {headers} FROM dag_run")) conn.execute(sa.text("DROP TABLE dag_run")) conn.execute(sa.text("ALTER TABLE dag_run_new RENAME TO dag_run")) conn.execute(sa.text("PRAGMA foreign_keys=on")) diff --git a/airflow-core/src/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py b/airflow-core/src/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py index 383319cd6d9e7..4387728020e77 100644 --- a/airflow-core/src/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py +++ b/airflow-core/src/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py @@ -28,7 +28,6 @@ from __future__ import annotations from alembic import op -from sqlalchemy import inspect # revision identifiers, used by Alembic. 
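# A hedged usage sketch for ignore_sqlite_value_error defined above, mirroring
# how the dag-versioning migration later in this diff applies it: on SQLite,
# dropping a primary key inside batch_alter_table can raise ValueError, which
# is safe to suppress there; every other dialect gets a no-op nullcontext.
with ignore_sqlite_value_error(), op.batch_alter_table("dag_code") as batch_op:
    batch_op.drop_constraint("dag_code_pkey", type_="primary")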
revision = "044f740568ec" @@ -38,54 +37,28 @@ airflow_version = "3.0.0" -def table_exists(table_name): - """Check if a table exists in the database.""" - inspector = inspect(op.get_bind()) - return table_name in inspector.get_table_names() - - -def constraint_exists(table_name, constraint_name): - """Check if a foreign key constraint exists on a table.""" - inspector = inspect(op.get_bind()) - foreign_keys = inspector.get_foreign_keys(table_name) - return any(fk["name"] == constraint_name for fk in foreign_keys) - - -def index_exists(table_name, index_name): - """Check if an index exists on a table.""" - inspector = inspect(op.get_bind()) - indexes = inspector.get_indexes(table_name) - return any(idx["name"] == index_name for idx in indexes) - - def upgrade(): """Apply Drop ab_user.id foreign key.""" - if constraint_exists("dag_run_note", "dag_run_note_user_fkey"): - with op.batch_alter_table("dag_run_note", schema=None) as batch_op: - batch_op.drop_constraint("dag_run_note_user_fkey", type_="foreignkey") - - if constraint_exists("task_instance_note", "task_instance_note_user_fkey"): - with op.batch_alter_table("task_instance_note", schema=None) as batch_op: - batch_op.drop_constraint("task_instance_note_user_fkey", type_="foreignkey") + with op.batch_alter_table("dag_run_note", schema=None) as batch_op: + batch_op.drop_constraint("dag_run_note_user_fkey", type_="foreignkey") + with op.batch_alter_table("task_instance_note", schema=None) as batch_op: + batch_op.drop_constraint("task_instance_note_user_fkey", type_="foreignkey") if op.get_bind().dialect.name == "mysql": - if index_exists("dag_run_note", "dag_run_note_user_fkey"): - with op.batch_alter_table("dag_run_note", schema=None) as batch_op: - batch_op.drop_index("dag_run_note_user_fkey") + with op.batch_alter_table("dag_run_note", schema=None) as batch_op: + batch_op.drop_index("dag_run_note_user_fkey") - if index_exists("task_instance_note", "task_instance_note_user_fkey"): - with op.batch_alter_table("task_instance_note", schema=None) as batch_op: - batch_op.drop_index("task_instance_note_user_fkey") + with op.batch_alter_table("task_instance_note", schema=None) as batch_op: + batch_op.drop_index("task_instance_note_user_fkey") def downgrade(): """Unapply Drop ab_user.id foreign key.""" - if table_exists("ab_user"): - with op.batch_alter_table("task_instance_note", schema=None) as batch_op: - batch_op.create_foreign_key("task_instance_note_user_fkey", "ab_user", ["user_id"], ["id"]) + with op.batch_alter_table("task_instance_note", schema=None) as batch_op: + batch_op.create_foreign_key("task_instance_note_user_fkey", "ab_user", ["user_id"], ["id"]) - with op.batch_alter_table("dag_run_note", schema=None) as batch_op: - batch_op.create_foreign_key("dag_run_note_user_fkey", "ab_user", ["user_id"], ["id"]) + with op.batch_alter_table("dag_run_note", schema=None) as batch_op: + batch_op.create_foreign_key("dag_run_note_user_fkey", "ab_user", ["user_id"], ["id"]) if op.get_bind().dialect.name == "mysql": with op.batch_alter_table("task_instance_note", schema=None) as batch_op: diff --git a/airflow-core/src/airflow/migrations/versions/0030_3_0_0_rename_schedule_interval_to_timetable_.py b/airflow-core/src/airflow/migrations/versions/0030_3_0_0_rename_schedule_interval_to_timetable_.py index b4434a65f3f48..16f782933100d 100644 --- a/airflow-core/src/airflow/migrations/versions/0030_3_0_0_rename_schedule_interval_to_timetable_.py +++ b/airflow-core/src/airflow/migrations/versions/0030_3_0_0_rename_schedule_interval_to_timetable_.py @@ 
-58,3 +58,4 @@ def downgrade(): type_=sa.Text, nullable=True, ) + op.execute("UPDATE dag SET schedule_interval=NULL;") diff --git a/airflow-core/src/airflow/migrations/versions/0036_3_0_0_add_name_field_to_dataset_model.py b/airflow-core/src/airflow/migrations/versions/0036_3_0_0_add_name_field_to_dataset_model.py index c7112f91b3c5e..b1f925dbffca2 100644 --- a/airflow-core/src/airflow/migrations/versions/0036_3_0_0_add_name_field_to_dataset_model.py +++ b/airflow-core/src/airflow/migrations/versions/0036_3_0_0_add_name_field_to_dataset_model.py @@ -48,7 +48,7 @@ _STRING_COLUMN_TYPE = sa.String(length=1500).with_variant( sa.String(length=1500, collation="latin1_general_cs"), - dialect_name="mysql", + "mysql", ) @@ -128,7 +128,7 @@ def downgrade(): "uri", type_=sa.String(length=3000).with_variant( sa.String(length=3000, collation="latin1_general_cs"), - dialect_name="mysql", + "mysql", ), nullable=False, ) diff --git a/airflow-core/src/airflow/migrations/versions/0038_3_0_0_add_asset_active.py b/airflow-core/src/airflow/migrations/versions/0038_3_0_0_add_asset_active.py index c924e0157bd5e..2a992cab4126e 100644 --- a/airflow-core/src/airflow/migrations/versions/0038_3_0_0_add_asset_active.py +++ b/airflow-core/src/airflow/migrations/versions/0038_3_0_0_add_asset_active.py @@ -39,7 +39,7 @@ _STRING_COLUMN_TYPE = sa.String(length=1500).with_variant( sa.String(length=1500, collation="latin1_general_cs"), - dialect_name="mysql", + "mysql", ) diff --git a/airflow-core/src/airflow/migrations/versions/0039_3_0_0_tweak_assetaliasmodel_to_match_asset.py b/airflow-core/src/airflow/migrations/versions/0039_3_0_0_tweak_assetaliasmodel_to_match_asset.py index 7cc2423590440..d0067f1288255 100644 --- a/airflow-core/src/airflow/migrations/versions/0039_3_0_0_tweak_assetaliasmodel_to_match_asset.py +++ b/airflow-core/src/airflow/migrations/versions/0039_3_0_0_tweak_assetaliasmodel_to_match_asset.py @@ -51,7 +51,7 @@ _STRING_COLUMN_TYPE = sa.String(length=1500).with_variant( sa.String(length=1500, collation="latin1_general_cs"), - dialect_name="mysql", + "mysql", ) @@ -77,7 +77,7 @@ def downgrade(): "name", type_=sa.String(length=3000).with_variant( sa.String(length=3000, collation="latin1_general_cs"), - dialect_name="mysql", + "mysql", ), nullable=False, ) diff --git a/airflow-core/src/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py b/airflow-core/src/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py index 3f4d5c4e926f1..df7965d38e934 100644 --- a/airflow-core/src/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py +++ b/airflow-core/src/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py @@ -206,7 +206,10 @@ def upgrade(): op.execute(pg_uuid7_fn) # Migrate existing rows with UUID v7 using a timestamp-based generation + batch_num = 0 while True: + batch_num += 1 + print(f"processing batch {batch_num}") result = conn.execute( text( """ @@ -223,7 +226,9 @@ def upgrade(): """ ).bindparams(batch_size=batch_size) ) - row_count = result.rowcount + row_count = 0 + if result: + row_count = result.rowcount if row_count == 0: break print(f"Migrated {row_count} task_instance rows in this batch...") diff --git a/airflow-core/src/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py b/airflow-core/src/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py index bd0a8ea725f89..184c0a98218e0 100644 --- a/airflow-core/src/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py +++ 
b/airflow-core/src/airflow/migrations/versions/0047_3_0_0_add_dag_versioning.py @@ -30,12 +30,11 @@ import sqlalchemy as sa from alembic import op from sqlalchemy_utils import UUIDType -from uuid6 import uuid7 +from airflow._shared.timezones import timezone from airflow.migrations.db_types import TIMESTAMP, StringID +from airflow.migrations.utils import ignore_sqlite_value_error from airflow.models.base import naming_convention -from airflow.models.dagcode import DagCode -from airflow.utils import timezone # revision identifiers, used by Alembic. revision = "2b47dc6bc8df" @@ -45,27 +44,11 @@ airflow_version = "3.0.0" -def _get_rows(sql, conn): - stmt = sa.text(sql) - rows = conn.execute(stmt) - if rows: - rows = rows.fetchall() - else: - rows = [] - return rows - - -def _airflow_2_fileloc_hash(fileloc): - import hashlib - import struct - - # Only 7 bytes because MySQL BigInteger can hold only 8 bytes (signed). - return struct.unpack(">Q", hashlib.sha1(fileloc.encode("utf-8")).digest()[-8:])[0] >> 8 - - def upgrade(): """Apply add dag versioning.""" - conn = op.get_bind() + op.execute("delete from dag_code;") + op.execute("delete from serialized_dag;") + op.create_table( "dag_version", sa.Column("id", UUIDType(binary=False), nullable=False), @@ -73,173 +56,33 @@ def upgrade(): sa.Column("dag_id", StringID(), nullable=False), sa.Column("created_at", TIMESTAMP(), nullable=False, default=timezone.utcnow), sa.Column( - "last_updated", TIMESTAMP(), nullable=False, default=timezone.utcnow, onupdate=timezone.utcnow + "last_updated", + TIMESTAMP(), + nullable=False, + default=timezone.utcnow, + onupdate=timezone.utcnow, ), sa.ForeignKeyConstraint( - ("dag_id",), ["dag.dag_id"], name=op.f("dag_version_dag_id_fkey"), ondelete="CASCADE" + ("dag_id",), + ["dag.dag_id"], + name=op.f("dag_version_dag_id_fkey"), + ondelete="CASCADE", ), sa.PrimaryKeyConstraint("id", name=op.f("dag_version_pkey")), sa.UniqueConstraint("dag_id", "version_number", name="dag_id_v_name_v_number_unique_constraint"), ) - with op.batch_alter_table( - "dag_code", - ) as batch_op: - batch_op.drop_constraint("dag_code_pkey", type_="primary") - batch_op.add_column(sa.Column("id", UUIDType(binary=False))) - batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False))) - batch_op.add_column(sa.Column("source_code_hash", sa.String(length=32))) - batch_op.add_column(sa.Column("dag_id", StringID())) - batch_op.add_column(sa.Column("created_at", TIMESTAMP(), default=timezone.utcnow)) - with op.batch_alter_table( - "serialized_dag", - ) as batch_op: - batch_op.add_column(sa.Column("id", UUIDType(binary=False))) - batch_op.drop_index("idx_fileloc_hash") - batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False))) - batch_op.add_column(sa.Column("created_at", TIMESTAMP(), default=timezone.utcnow)) - - # Data migration - rows = _get_rows("SELECT dag_id FROM serialized_dag", conn) - - stmt = sa.text(""" - UPDATE serialized_dag - SET id = :_id - WHERE dag_id = :dag_id AND id IS NULL - """) - - for row in rows: - id = uuid7() - if conn.dialect.name != "postgresql": - id = id.hex - else: - id = str(id) - - conn.execute(stmt.bindparams(_id=id, dag_id=row.dag_id)) - id2 = uuid7() - if conn.dialect.name != "postgresql": - id2 = id2.hex - else: - id2 = str(id2) - # Update dagversion table - conn.execute( - sa.text(""" - INSERT INTO dag_version (id, version_number, dag_id, created_at, last_updated) - VALUES (:id, 1, :dag_id, :created_at, :last_updated) - """).bindparams( - id=id2, dag_id=row.dag_id, 
created_at=timezone.utcnow(), last_updated=timezone.utcnow() - ) - ) - - # Update serialized_dag table with dag_version_id where dag_id matches - if conn.dialect.name == "mysql": - conn.execute( - sa.text(""" - UPDATE serialized_dag sd - JOIN dag_version dv ON sd.dag_id = dv.dag_id - SET sd.dag_version_id = dv.id, - sd.created_at = dv.created_at - """) - ) - else: - conn.execute( - sa.text(""" - UPDATE serialized_dag - SET dag_version_id = dag_version.id, - created_at = dag_version.created_at - FROM dag_version - WHERE serialized_dag.dag_id = dag_version.dag_id - """) - ) - # Update dag_code table where fileloc_hash of serialized_dag matches - if conn.dialect.name == "mysql": - conn.execute( - sa.text(""" - UPDATE dag_code dc - JOIN serialized_dag sd ON dc.fileloc_hash = sd.fileloc_hash - SET dc.dag_version_id = sd.dag_version_id, - dc.created_at = sd.created_at, - dc.dag_id = sd.dag_id - """) - ) - else: - conn.execute( - sa.text(""" - UPDATE dag_code - SET dag_version_id = dag_version.id, - created_at = serialized_dag.created_at, - dag_id = serialized_dag.dag_id - FROM serialized_dag, dag_version - WHERE dag_code.fileloc_hash = serialized_dag.fileloc_hash - AND serialized_dag.dag_version_id = dag_version.id - """) - ) - - # select all rows in serialized_dag where the dag_id is not in dag_code - - stmt = """ - SELECT dag_id, fileloc, fileloc_hash, dag_version_id - FROM serialized_dag - WHERE dag_id NOT IN (SELECT dag_id FROM dag_code) - AND dag_id in (SELECT dag_id FROM dag) - """ - rows = _get_rows(stmt, conn) - # Insert the missing rows from serialized_dag to dag_code - stmt = sa.text(""" - INSERT INTO dag_code (dag_version_id, dag_id, fileloc, fileloc_hash, source_code, last_updated, created_at) - VALUES (:dag_version_id, :dag_id, :fileloc, :fileloc_hash, :source_code, :last_updated, :created_at) - """) - for row in rows: - try: - source_code = DagCode.get_code_from_file(row.fileloc) - except FileNotFoundError: - source_code = "source_code" - conn.execute( - stmt.bindparams( - dag_version_id=row.dag_version_id, - dag_id=row.dag_id, - fileloc=row.fileloc, - fileloc_hash=row.fileloc_hash, - source_code=source_code, - last_updated=timezone.utcnow(), - created_at=timezone.utcnow(), - ) - ) - - stmt = "SELECT dag_id, fileloc FROM dag_code" - rows = _get_rows(stmt, conn) - stmt = sa.text(""" - UPDATE dag_code - SET id = :_id, - dag_id = :dag_id, - source_code = :source_code, - source_code_hash = :source_code_hash - WHERE dag_id = :dag_id AND id IS NULL - """) - for row in rows: - id = uuid7() - if conn.dialect.name != "postgresql": - id = id.hex - else: - id = str(id) - try: - source_code = DagCode.get_code_from_file(row.fileloc) - except FileNotFoundError: - source_code = "source_code" - conn.execute( - stmt.bindparams( - _id=id, - source_code_hash=DagCode.dag_source_hash(source_code), - source_code=source_code, - dag_id=row.dag_id, - ) - ) + with ignore_sqlite_value_error(), op.batch_alter_table("dag_code") as batch_op: + batch_op.drop_constraint("dag_code_pkey", type_="primary") with op.batch_alter_table("dag_code") as batch_op: - batch_op.alter_column("dag_id", existing_type=StringID(), nullable=False) - batch_op.alter_column("id", existing_type=UUIDType(binary=False), nullable=False) + batch_op.drop_column("fileloc_hash") + batch_op.add_column(sa.Column("id", UUIDType(binary=False), nullable=False)) batch_op.create_primary_key("dag_code_pkey", ["id"]) - batch_op.alter_column("dag_version_id", existing_type=UUIDType(binary=False), nullable=False) + 
batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False), nullable=False)) + batch_op.add_column(sa.Column("source_code_hash", sa.String(length=32), nullable=False)) + batch_op.add_column(sa.Column("dag_id", StringID(), nullable=False)) + batch_op.add_column(sa.Column("created_at", TIMESTAMP(), default=timezone.utcnow, nullable=False)) batch_op.create_foreign_key( batch_op.f("dag_code_dag_version_id_fkey"), "dag_version", @@ -248,16 +91,17 @@ def upgrade(): ondelete="CASCADE", ) batch_op.create_unique_constraint("dag_code_dag_version_id_uq", ["dag_version_id"]) - batch_op.drop_column("fileloc_hash") - batch_op.alter_column("source_code_hash", existing_type=sa.String(length=32), nullable=False) - batch_op.alter_column("created_at", existing_type=TIMESTAMP(), nullable=False) - with op.batch_alter_table("serialized_dag") as batch_op: + with ignore_sqlite_value_error(), op.batch_alter_table("serialized_dag") as batch_op: batch_op.drop_constraint("serialized_dag_pkey", type_="primary") - batch_op.alter_column("id", existing_type=UUIDType(binary=False), nullable=False) - batch_op.alter_column("dag_version_id", existing_type=UUIDType(binary=False), nullable=False) + + with op.batch_alter_table("serialized_dag") as batch_op: + batch_op.drop_index("idx_fileloc_hash") batch_op.drop_column("fileloc_hash") batch_op.drop_column("fileloc") + batch_op.add_column(sa.Column("id", UUIDType(binary=False), nullable=False)) + batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False), nullable=False)) + batch_op.add_column(sa.Column("created_at", TIMESTAMP(), default=timezone.utcnow, nullable=False)) batch_op.create_primary_key("serialized_dag_pkey", ["id"]) batch_op.create_foreign_key( batch_op.f("serialized_dag_dag_version_id_fkey"), @@ -267,7 +111,6 @@ def upgrade(): ondelete="CASCADE", ) batch_op.create_unique_constraint("serialized_dag_dag_version_id_uq", ["dag_version_id"]) - batch_op.alter_column("created_at", existing_type=TIMESTAMP(), nullable=False) with op.batch_alter_table("task_instance", schema=None) as batch_op: batch_op.add_column(sa.Column("dag_version_id", UUIDType(binary=False))) @@ -296,113 +139,40 @@ def upgrade(): def downgrade(): """Unapply add dag versioning.""" - conn = op.get_bind() + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.drop_constraint(batch_op.f("task_instance_dag_version_id_fkey"), type_="foreignkey") + batch_op.drop_column("dag_version_id") with op.batch_alter_table("task_instance_history", schema=None) as batch_op: batch_op.drop_column("dag_version_id") - with op.batch_alter_table("task_instance", schema=None) as batch_op: - batch_op.drop_constraint(batch_op.f("task_instance_dag_version_id_fkey"), type_="foreignkey") - batch_op.drop_column("dag_version_id") + with op.batch_alter_table("dag_run", schema=None) as batch_op: + batch_op.add_column(sa.Column("dag_hash", sa.String(length=32), autoincrement=False, nullable=True)) + batch_op.drop_constraint("created_dag_version_id_fkey", type_="foreignkey") + batch_op.drop_column("created_dag_version_id") + + op.execute("delete from dag_code;") + op.execute("delete from serialized_dag;") with op.batch_alter_table("dag_code", schema=None) as batch_op: + batch_op.drop_constraint("dag_code_pkey", type_="primary") batch_op.drop_constraint(batch_op.f("dag_code_dag_version_id_fkey"), type_="foreignkey") batch_op.add_column(sa.Column("fileloc_hash", sa.BigInteger, nullable=True)) + batch_op.create_primary_key("dag_code_pkey", ["fileloc_hash"]) 
batch_op.drop_column("source_code_hash") batch_op.drop_column("created_at") - - # Update the added fileloc_hash with the hash of fileloc - stmt = "SELECT fileloc FROM dag_code" - rows = _get_rows(stmt, conn) - stmt = sa.text(""" - UPDATE dag_code - SET fileloc_hash = :_hash - where fileloc = :fileloc and fileloc_hash is null - """) - for row in rows: - hash = _airflow_2_fileloc_hash(row.fileloc) - conn.execute(stmt.bindparams(_hash=hash, fileloc=row.fileloc)) + batch_op.drop_column("id") + batch_op.drop_column("dag_version_id") + batch_op.drop_column("dag_id") with op.batch_alter_table("serialized_dag", schema=None, naming_convention=naming_convention) as batch_op: batch_op.drop_column("id") - batch_op.add_column(sa.Column("fileloc", sa.String(length=2000), nullable=True)) - batch_op.add_column(sa.Column("fileloc_hash", sa.BIGINT(), nullable=True)) - batch_op.drop_constraint(batch_op.f("serialized_dag_dag_version_id_fkey"), type_="foreignkey") batch_op.drop_column("created_at") - - # Update the serialized fileloc with fileloc from dag_code where dag_version_id matches - if conn.dialect.name == "mysql": - conn.execute( - sa.text(""" - UPDATE serialized_dag sd - JOIN dag_code dc ON sd.dag_version_id = dc.dag_version_id - SET sd.fileloc = dc.fileloc, - sd.fileloc_hash = dc.fileloc_hash - """) - ) - else: - conn.execute( - sa.text(""" - UPDATE serialized_dag - SET fileloc = dag_code.fileloc, - fileloc_hash = dag_code.fileloc_hash - FROM dag_code - WHERE serialized_dag.dag_version_id = dag_code.dag_version_id - """) - ) - # Deduplicate the rows in dag_code with the same fileloc_hash so we can make fileloc_hash the primary key - stmt = sa.text(""" - WITH ranked_rows AS ( - SELECT - fileloc_hash, - ROW_NUMBER() OVER (PARTITION BY fileloc_hash ORDER BY id) as row_num - FROM dag_code - ) - DELETE FROM dag_code - WHERE EXISTS ( - SELECT 1 - FROM ranked_rows - WHERE ranked_rows.fileloc_hash = dag_code.fileloc_hash - AND ranked_rows.row_num > 1 - ); - """) - conn.execute(stmt) - with op.batch_alter_table("serialized_dag") as batch_op: batch_op.drop_column("dag_version_id") + batch_op.add_column(sa.Column("fileloc", sa.String(length=2000), nullable=False)) + batch_op.add_column(sa.Column("fileloc_hash", sa.BIGINT(), nullable=False)) batch_op.create_index("idx_fileloc_hash", ["fileloc_hash"], unique=False) batch_op.create_primary_key("serialized_dag_pkey", ["dag_id"]) - batch_op.alter_column("fileloc", existing_type=sa.String(length=2000), nullable=False) - batch_op.alter_column("fileloc_hash", existing_type=sa.BIGINT(), nullable=False) - - with op.batch_alter_table("dag_code") as batch_op: - batch_op.drop_column("id") - batch_op.create_primary_key("dag_code_pkey", ["fileloc_hash"]) - batch_op.drop_column("dag_version_id") - batch_op.drop_column("dag_id") - - with op.batch_alter_table("dag_run", schema=None) as batch_op: - batch_op.add_column(sa.Column("dag_hash", sa.String(length=32), autoincrement=False, nullable=True)) - batch_op.drop_constraint("created_dag_version_id_fkey", type_="foreignkey") - batch_op.drop_column("created_dag_version_id") - - # Update dag_run dag_hash with dag_hash from serialized_dag where dag_id matches - if conn.dialect.name == "mysql": - conn.execute( - sa.text(""" - UPDATE dag_run dr - JOIN serialized_dag sd ON dr.dag_id = sd.dag_id - SET dr.dag_hash = sd.dag_hash - """) - ) - else: - conn.execute( - sa.text(""" - UPDATE dag_run - SET dag_hash = serialized_dag.dag_hash - FROM serialized_dag - WHERE dag_run.dag_id = serialized_dag.dag_id - """) - ) 
op.drop_table("dag_version") diff --git a/airflow-core/src/airflow/migrations/versions/0049_3_0_0_remove_pickled_data_from_xcom_table.py b/airflow-core/src/airflow/migrations/versions/0049_3_0_0_remove_pickled_data_from_xcom_table.py index c3972edbd12ab..fed378378290a 100644 --- a/airflow-core/src/airflow/migrations/versions/0049_3_0_0_remove_pickled_data_from_xcom_table.py +++ b/airflow-core/src/airflow/migrations/versions/0049_3_0_0_remove_pickled_data_from_xcom_table.py @@ -28,7 +28,7 @@ from __future__ import annotations import sqlalchemy as sa -from alembic import op +from alembic import context, op from sqlalchemy import text from sqlalchemy.dialects.mysql import LONGBLOB @@ -77,9 +77,24 @@ def upgrade(): condition = condition_templates.get(dialect) if not condition: raise RuntimeError(f"Unsupported dialect: {dialect}") - # Key is a reserved keyword in MySQL, so we need to quote it quoted_key = conn.dialect.identifier_preparer.quote("key") + if dialect == "postgresql" and not context.is_offline_mode(): + curr_timeout = ( + int( + conn.execute( + text(""" + SELECT setting + FROM pg_settings + WHERE name = 'statement_timeout' + """) + ).scalar_one() + ) + / 1000 + ) + if curr_timeout > 0 and curr_timeout < 1800: + print("setting local statement timeout to 1800s") + conn.execute(text("SET LOCAL statement_timeout='1800s'")) # Archive pickled data using the condition conn.execute( diff --git a/airflow-core/src/airflow/migrations/versions/0055_3_0_0_remove_pickled_data_from_dagrun_table.py b/airflow-core/src/airflow/migrations/versions/0055_3_0_0_remove_pickled_data_from_dagrun_table.py index 07d012ddf5719..31b71e2bedc3b 100644 --- a/airflow-core/src/airflow/migrations/versions/0055_3_0_0_remove_pickled_data_from_dagrun_table.py +++ b/airflow-core/src/airflow/migrations/versions/0055_3_0_0_remove_pickled_data_from_dagrun_table.py @@ -47,6 +47,17 @@ def upgrade(): """Apply remove pickled data from dagrun table.""" conn = op.get_bind() + empty_vals = { + "mysql": "X'80057D942E'", + "postgresql": r"'\x80057D942E'", + "sqlite": "X'80057D942E'", + } + dialect = conn.dialect.name + try: + empty_val = empty_vals[dialect] + except KeyError: + raise RuntimeError(f"Dialect {dialect} not supported.") + conf_type = sa.JSON().with_variant(postgresql.JSONB, "postgresql") op.add_column("dag_run", sa.Column("conf_json", conf_type, nullable=True)) @@ -61,12 +72,20 @@ def upgrade(): """) ) else: - BATCH_SIZE = 100 + BATCH_SIZE = 1000 offset = 0 while True: + err_count = 0 + batch_num = offset + 1 + print(f"converting dag run conf. batch={batch_num}") rows = conn.execute( text( - f"SELECT id,conf FROM dag_run WHERE conf IS not NULL order by id LIMIT {BATCH_SIZE} OFFSET {offset}" + "SELECT id, conf " + "FROM dag_run " + "WHERE conf IS not NULL " + f"AND conf != {empty_val}" + f"ORDER BY id LIMIT {BATCH_SIZE} " + f"OFFSET {offset}" ) ).fetchall() if not rows: @@ -85,9 +104,11 @@ def upgrade(): """), {"json_data": json_data, "id": row_id}, ) - except Exception as e: - print(f"Error converting dagrun conf to json for dagrun ID {row_id}: {e}") + except Exception: + err_count += 1 continue + if err_count: + print(f"could not convert dag run conf for {err_count} records. 
batch={batch_num}") offset += BATCH_SIZE op.drop_column("dag_run", "conf") @@ -112,12 +133,16 @@ def downgrade(): ) else: - BATCH_SIZE = 100 + BATCH_SIZE = 1000 offset = 0 while True: rows = conn.execute( text( - f"SELECT id,conf FROM dag_run WHERE conf IS not NULL order by id LIMIT {BATCH_SIZE} OFFSET {offset}" + "SELECT id,conf " + "FROM dag_run " + "WHERE conf IS NOT NULL " + f"ORDER BY id LIMIT {BATCH_SIZE} " + f"OFFSET {offset}" ) ).fetchall() if not rows: diff --git a/airflow-core/src/airflow/migrations/versions/0059_3_0_0_remove_external_trigger_field.py b/airflow-core/src/airflow/migrations/versions/0059_3_0_0_remove_external_trigger_field.py index f559cf9f8d8f8..46d3cd9ca57e4 100644 --- a/airflow-core/src/airflow/migrations/versions/0059_3_0_0_remove_external_trigger_field.py +++ b/airflow-core/src/airflow/migrations/versions/0059_3_0_0_remove_external_trigger_field.py @@ -53,6 +53,6 @@ def downgrade(): ) op.execute( dag_run_table.update().values( - external_trigger=sa.case([(dag_run_table.c.run_type == "manual", True)], else_=False) + external_trigger=sa.case((dag_run_table.c.run_type == "manual", True), else_=False) ) ) diff --git a/airflow-core/src/airflow/migrations/versions/0069_3_0_3_delete_import_errors.py b/airflow-core/src/airflow/migrations/versions/0069_3_0_3_delete_import_errors.py new file mode 100644 index 0000000000000..c0f267b97b9d7 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0069_3_0_3_delete_import_errors.py @@ -0,0 +1,50 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Delete import errors. + +Revision ID: fe199e1abd77 +Revises: 29ce7909c52b +Create Date: 2025-06-10 08:53:28.782896 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "fe199e1abd77" +down_revision = "29ce7909c52b" +branch_labels = None +depends_on = None + +airflow_version = "3.0.3" + + +def upgrade(): + """Apply Delete import errors.""" + # delete import_error table rows + op.get_bind().execute(sa.text("DELETE FROM import_error")) + + +def downgrade(): + """Unapply Delete import errors.""" + pass diff --git a/airflow-core/src/airflow/migrations/versions/0070_3_1_0_add_deadline_to_dag.py b/airflow-core/src/airflow/migrations/versions/0070_3_1_0_add_deadline_to_dag.py new file mode 100644 index 0000000000000..470d9f157bb37 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0070_3_1_0_add_deadline_to_dag.py @@ -0,0 +1,50 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add Deadline to Dag. + +Revision ID: dfee8bd5d574 +Revises: fe199e1abd77 +Create Date: 2024-12-18 19:10:26.962464 +""" + +from __future__ import annotations + +import sqlalchemy as sa +import sqlalchemy_jsonfield +from alembic import op + +from airflow.settings import json + +revision = "dfee8bd5d574" +down_revision = "fe199e1abd77" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + op.add_column( + "dag", + sa.Column("deadline", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + ) + + +def downgrade(): + op.drop_column("dag", "deadline") diff --git a/airflow-core/src/airflow/migrations/versions/0071_3_1_0_rename_and_change_type_of_deadline_column.py b/airflow-core/src/airflow/migrations/versions/0071_3_1_0_rename_and_change_type_of_deadline_column.py new file mode 100644 index 0000000000000..b7fd7933a2979 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0071_3_1_0_rename_and_change_type_of_deadline_column.py @@ -0,0 +1,66 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Rename Deadline column in the Deadline table from deadline to deadline_time and change its type from DateTime to UTC DateTime. 
+ +Revision ID: 0242ac120002 +Revises: dfee8bd5d574 +Create Date: 2024-12-18 19:10:26.962464 +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.migrations.db_types import TIMESTAMP + +revision = "0242ac120002" +down_revision = "dfee8bd5d574" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Apply change to deadline column in the deadline table.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.drop_index("deadline_idx") + batch_op.alter_column( + "deadline", + existing_type=sa.DateTime(), + type_=TIMESTAMP(timezone=True), + existing_nullable=False, + new_column_name="deadline_time", + ) + op.create_index("deadline_time_idx", "deadline", ["deadline_time"], unique=False) + + +def downgrade(): + """Unapply change to deadline column in the deadline table.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.drop_index("deadline_time_idx") + batch_op.alter_column( + "deadline_time", + existing_type=TIMESTAMP(timezone=True), + type_=sa.DateTime(), + existing_nullable=False, + new_column_name="deadline", + ) + op.create_index("deadline_idx", "deadline", ["deadline"], unique=False) diff --git a/airflow-core/src/airflow/migrations/versions/0072_3_1_0_change_ti_dag_version_fk_to_restrict.py b/airflow-core/src/airflow/migrations/versions/0072_3_1_0_change_ti_dag_version_fk_to_restrict.py new file mode 100644 index 0000000000000..43bdcc7a16b62 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0072_3_1_0_change_ti_dag_version_fk_to_restrict.py @@ -0,0 +1,60 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Change the on-delete behaviour of task_instance.dag_version_id foreign key constraint to RESTRICT. 
+ +Revision ID: 3ac9e5732b1f +Revises: 0242ac120002 +Create Date: 2025-05-27 12:30:00.000000 +""" + +from __future__ import annotations + +from alembic import op + +revision = "3ac9e5732b1f" +down_revision = "0242ac120002" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Alter task_instance.dag_version_id foreign key to use ON DELETE RESTRICT.""" + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.drop_constraint(batch_op.f("task_instance_dag_version_id_fkey"), type_="foreignkey") + batch_op.create_foreign_key( + batch_op.f("task_instance_dag_version_id_fkey"), + "dag_version", + ["dag_version_id"], + ["id"], + ondelete="RESTRICT", + ) + + +def downgrade(): + """Revert task_instance.dag_version_id foreign key to ON DELETE CASCADE.""" + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.drop_constraint(batch_op.f("task_instance_dag_version_id_fkey"), type_="foreignkey") + batch_op.create_foreign_key( + batch_op.f("task_instance_dag_version_id_fkey"), + "dag_version", + ["dag_version_id"], + ["id"], + ondelete="CASCADE", + ) diff --git a/airflow-core/src/airflow/migrations/versions/0073_3_1_0_add_task_inlet_asset_reference.py b/airflow-core/src/airflow/migrations/versions/0073_3_1_0_add_task_inlet_asset_reference.py new file mode 100644 index 0000000000000..870f7e63c4906 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0073_3_1_0_add_task_inlet_asset_reference.py @@ -0,0 +1,70 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add task_inlet_asset_reference table. 
+ +Revision ID: 583e80dfcef4 +Revises: 3ac9e5732b1f +Create Date: 2025-06-04 06:26:36.536172 +""" + +from __future__ import annotations + +from alembic import op +from sqlalchemy import Column, ForeignKeyConstraint, Index, Integer, PrimaryKeyConstraint + +from airflow.migrations.db_types import StringID +from airflow.utils.sqlalchemy import UtcDateTime + +revision = "583e80dfcef4" +down_revision = "3ac9e5732b1f" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Add task_inlet_asset_reference table.""" + op.create_table( + "task_inlet_asset_reference", + Column("asset_id", Integer, primary_key=True, nullable=False), + Column("dag_id", StringID(), primary_key=True, nullable=False), + Column("task_id", StringID(), primary_key=True, nullable=False), + Column("created_at", UtcDateTime, nullable=False), + Column("updated_at", UtcDateTime, nullable=False), + ForeignKeyConstraint( + ["asset_id"], + ["asset.id"], + name="tiar_asset_fkey", + ondelete="CASCADE", + ), + PrimaryKeyConstraint("asset_id", "dag_id", "task_id", name="tiar_pkey"), + ForeignKeyConstraint( + columns=["dag_id"], + refcolumns=["dag.dag_id"], + name="tiar_dag_id_fkey", + ondelete="CASCADE", + ), + Index("idx_task_inlet_asset_reference_dag_id", "dag_id"), + ) + + +def downgrade(): + """Remove task_inlet_asset_reference table.""" + op.drop_table("task_inlet_asset_reference") diff --git a/airflow-core/src/airflow/migrations/versions/0074_3_1_0_add_triggering_user_to_dag_run.py b/airflow-core/src/airflow/migrations/versions/0074_3_1_0_add_triggering_user_to_dag_run.py new file mode 100644 index 0000000000000..21d071b14bc49 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0074_3_1_0_add_triggering_user_to_dag_run.py @@ -0,0 +1,56 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add triggering user to dag_run. + +Revision ID: 66a7743fe20e +Revises: 583e80dfcef4 +Create Date: 2025-06-18 19:43:07.975512 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "66a7743fe20e" +down_revision = "583e80dfcef4" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Add triggering user to dag_run.""" + with op.batch_alter_table("backfill", schema=None) as batch_op: + batch_op.add_column(sa.Column("triggering_user_name", sa.String(length=512), nullable=True)) + + with op.batch_alter_table("dag_run", schema=None) as batch_op: + batch_op.add_column(sa.Column("triggering_user_name", sa.String(length=512), nullable=True)) + + +def downgrade(): + """Unapply triggering user to dag_run.""" + with op.batch_alter_table("dag_run", schema=None) as batch_op: + batch_op.drop_column("triggering_user_name") + + with op.batch_alter_table("backfill", schema=None) as batch_op: + batch_op.drop_column("triggering_user_name") diff --git a/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_dag_favorite_table.py b/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_dag_favorite_table.py new file mode 100644 index 0000000000000..657f2741e4ab8 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_dag_favorite_table.py @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add dag_favorite table. + +Revision ID: ffdb0566c7c0 +Revises: 66a7743fe20e +Create Date: 2025-06-05 15:06:08.903908 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.models.base import COLLATION_ARGS + +revision = "ffdb0566c7c0" +down_revision = "66a7743fe20e" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Apply add dag_favorite table.""" + op.create_table( + "dag_favorite", + sa.Column("user_id", sa.String(length=250), nullable=False), + sa.Column("dag_id", sa.String(length=250, **COLLATION_ARGS), nullable=False), + sa.ForeignKeyConstraint( + ["dag_id"], ["dag.dag_id"], name=op.f("dag_favorite_dag_id_fkey"), ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("user_id", "dag_id", name=op.f("dag_favorite_pkey")), + ) + + +def downgrade(): + """Unapply add dag_favorite table.""" + op.drop_table("dag_favorite") diff --git a/airflow-core/src/airflow/migrations/versions/0076_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0076_3_1_0_add_human_in_the_loop_response.py new file mode 100644 index 0000000000000..4b1c5a36e8994 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0076_3_1_0_add_human_in_the_loop_response.py @@ -0,0 +1,78 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add Human In the Loop Detail table. + +Revision ID: 40f7c30a228b +Revises: ffdb0566c7c0 +Create Date: 2025-07-04 15:05:19.459197 + +""" + +from __future__ import annotations + +import sqlalchemy_jsonfield +from alembic import op +from sqlalchemy import Boolean, Column, ForeignKeyConstraint, String, Text +from sqlalchemy.dialects import postgresql + +from airflow.settings import json +from airflow.utils.sqlalchemy import UtcDateTime + +# revision identifiers, used by Alembic. +revision = "40f7c30a228b" +down_revision = "ffdb0566c7c0" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Add Human In the Loop Detail table.""" + op.create_table( + "hitl_detail", + Column( + "ti_id", + String(length=36).with_variant(postgresql.UUID(), "postgresql"), + primary_key=True, + nullable=False, + ), + Column("options", sqlalchemy_jsonfield.JSONField(json=json), nullable=False), + Column("subject", Text, nullable=False), + Column("body", Text, nullable=True), + Column("defaults", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("multiple", Boolean, unique=False, default=False), + Column("params", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), + Column("response_at", UtcDateTime, nullable=True), + Column("user_id", String(128), nullable=True), + Column("chosen_options", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("params_input", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), + ForeignKeyConstraint( + ["ti_id"], + ["task_instance.id"], + name="hitl_detail_ti_fkey", + ondelete="CASCADE", + onupdate="CASCADE", + ), + ) + + +def downgrade(): + """Remove Human In the Loop Detail table.""" + op.drop_table("hitl_detail") diff --git a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_trigger_id_to_deadline.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_trigger_id_to_deadline.py new file mode 100644 index 0000000000000..5f5cf34b0f485 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_trigger_id_to_deadline.py @@ -0,0 +1,52 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add trigger_id to deadline.
+ +Revision ID: 09fa89ba1710 +Revises: 40f7c30a228b +Create Date: 2025-07-11 22:37:17.706269 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "09fa89ba1710" +down_revision = "40f7c30a228b" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Add trigger_id to deadline.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.add_column(sa.Column("trigger_id", sa.Integer(), nullable=True)) + batch_op.create_foreign_key(batch_op.f("deadline_trigger_id_fkey"), "trigger", ["trigger_id"], ["id"]) + + +def downgrade(): + """Remove trigger_id from deadline.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.drop_constraint(batch_op.f("deadline_trigger_id_fkey"), type_="foreignkey") + batch_op.drop_column("trigger_id") diff --git a/airflow-core/src/airflow/migrations/versions/0078_3_1_0_add_callback_state_to_deadline.py b/airflow-core/src/airflow/migrations/versions/0078_3_1_0_add_callback_state_to_deadline.py new file mode 100644 index 0000000000000..cd66ae74fe032 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0078_3_1_0_add_callback_state_to_deadline.py @@ -0,0 +1,56 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add callback_state to deadline. + +Revision ID: f56f68b9e02f +Revises: 09fa89ba1710 +Create Date: 2025-07-22 17:46:40.122517 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "f56f68b9e02f" +down_revision = "09fa89ba1710" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Add callback_state to deadline.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.add_column(sa.Column("callback_state", sa.String(length=20), nullable=True)) + batch_op.drop_index(batch_op.f("deadline_time_idx")) + batch_op.create_index( + "deadline_callback_state_time_idx", ["callback_state", "deadline_time"], unique=False + ) + + +def downgrade(): + """Remove callback_state from deadline.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.drop_index("deadline_callback_state_time_idx") + batch_op.create_index(batch_op.f("deadline_time_idx"), ["deadline_time"], unique=False) + batch_op.drop_column("callback_state") diff --git a/airflow-core/src/airflow/migrations/versions/0079_3_1_0_add_url_and_template_params_to_dagbundle_model.py b/airflow-core/src/airflow/migrations/versions/0079_3_1_0_add_url_and_template_params_to_dagbundle_model.py new file mode 100644 index 0000000000000..a5b3787c8758a --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0079_3_1_0_add_url_and_template_params_to_dagbundle_model.py @@ -0,0 +1,53 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add url template and template params to DagBundleModel. + +Revision ID: 3bda03debd04 +Revises: f56f68b9e02f +Create Date: 2025-07-04 10:12:12.711292 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op +from sqlalchemy_utils import JSONType + +# revision identifiers, used by Alembic. 
+revision = "3bda03debd04" +down_revision = "f56f68b9e02f" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Apply Add url and template params to DagBundleModel.""" + with op.batch_alter_table("dag_bundle", schema=None) as batch_op: + batch_op.add_column(sa.Column("signed_url_template", sa.String(length=200), nullable=True)) + batch_op.add_column(sa.Column("template_params", JSONType(), nullable=True)) + + +def downgrade(): + """Unapply Add url and template params to DagBundleModel.""" + with op.batch_alter_table("dag_bundle", schema=None) as batch_op: + batch_op.drop_column("template_params") + batch_op.drop_column("signed_url_template") diff --git a/airflow-core/src/airflow/migrations/versions/0080_3_1_0_modify_deadline_callback_schema.py b/airflow-core/src/airflow/migrations/versions/0080_3_1_0_modify_deadline_callback_schema.py new file mode 100644 index 0000000000000..0d5431d663f52 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0080_3_1_0_modify_deadline_callback_schema.py @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Modify deadline's callback schema. + +Revision ID: 808787349f22 +Revises: 3bda03debd04 +Create Date: 2025-07-31 19:35:53.150465 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +import sqlalchemy_jsonfield +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "808787349f22" +down_revision = "3bda03debd04" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Replace deadline table's string callback and JSON callback_kwargs with JSON callback.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.drop_column("callback") + batch_op.drop_column("callback_kwargs") + batch_op.add_column(sa.Column("callback", sqlalchemy_jsonfield.jsonfield.JSONField(), nullable=False)) + + +def downgrade(): + """Replace deadline table's JSON callback with string callback and JSON callback_kwargs.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.drop_column("callback") + batch_op.add_column( + sa.Column("callback_kwargs", sqlalchemy_jsonfield.jsonfield.JSONField(), nullable=True) + ) + batch_op.add_column(sa.Column("callback", sa.String(length=500), nullable=False)) diff --git a/airflow-core/src/airflow/migrations/versions/0081_3_1_0_remove_dag_id_from_deadline.py b/airflow-core/src/airflow/migrations/versions/0081_3_1_0_remove_dag_id_from_deadline.py new file mode 100644 index 0000000000000..f3cfe2449c22a --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0081_3_1_0_remove_dag_id_from_deadline.py @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Remove dag_id from Deadline. + +Revision ID: a169942745c2 +Revises: 808787349f22 +Create Date: 2025-08-07 22:26:13.053501 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.migrations.db_types import StringID +from airflow.models import ID_LEN + +# revision identifiers, used by Alembic. 
+revision = "a169942745c2" +down_revision = "808787349f22" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Remove dag_id from Deadline.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.drop_constraint(batch_op.f("deadline_dag_id_fkey"), type_="foreignkey") + batch_op.drop_column("dag_id") + + +def downgrade(): + """Add dag_id to Deadline.""" + with op.batch_alter_table("deadline", schema=None) as batch_op: + batch_op.add_column(sa.Column("dag_id", StringID(length=ID_LEN), nullable=True)) + batch_op.create_foreign_key( + batch_op.f("deadline_dag_id_fkey"), "dag", ["dag_id"], ["dag_id"], ondelete="CASCADE" + ) diff --git a/airflow-core/src/airflow/models/__init__.py b/airflow-core/src/airflow/models/__init__.py index 9274ae7a79f3f..917cd8cd06750 100644 --- a/airflow-core/src/airflow/models/__init__.py +++ b/airflow-core/src/airflow/models/__init__.py @@ -19,6 +19,8 @@ from __future__ import annotations +from airflow.utils.deprecation_tools import add_deprecated_classes + # Do not add new models to this -- this is for compat only __all__ = [ "DAG", @@ -58,10 +60,10 @@ def import_all_models(): import airflow.models.asset import airflow.models.backfill + import airflow.models.dag_favorite import airflow.models.dag_version import airflow.models.dagbundle import airflow.models.dagwarning - import airflow.models.deadline import airflow.models.errors import airflow.models.serialized_dag import airflow.models.taskinstancehistory @@ -89,7 +91,7 @@ def __getattr__(name): "DAG": "airflow.models.dag", "ID_LEN": "airflow.models.base", "Base": "airflow.models.base", - "BaseOperator": "airflow.models.baseoperator", + "BaseOperator": "airflow.sdk.bases.operator", "BaseOperatorLink": "airflow.sdk.bases.operatorlink", "BaseXCom": "airflow.sdk.bases.xcom", "Connection": "airflow.models.connection", @@ -99,9 +101,10 @@ def __getattr__(name): "DagTag": "airflow.models.dag", "DagWarning": "airflow.models.dagwarning", "DbCallbackRequest": "airflow.models.db_callback_request", + "Deadline": "airflow.models.deadline", "Log": "airflow.models.log", + "HITLDetail": "airflow.models.hitl", "MappedOperator": "airflow.models.mappedoperator", - "Operator": "airflow.models.operator", "Param": "airflow.sdk.definitions.param", "Pool": "airflow.models.pool", "RenderedTaskInstanceFields": "airflow.models.renderedtifields", @@ -118,7 +121,6 @@ def __getattr__(name): # I was unable to get mypy to respect a airflow/models/__init__.pyi, so # having to resort back to this hacky method from airflow.models.base import ID_LEN, Base - from airflow.models.baseoperator import BaseOperator from airflow.models.connection import Connection from airflow.models.dag import DAG, DagModel, DagTag from airflow.models.dagbag import DagBag @@ -128,7 +130,6 @@ def __getattr__(name): from airflow.models.deadline import Deadline from airflow.models.log import Log from airflow.models.mappedoperator import MappedOperator - from airflow.models.operator import Operator from airflow.models.pool import Pool from airflow.models.renderedtifields import RenderedTaskInstanceFields from airflow.models.skipmixin import SkipMixin @@ -137,7 +138,38 @@ def __getattr__(name): from airflow.models.taskreschedule import TaskReschedule from airflow.models.trigger import Trigger from airflow.models.variable import Variable + from airflow.sdk.bases.operator import BaseOperator from airflow.sdk.bases.operatorlink import BaseOperatorLink from airflow.sdk.bases.xcom import BaseXCom from 
airflow.sdk.definitions.param import Param from airflow.sdk.execution_time.xcom import XCom + + +__deprecated_classes = { + "abstractoperator": { + "AbstractOperator": "airflow.sdk.definitions._internal.abstractoperator.AbstractOperator", + "NotMapped": "airflow.sdk.definitions._internal.abstractoperator.NotMapped", + "TaskStateChangeCallback": "airflow.sdk.definitions._internal.abstractoperator.TaskStateChangeCallback", + "DEFAULT_OWNER": "airflow.sdk.definitions._internal.abstractoperator.DEFAULT_OWNER", + "DEFAULT_QUEUE": "airflow.sdk.definitions._internal.abstractoperator.DEFAULT_QUEUE", + "DEFAULT_TASK_EXECUTION_TIMEOUT": "airflow.sdk.definitions._internal.abstractoperator.DEFAULT_TASK_EXECUTION_TIMEOUT", + }, + "param": { + "Param": "airflow.sdk.definitions.param.Param", + "ParamsDict": "airflow.sdk.definitions.param.ParamsDict", + }, + "baseoperator": { + "BaseOperator": "airflow.sdk.bases.operator.BaseOperator", + "chain": "airflow.sdk.bases.operator.chain", + "chain_linear": "airflow.sdk.bases.operator.chain_linear", + "cross_downstream": "airflow.sdk.bases.operator.cross_downstream", + }, + "baseoperatorlink": { + "BaseOperatorLink": "airflow.sdk.bases.operatorlink.BaseOperatorLink", + }, + "operator": { + "BaseOperator": "airflow.sdk.bases.operator.BaseOperator", + "Operator": "airflow.sdk.types.Operator", + }, +} +add_deprecated_classes(__deprecated_classes, __name__) diff --git a/airflow-core/src/airflow/models/abstractoperator.py b/airflow-core/src/airflow/models/abstractoperator.py deleted file mode 100644 index e5b5f7dc81f45..0000000000000 --- a/airflow-core/src/airflow/models/abstractoperator.py +++ /dev/null @@ -1,34 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
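The __deprecated_classes mapping above is consumed by add_deprecated_classes(), which installs a module-level __getattr__ hook (PEP 562) so the old import paths keep resolving while emitting a DeprecationWarning. A hedged sketch of that mechanism; the real helper in airflow.utils.deprecation_tools is more general, and the mapping below is illustrative:

import importlib
import warnings

# Illustrative mapping: old attribute name -> new import path.
_MOVED = {"BaseOperator": "airflow.sdk.bases.operator.BaseOperator"}


def __getattr__(name: str):
    # PEP 562: called when `name` is not found in this module's namespace.
    try:
        target = _MOVED[name]
    except KeyError:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}") from None
    module_path, _, attr = target.rpartition(".")
    warnings.warn(
        f"{name} has moved; import it from {module_path} instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return getattr(importlib.import_module(module_path), attr)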
-from __future__ import annotations - -import datetime - -from airflow.configuration import conf -from airflow.sdk.definitions._internal.abstractoperator import ( - AbstractOperator as AbstractOperator, - NotMapped as NotMapped, # Re-export this for compat - TaskStateChangeCallback as TaskStateChangeCallback, -) - -DEFAULT_OWNER: str = conf.get_mandatory_value("operators", "default_owner") -DEFAULT_QUEUE: str = conf.get_mandatory_value("operators", "default_queue") - -DEFAULT_TASK_EXECUTION_TIMEOUT: datetime.timedelta | None = conf.gettimedelta( - "core", "default_task_execution_timeout" -) diff --git a/airflow-core/src/airflow/models/asset.py b/airflow-core/src/airflow/models/asset.py index 17a90031da26c..8b28f2bea6184 100644 --- a/airflow-core/src/airflow/models/asset.py +++ b/airflow-core/src/airflow/models/asset.py @@ -36,9 +36,9 @@ ) from sqlalchemy.orm import relationship +from airflow._shared.timezones import timezone from airflow.models.base import Base, StringID from airflow.settings import json -from airflow.utils import timezone from airflow.utils.sqlalchemy import UtcDateTime if TYPE_CHECKING: @@ -194,7 +194,7 @@ class AssetAliasModel(Base): secondary=asset_alias_asset_event_association_table, back_populates="source_aliases", ) - consuming_dags = relationship("DagScheduleAssetAliasReference", back_populates="asset_alias") + scheduled_dags = relationship("DagScheduleAssetAliasReference", back_populates="asset_alias") @classmethod def from_public(cls, obj: AssetAlias) -> AssetAliasModel: @@ -272,8 +272,9 @@ class AssetModel(Base): active = relationship("AssetActive", uselist=False, viewonly=True, back_populates="asset") - consuming_dags = relationship("DagScheduleAssetReference", back_populates="asset") + scheduled_dags = relationship("DagScheduleAssetReference", back_populates="asset") producing_tasks = relationship("TaskOutletAssetReference", back_populates="asset") + consuming_tasks = relationship("TaskInletAssetReference", back_populates="asset") triggers = relationship("Trigger", secondary=asset_trigger_association_table, back_populates="assets") __tablename__ = "asset" @@ -478,7 +479,7 @@ class DagScheduleAssetAliasReference(Base): created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) - asset_alias = relationship("AssetAliasModel", back_populates="consuming_dags") + asset_alias = relationship("AssetAliasModel", back_populates="scheduled_dags") dag = relationship("DagModel", back_populates="schedule_asset_alias_references") __tablename__ = "dag_schedule_asset_alias_reference" @@ -520,7 +521,7 @@ class DagScheduleAssetReference(Base): created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) - asset = relationship("AssetModel", back_populates="consuming_dags") + asset = relationship("AssetModel", back_populates="scheduled_dags") dag = relationship("DagModel", back_populates="schedule_asset_references") queue_records = relationship( @@ -612,6 +613,50 @@ def __repr__(self): return f"{self.__class__.__name__}({', '.join(args)})" +class TaskInletAssetReference(Base): + """References from a task to an asset that it references as an inlet.""" + + asset_id = Column(Integer, primary_key=True, nullable=False) + dag_id = Column(StringID(), primary_key=True, nullable=False) + task_id = Column(StringID(), primary_key=True, nullable=False) + created_at = 
Column(UtcDateTime, default=timezone.utcnow, nullable=False) + updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) + + asset = relationship("AssetModel", back_populates="consuming_tasks") + + __tablename__ = "task_inlet_asset_reference" + __table_args__ = ( + ForeignKeyConstraint( + (asset_id,), + ["asset.id"], + name="tiar_asset_fkey", + ondelete="CASCADE", + ), + PrimaryKeyConstraint(asset_id, dag_id, task_id, name="tiar_pkey"), + ForeignKeyConstraint( + columns=(dag_id,), + refcolumns=["dag.dag_id"], + name="tiar_dag_id_fkey", + ondelete="CASCADE", + ), + Index("idx_task_inlet_asset_reference_dag_id", dag_id), + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + return ( + self.asset_id == other.asset_id and self.dag_id == other.dag_id and self.task_id == other.task_id + ) + + def __hash__(self): + return hash(self.__mapper__.primary_key) + + def __repr__(self): + args = (f"{(attr := x.name)}={getattr(self, attr)!r}" for x in self.__mapper__.primary_key) + return f"{self.__class__.__name__}({', '.join(args)})" + + class AssetDagRunQueue(Base): """Model for storing asset events that need processing.""" @@ -619,6 +664,7 @@ class AssetDagRunQueue(Base): target_dag_id = Column(StringID(), primary_key=True, nullable=False) created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) asset = relationship("AssetModel", viewonly=True) + dag_model = relationship("DagModel", viewonly=True) __tablename__ = "asset_dag_run_queue" __table_args__ = ( diff --git a/airflow-core/src/airflow/models/backfill.py b/airflow-core/src/airflow/models/backfill.py index 640b915a0866a..4142265b8bd44 100644 --- a/airflow-core/src/airflow/models/backfill.py +++ b/airflow-core/src/airflow/models/backfill.py @@ -32,6 +32,7 @@ Column, ForeignKeyConstraint, Integer, + String, UniqueConstraint, desc, func, @@ -41,10 +42,10 @@ from sqlalchemy.orm import relationship, validates from sqlalchemy_jsonfield import JSONField +from airflow._shared.timezones import timezone from airflow.exceptions import AirflowException, DagNotFound from airflow.models.base import Base, StringID from airflow.settings import json -from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.sqlalchemy import UtcDateTime, nulls_first, with_row_locks from airflow.utils.state import DagRunState @@ -132,9 +133,20 @@ class Backfill(Base): created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) completed_at = Column(UtcDateTime, nullable=True) updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) + triggering_user_name = Column( + String(512), + nullable=True, + ) # The user that triggered the Backfill, if applicable backfill_dag_run_associations = relationship("BackfillDagRun", back_populates="backfill") + dag_model = relationship( + "DagModel", + primaryjoin="DagModel.dag_id == Backfill.dag_id", + viewonly=True, + foreign_keys=[dag_id], + ) + def __repr__(self): return f"Backfill({self.dag_id=}, {self.from_date=}, {self.to_date=})" @@ -278,6 +290,8 @@ def _create_backfill_dag_run( backfill_id, dag_run_conf, backfill_sort_ordinal, + triggering_user_name, + run_on_latest_version, session, ): from airflow.models.dagrun import DagRun @@ -299,7 +313,10 @@ def _create_backfill_dag_run( return lock = session.execute( with_row_locks( - query=select(DagRun).where(DagRun.logical_date == info.logical_date), + 
query=select(DagRun).where( + DagRun.logical_date == info.logical_date, + DagRun.dag_id == dag.dag_id, + ), session=session, skip_locked=True, ) @@ -312,6 +329,7 @@ def _create_backfill_dag_run( info=info, backfill_id=backfill_id, sort_ordinal=backfill_sort_ordinal, + run_on_latest=run_on_latest_version, ) else: session.add( @@ -336,6 +354,7 @@ def _create_backfill_dag_run( conf=dag_run_conf, run_type=DagRunType.BACKFILL_JOB, triggered_by=DagRunTriggeredByType.BACKFILL, + triggering_user_name=triggering_user_name, state=DagRunState.QUEUED, start_date=timezone.utcnow(), backfill_id=backfill_id, @@ -384,7 +403,7 @@ def _get_info_list( return dagrun_info_list -def _handle_clear_run(session, dag, dr, info, backfill_id, sort_ordinal): +def _handle_clear_run(session, dag, dr, info, backfill_id, sort_ordinal, run_on_latest=False): """Clear the existing DAG run and update backfill metadata.""" from sqlalchemy.sql import update @@ -398,12 +417,13 @@ def _handle_clear_run(session, dag, dr, info, backfill_id, sort_ordinal): session=session, confirm_prompt=False, dry_run=False, + run_on_latest_version=run_on_latest, ) # Update backfill_id and run_type in DagRun table session.execute( update(DagRun) - .where(DagRun.logical_date == info.logical_date) + .where(DagRun.logical_date == info.logical_date, DagRun.dag_id == dag.dag_id) .values( backfill_id=backfill_id, run_type=DagRunType.BACKFILL_JOB, @@ -428,7 +448,9 @@ def _create_backfill( max_active_runs: int, reverse: bool, dag_run_conf: dict | None, + triggering_user_name: str | None, reprocess_behavior: ReprocessBehavior | None = None, + run_on_latest_version: bool = False, ) -> Backfill | None: from airflow.models import DagModel from airflow.models.serialized_dag import SerializedDagModel @@ -465,6 +487,8 @@ def _create_backfill( max_active_runs=max_active_runs, dag_run_conf=dag_run_conf, reprocess_behavior=reprocess_behavior, + dag_model=dag, + triggering_user_name=triggering_user_name, ) session.add(br) session.commit() @@ -489,6 +513,8 @@ def _create_backfill( dag_run_conf=br.dag_run_conf, reprocess_behavior=br.reprocess_behavior, backfill_sort_ordinal=backfill_sort_ordinal, + triggering_user_name=br.triggering_user_name, + run_on_latest_version=run_on_latest_version, session=session, ) log.info( diff --git a/airflow-core/src/airflow/models/base.py b/airflow-core/src/airflow/models/base.py index c146f4619efb3..0548e08a8f605 100644 --- a/airflow-core/src/airflow/models/base.py +++ b/airflow-core/src/airflow/models/base.py @@ -23,6 +23,7 @@ from sqlalchemy.orm import registry from airflow.configuration import conf +from airflow.utils.sqlalchemy import is_sqlalchemy_v1 SQL_ALCHEMY_SCHEMA = conf.get("database", "SQL_ALCHEMY_SCHEMA") @@ -52,6 +53,10 @@ def _get_schema(): Base = Any else: Base = mapper_registry.generate_base() + # TEMPORARY workaround to allow using unmapped (v1.4) models in SQLAlchemy 2.0. It is intended only to + # unblock the development of SQLA2 support. + if not is_sqlalchemy_v1(): + Base.__allow_unmapped__ = True ID_LEN = 250 diff --git a/airflow-core/src/airflow/models/baseoperator.py b/airflow-core/src/airflow/models/baseoperator.py deleted file mode 100644 index 4c766007142c2..0000000000000 --- a/airflow-core/src/airflow/models/baseoperator.py +++ /dev/null @@ -1,693 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -""" -Base operator for all operators. - -:sphinx-autoapi-skip: -""" - -from __future__ import annotations - -import functools -import logging -import operator -from collections.abc import Collection, Iterable, Iterator -from datetime import datetime -from functools import singledispatchmethod -from typing import TYPE_CHECKING, Any - -import pendulum -from sqlalchemy import select -from sqlalchemy.orm.exc import NoResultFound - -# Keeping this file at all is a temp thing as we migrate the repo to the task sdk as the base, but to keep -# main working and useful for others to develop against we use the TaskSDK here but keep this file around -from airflow.models.taskinstance import TaskInstance, clear_task_instances -from airflow.sdk.bases.operator import ( - BaseOperator as TaskSDKBaseOperator, - # Re-export for compat - chain as chain, - chain_linear as chain_linear, - cross_downstream as cross_downstream, - get_merged_defaults as get_merged_defaults, -) -from airflow.sdk.definitions._internal.abstractoperator import ( - AbstractOperator as TaskSDKAbstractOperator, - NotMapped, -) -from airflow.sdk.definitions.mappedoperator import MappedOperator -from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup -from airflow.serialization.enums import DagAttributeTypes -from airflow.ti_deps.deps.mapped_task_upstream_dep import MappedTaskUpstreamDep -from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep -from airflow.ti_deps.deps.not_previously_skipped_dep import NotPreviouslySkippedDep -from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep -from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep -from airflow.utils import timezone -from airflow.utils.session import NEW_SESSION, provide_session -from airflow.utils.state import DagRunState -from airflow.utils.types import DagRunTriggeredByType -from airflow.utils.xcom import XCOM_RETURN_KEY - -if TYPE_CHECKING: - from sqlalchemy.orm import Session - - from airflow.models.dag import DAG as SchedulerDAG - from airflow.models.operator import Operator - from airflow.sdk import BaseOperatorLink, Context - from airflow.sdk.definitions._internal.node import DAGNode - from airflow.ti_deps.deps.base_ti_dep import BaseTIDep - from airflow.triggers.base import StartTriggerArgs - -logger = logging.getLogger("airflow.models.baseoperator.BaseOperator") - - -class BaseOperator(TaskSDKBaseOperator): - r""" - Abstract base class for all operators. - - Since operators create objects that become nodes in the DAG, BaseOperator - contains many recursive methods for DAG crawling behavior. To derive from - this class, you are expected to override the constructor and the 'execute' - method. - - Operators derived from this class should perform or trigger certain tasks - synchronously (wait for completion). 
Example of operators could be an - operator that runs a Pig job (PigOperator), a sensor operator that - waits for a partition to land in Hive (HiveSensorOperator), or one that - moves data from Hive to MySQL (Hive2MySqlOperator). Instances of these - operators (tasks) target specific operations, running specific scripts, - functions or data transfers. - - This class is abstract and shouldn't be instantiated. Instantiating a - class derived from this one results in the creation of a task object, - which ultimately becomes a node in DAG objects. Task dependencies should - be set by using the set_upstream and/or set_downstream methods. - - :param task_id: a unique, meaningful id for the task - :param owner: the owner of the task. Using a meaningful description - (e.g. user/person/team/role name) to clarify ownership is recommended. - :param email: the 'to' email address(es) used in email alerts. This can be a - single email or multiple ones. Multiple addresses can be specified as a - comma or semicolon separated string or by passing a list of strings. - :param email_on_retry: Indicates whether email alerts should be sent when a - task is retried - :param email_on_failure: Indicates whether email alerts should be sent when - a task failed - :param retries: the number of retries that should be performed before - failing the task - :param retry_delay: delay between retries, can be set as ``timedelta`` or - ``float`` seconds, which will be converted into ``timedelta``, - the default is ``timedelta(seconds=300)``. - :param retry_exponential_backoff: allow progressively longer waits between - retries by using exponential backoff algorithm on retry delay (delay - will be converted into seconds) - :param max_retry_delay: maximum delay interval between retries, can be set as - ``timedelta`` or ``float`` seconds, which will be converted into ``timedelta``. - :param start_date: The ``start_date`` for the task, determines - the ``logical_date`` for the first task instance. The best practice - is to have the start_date rounded - to your DAG's schedule. Daily jobs have their start_date - some day at 00:00:00, hourly jobs have their start_date at 00:00 - of a specific hour. Note that Airflow simply looks at the latest - ``logical_date`` and adds the schedule to determine - the next ``logical_date``. It is also very important - to note that different tasks' dependencies - need to line up in time. If task A depends on task B and their - start_date are offset in a way that their logical_date don't line - up, A's dependencies will never be met. If you are looking to delay - a task, for example running a daily task at 2AM, look into the - ``TimeSensor`` and ``TimeDeltaSensor``. We advise against using - dynamic ``start_date`` and recommend using fixed ones. Read the - FAQ entry about start_date for more information. - :param end_date: if specified, the scheduler won't go beyond this date - :param depends_on_past: when set to true, task instances will run - sequentially and only if the previous instance has succeeded or has been skipped. - The task instance for the start_date is allowed to run. - :param wait_for_past_depends_before_skipping: when set to true, if the task instance - should be marked as skipped, and depends_on_past is true, the ti will stay on None state - waiting the task of the previous run - :param wait_for_downstream: when set to true, an instance of task - X will wait for tasks immediately downstream of the previous instance - of task X to finish successfully or be skipped before it runs. 
This is useful if the - different instances of a task X alter the same asset, and this asset - is used by tasks downstream of task X. Note that depends_on_past - is forced to True wherever wait_for_downstream is used. Also note that - only tasks *immediately* downstream of the previous task instance are waited - for; the statuses of any tasks further downstream are ignored. - :param dag: a reference to the dag the task is attached to (if any) - :param priority_weight: priority weight of this task against other task. - This allows the executor to trigger higher priority tasks before - others when things get backed up. Set priority_weight as a higher - number for more important tasks. - As not all database engines support 64-bit integers, values are capped with 32-bit. - Valid range is from -2,147,483,648 to 2,147,483,647. - :param weight_rule: weighting method used for the effective total - priority weight of the task. Options are: - ``{ downstream | upstream | absolute }`` default is ``downstream`` - When set to ``downstream`` the effective weight of the task is the - aggregate sum of all downstream descendants. As a result, upstream - tasks will have higher weight and will be scheduled more aggressively - when using positive weight values. This is useful when you have - multiple dag run instances and desire to have all upstream tasks to - complete for all runs before each dag can continue processing - downstream tasks. When set to ``upstream`` the effective weight is the - aggregate sum of all upstream ancestors. This is the opposite where - downstream tasks have higher weight and will be scheduled more - aggressively when using positive weight values. This is useful when you - have multiple dag run instances and prefer to have each dag complete - before starting upstream tasks of other dags. When set to - ``absolute``, the effective weight is the exact ``priority_weight`` - specified without additional weighting. You may want to do this when - you know exactly what priority weight each task should have. - Additionally, when set to ``absolute``, there is bonus effect of - significantly speeding up the task creation process as for very large - DAGs. Options can be set as string or using the constants defined in - the static class ``airflow.utils.WeightRule``. - Irrespective of the weight rule, resulting priority values are capped with 32-bit. - |experimental| - Since 2.9.0, Airflow allows to define custom priority weight strategy, - by creating a subclass of - ``airflow.task.priority_strategy.PriorityWeightStrategy`` and registering - in a plugin, then providing the class path or the class instance via - ``weight_rule`` parameter. The custom priority weight strategy will be - used to calculate the effective total priority weight of the task instance. - :param queue: which queue to target when running this job. Not - all executors implement queue management, the CeleryExecutor - does support targeting specific queues. - :param pool: the slot pool this task should run in, slot pools are a - way to limit concurrency for certain tasks - :param pool_slots: the number of pool slots this task should use (>= 1) - Values less than 1 are not allowed. - :param sla: DEPRECATED - The SLA feature is removed in Airflow 3.0, to be replaced with a new implementation in 3.1 - :param execution_timeout: max time allowed for the execution of - this task instance, if it goes beyond it will raise and fail. 
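The retry, priority-weight, and timeout parameters documented above combine freely on a single task. A minimal sketch with illustrative values (the BashOperator import path assumes the Airflow 3 standard provider)::

    from datetime import timedelta

    from airflow.providers.standard.operators.bash import BashOperator

    extract = BashOperator(
        task_id="extract",
        bash_command="python extract.py",
        # Three retries, starting 5 minutes apart, backing off exponentially
        # but never waiting longer than 30 minutes between attempts.
        retries=3,
        retry_delay=timedelta(minutes=5),
        retry_exponential_backoff=True,
        max_retry_delay=timedelta(minutes=30),
        # With weight_rule="upstream", the effective weight is the aggregate
        # over all upstream ancestors, favoring downstream tasks.
        priority_weight=10,
        weight_rule="upstream",
        # Fail the task instance outright if it runs longer than an hour.
        execution_timeout=timedelta(hours=1),
    )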
- :param on_failure_callback: a function or list of functions to be called when a task instance - of this task fails. a context dictionary is passed as a single - parameter to this function. Context contains references to related - objects to the task instance and is documented under the macros - section of the API. - :param on_execute_callback: much like the ``on_failure_callback`` except - that it is executed right before the task is executed. - :param on_retry_callback: much like the ``on_failure_callback`` except - that it is executed when retries occur. - :param on_success_callback: much like the ``on_failure_callback`` except - that it is executed when the task succeeds. - :param on_skipped_callback: much like the ``on_failure_callback`` except - that it is executed when skipped occur; this callback will be called only if AirflowSkipException get raised. - Explicitly it is NOT called if a task is not started to be executed because of a preceding branching - decision in the DAG or a trigger rule which causes execution to skip so that the task execution - is never scheduled. - :param pre_execute: a function to be called immediately before task - execution, receiving a context dictionary; raising an exception will - prevent the task from being executed. - - |experimental| - :param post_execute: a function to be called immediately after task - execution, receiving a context dictionary and task result; raising an - exception will prevent the task from succeeding. - - |experimental| - :param trigger_rule: defines the rule by which dependencies are applied - for the task to get triggered. Options are: - ``{ all_success | all_failed | all_done | all_skipped | one_success | one_done | - one_failed | none_failed | none_failed_min_one_success | none_skipped | always}`` - default is ``all_success``. Options can be set as string or - using the constants defined in the static class - ``airflow.utils.TriggerRule`` - :param resources: A map of resource parameter names (the argument names of the - Resources constructor) to their values. - :param run_as_user: unix username to impersonate while running the task - :param max_active_tis_per_dag: When set, a task will be able to limit the concurrent - runs across logical_dates. - :param max_active_tis_per_dagrun: When set, a task will be able to limit the concurrent - task instances per DAG run. - :param executor: Which executor to target when running this task. NOT YET SUPPORTED - :param executor_config: Additional task-level configuration parameters that are - interpreted by a specific executor. Parameters are namespaced by the name of - executor. - - **Example**: to run this task in a specific docker container through - the KubernetesExecutor :: - - MyOperator(..., executor_config={"KubernetesExecutor": {"image": "myCustomDockerImage"}}) - - :param do_xcom_push: if True, an XCom is pushed containing the Operator's - result - :param multiple_outputs: if True and do_xcom_push is True, pushes multiple XComs, one for each - key in the returned dictionary result. If False and do_xcom_push is True, pushes a single XCom. - :param task_group: The TaskGroup to which the task should belong. This is typically provided when not - using a TaskGroup as a context manager. 
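Each of the callback parameters above receives the task context dictionary as its single argument. A short sketch pairing a failure callback with a trigger rule (the operator, command, and task ids are illustrative; the import path assumes the Airflow 3 standard provider)::

    from airflow.providers.standard.operators.bash import BashOperator

    def alert_on_failure(context):
        # The context carries references to the task instance, dag run, etc.
        ti = context["ti"]
        print(f"Task {ti.task_id} failed on try {ti.try_number}")

    cleanup = BashOperator(
        task_id="cleanup",
        bash_command="rm -rf /tmp/staging",
        on_failure_callback=alert_on_failure,
        trigger_rule="all_done",  # run regardless of upstream outcomes
    )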
- :param doc: Add documentation or notes to your Task objects that is visible in - Task Instance details View in the Webserver - :param doc_md: Add documentation (in Markdown format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param doc_rst: Add documentation (in RST format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param doc_json: Add documentation (in JSON format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param doc_yaml: Add documentation (in YAML format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param task_display_name: The display name of the task which appears on the UI. - :param logger_name: Name of the logger used by the Operator to emit logs. - If set to `None` (default), the logger name will fall back to - `airflow.task.operators.{class.__module__}.{class.__name__}` (e.g. SimpleHttpOperator will have - *airflow.task.operators.airflow.providers.http.operators.http.SimpleHttpOperator* as logger). - :param allow_nested_operators: if True, when an operator is executed within another one a warning message - will be logged. If False, then an exception will be raised if the operator is badly used (e.g. nested - within another one). In future releases of Airflow this parameter will be removed and an exception - will always be thrown when operators are nested within each other (default is True). - - **Example**: example of a bad operator mixin usage:: - - @task(provide_context=True) - def say_hello_world(**context): - hello_world_task = BashOperator( - task_id="hello_world_task", - bash_command="python -c \"print('Hello, world!')\"", - dag=dag, - ) - hello_world_task.execute(context) - """ - - def __init__(self, **kwargs): - if start_date := kwargs.get("start_date", None): - kwargs["start_date"] = timezone.convert_to_utc(start_date) - if end_date := kwargs.get("end_date", None): - kwargs["end_date"] = timezone.convert_to_utc(end_date) - super().__init__(**kwargs) - - # Defines the operator level extra links - operator_extra_links: Collection[BaseOperatorLink] = () - - if TYPE_CHECKING: - - @property # type: ignore[override] - def dag(self) -> SchedulerDAG: # type: ignore[override] - return super().dag # type: ignore[return-value] - - @dag.setter - def dag(self, val: SchedulerDAG): - # For type checking only - ... - - def get_inlet_defs(self): - """ - Get inlet definitions on this task. - - :meta private: - """ - return self.inlets - - def get_outlet_defs(self): - """ - Get outlet definitions on this task. - - :meta private: - """ - return self.outlets - - deps: frozenset[BaseTIDep] = frozenset( - { - NotInRetryPeriodDep(), - PrevDagrunDep(), - TriggerRuleDep(), - NotPreviouslySkippedDep(), - MappedTaskUpstreamDep(), - } - ) - """ - Returns the set of dependencies for the operator. These differ from execution - context dependencies in that they are specific to tasks and can be - extended/overridden by subclasses. - """ - - def execute(self, context: Context) -> Any: - """ - Derive when creating an operator. - - Context is the same dictionary used as when rendering jinja templates. - - Refer to get_template_context for more context. 
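As the ``execute`` docstring just above says, operator authors derive from the base class and override ``execute``. A minimal sketch of a custom operator, assuming the Airflow 3 Task SDK import path::

    from airflow.sdk import BaseOperator

    class HelloOperator(BaseOperator):
        def __init__(self, name: str, **kwargs):
            super().__init__(**kwargs)
            self.name = name

        def execute(self, context):
            # The return value is pushed to XCom when do_xcom_push is True
            # (the default).
            message = f"Hello {self.name}!"
            self.log.info(message)
            return message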
- """ - raise NotImplementedError() - - @provide_session - def clear( - self, - start_date: datetime | None = None, - end_date: datetime | None = None, - upstream: bool = False, - downstream: bool = False, - session: Session = NEW_SESSION, - ): - """Clear the state of task instances associated with the task, following the parameters specified.""" - qry = select(TaskInstance).where(TaskInstance.dag_id == self.dag_id) - - if start_date: - qry = qry.where(TaskInstance.logical_date >= start_date) - if end_date: - qry = qry.where(TaskInstance.logical_date <= end_date) - - tasks = [self.task_id] - - if upstream: - tasks += [t.task_id for t in self.get_flat_relatives(upstream=True)] - - if downstream: - tasks += [t.task_id for t in self.get_flat_relatives(upstream=False)] - - qry = qry.where(TaskInstance.task_id.in_(tasks)) - results = session.scalars(qry).all() - count = len(results) - - if TYPE_CHECKING: - # TODO: Task-SDK: We need to set this to the scheduler DAG until we fully separate scheduling and - # definition code - assert isinstance(self.dag, SchedulerDAG) - - clear_task_instances(results, session, dag=self.dag) - session.commit() - return count - - @provide_session - def get_task_instances( - self, - start_date: datetime | None = None, - end_date: datetime | None = None, - session: Session = NEW_SESSION, - ) -> list[TaskInstance]: - """Get task instances related to this task for a specific date range.""" - from airflow.models import DagRun - - query = ( - select(TaskInstance) - .join(TaskInstance.dag_run) - .where(TaskInstance.dag_id == self.dag_id) - .where(TaskInstance.task_id == self.task_id) - ) - if start_date: - query = query.where(DagRun.logical_date >= start_date) - if end_date: - query = query.where(DagRun.logical_date <= end_date) - return session.scalars(query.order_by(DagRun.logical_date)).all() - - @provide_session - def run( - self, - start_date: datetime | None = None, - end_date: datetime | None = None, - ignore_first_depends_on_past: bool = True, - wait_for_past_depends_before_skipping: bool = False, - ignore_ti_state: bool = False, - mark_success: bool = False, - test_mode: bool = False, - session: Session = NEW_SESSION, - ) -> None: - """Run a set of task instances for a date range.""" - from airflow.models import DagRun - from airflow.utils.types import DagRunType - - # Assertions for typing -- we need a dag, for this function, and when we have a DAG we are - # _guaranteed_ to have start_date (else we couldn't have been added to a DAG) - if TYPE_CHECKING: - assert self.start_date - - # TODO: Task-SDK: We need to set this to the scheduler DAG until we fully separate scheduling and - # definition code - assert isinstance(self.dag, SchedulerDAG) - - start_date = pendulum.instance(start_date or self.start_date) - end_date = pendulum.instance(end_date or self.end_date or timezone.utcnow()) - - for info in self.dag.iter_dagrun_infos_between(start_date, end_date, align=False): - ignore_depends_on_past = info.logical_date == start_date and ignore_first_depends_on_past - try: - dag_run = session.scalars( - select(DagRun).where( - DagRun.dag_id == self.dag_id, - DagRun.logical_date == info.logical_date, - ) - ).one() - ti = TaskInstance(self, run_id=dag_run.run_id) - except NoResultFound: - # This is _mostly_ only used in tests - dr = DagRun( - dag_id=self.dag_id, - run_id=DagRun.generate_run_id( - run_type=DagRunType.MANUAL, - logical_date=info.logical_date, - run_after=info.run_after, - ), - run_type=DagRunType.MANUAL, - logical_date=info.logical_date, - 
data_interval=info.data_interval, - run_after=info.run_after, - triggered_by=DagRunTriggeredByType.TEST, - state=DagRunState.RUNNING, - ) - ti = TaskInstance(self, run_id=dr.run_id) - ti.dag_run = dr - session.add(dr) - session.flush() - - ti.run( - mark_success=mark_success, - ignore_depends_on_past=ignore_depends_on_past, - wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, - ignore_ti_state=ignore_ti_state, - test_mode=test_mode, - session=session, - ) - - def dry_run(self) -> None: - """Perform dry run for the operator - just render template fields.""" - self.log.info("Dry run") - for field in self.template_fields: - try: - content = getattr(self, field) - except AttributeError: - raise AttributeError( - f"{field!r} is configured as a template field " - f"but {self.task_type} does not have this attribute." - ) - - if content and isinstance(content, str): - self.log.info("Rendering template for %s", field) - self.log.info(content) - - def get_direct_relatives(self, upstream: bool = False) -> Iterable[Operator]: - """Get list of the direct relatives to the current task, upstream or downstream.""" - if upstream: - return self.upstream_list - return self.downstream_list - - @staticmethod - def xcom_push( - context: Any, - key: str, - value: Any, - ) -> None: - """ - Make an XCom available for tasks to pull. - - :param context: Execution Context Dictionary - :param key: A key for the XCom - :param value: A value for the XCom. The value is pickled and stored - in the database. - """ - context["ti"].xcom_push(key=key, value=value) - - @staticmethod - @provide_session - def xcom_pull( - context: Any, - task_ids: str | list[str] | None = None, - dag_id: str | None = None, - key: str = XCOM_RETURN_KEY, - include_prior_dates: bool | None = None, - session: Session = NEW_SESSION, - ) -> Any: - """ - Pull XComs that optionally meet certain criteria. - - The default value for `key` limits the search to XComs - that were returned by other tasks (as opposed to those that were pushed - manually). To remove this filter, pass key=None (or any desired value). - - If a single task_id string is provided, the result is the value of the - most recent matching XCom from that task_id. If multiple task_ids are - provided, a tuple of matching values is returned. None is returned - whenever no matches are found. - - :param context: Execution Context Dictionary - :param key: A key for the XCom. If provided, only XComs with matching - keys will be returned. The default key is 'return_value', also - available as a constant XCOM_RETURN_KEY. This key is automatically - given to XComs returned by tasks (as opposed to being pushed - manually). To remove the filter, pass key=None. - :param task_ids: Only XComs from tasks with matching ids will be - pulled. Can pass None to remove the filter. - :param dag_id: If provided, only pulls XComs from this DAG. - If None (default), the DAG of the calling task is used. - :param include_prior_dates: If False, only XComs from the current - logical_date are returned. If True, XComs from previous dates - are returned as well. 
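The static ``xcom_push``/``xcom_pull`` helpers above simply delegate to the task instance found in the context. A sketch of the equivalent calls from inside running task callables (task ids and keys are illustrative)::

    # Sketch of task callables; in practice these would run via @task or
    # PythonOperator, which supply the context as keyword arguments.
    def push_metrics(**context):
        context["ti"].xcom_push(key="rows_loaded", value=42)

    def pull_metrics(**context):
        # key defaults to XCOM_RETURN_KEY ("return_value"); pass key=None
        # to remove the filter entirely.
        rows = context["ti"].xcom_pull(task_ids="push_metrics_task", key="rows_loaded")
        print(f"Upstream loaded {rows} rows")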
- """ - return context["ti"].xcom_pull( - key=key, - task_ids=task_ids, - dag_id=dag_id, - include_prior_dates=include_prior_dates, - session=session, - ) - - def serialize_for_task_group(self) -> tuple[DagAttributeTypes, Any]: - """Serialize; required by DAGNode.""" - return DagAttributeTypes.OP, self.task_id - - def unmap(self, resolve: None | dict[str, Any] | tuple[Context, Session]) -> BaseOperator: - """ - Get the "normal" operator from the current operator. - - Since a BaseOperator is not mapped to begin with, this simply returns - the original operator. - - :meta private: - """ - return self - - def expand_start_from_trigger(self, *, context: Context, session: Session) -> bool: - """ - Get the start_from_trigger value of the current abstract operator. - - Since a BaseOperator is not mapped to begin with, this simply returns - the original value of start_from_trigger. - - :meta private: - """ - return self.start_from_trigger - - def expand_start_trigger_args(self, *, context: Context, session: Session) -> StartTriggerArgs | None: - """ - Get the start_trigger_args value of the current abstract operator. - - Since a BaseOperator is not mapped to begin with, this simply returns - the original value of start_trigger_args. - - :meta private: - """ - return self.start_trigger_args - - if TYPE_CHECKING: - - @classmethod - def get_mapped_ti_count( - cls, node: DAGNode | MappedTaskGroup, run_id: str, *, session: Session - ) -> int: - """ - Return the number of mapped TaskInstances that can be created at run time. - - This considers both literal and non-literal mapped arguments, and the - result is therefore available when all depended tasks have finished. The - return value should be identical to ``parse_time_mapped_ti_count`` if - all mapped arguments are literal. - - :raise NotFullyPopulated: If upstream tasks are not all complete yet. - :raise NotMapped: If the operator is neither mapped, nor has any parent - mapped task groups. - :return: Total number of mapped TIs this task should have. - """ - else: - - @singledispatchmethod - @classmethod - def get_mapped_ti_count(cls, task: DAGNode, run_id: str, *, session: Session) -> int: - raise NotImplementedError(f"Not implemented for {type(task)}") - - # https://github.com/python/cpython/issues/86153 - # WHile we support Python 3.9 we can't rely on the type hint, we need to pass the type explicitly to - # register. - @get_mapped_ti_count.register(TaskSDKAbstractOperator) - @classmethod - def _(cls, task: TaskSDKAbstractOperator, run_id: str, *, session: Session) -> int: - group = task.get_closest_mapped_task_group() - if group is None: - raise NotMapped() - return cls.get_mapped_ti_count(group, run_id, session=session) - - @get_mapped_ti_count.register(MappedOperator) - @classmethod - def _(cls, task: MappedOperator, run_id: str, *, session: Session) -> int: - from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef - - exp_input = task._get_specified_expand_input() - if isinstance(exp_input, _ExpandInputRef): - exp_input = exp_input.deref(task.dag) - # TODO: TaskSDK This is only needed to support `dag.test()` etc until we port it over to use the - # task sdk runner. 
- if not hasattr(exp_input, "get_total_map_length"): - exp_input = _ExpandInputRef( - type(exp_input).EXPAND_INPUT_TYPE, - BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)), - ) - exp_input = exp_input.deref(task.dag) - - current_count = exp_input.get_total_map_length(run_id, session=session) - - group = task.get_closest_mapped_task_group() - if group is None: - return current_count - parent_count = cls.get_mapped_ti_count(group, run_id, session=session) - return parent_count * current_count - - @get_mapped_ti_count.register(TaskGroup) - @classmethod - def _(cls, group: TaskGroup, run_id: str, *, session: Session) -> int: - """ - Return the number of instances a task in this group should be mapped to at run time. - - This considers both literal and non-literal mapped arguments, and the - result is therefore available when all depended tasks have finished. The - return value should be identical to ``parse_time_mapped_ti_count`` if - all mapped arguments are literal. - - If this group is inside mapped task groups, all the nested counts are - multiplied and accounted. - - :raise NotFullyPopulated: If upstream tasks are not all complete yet. - :return: Total number of mapped TIs this task should have. - """ - from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef - - def iter_mapped_task_group_lengths(group) -> Iterator[int]: - while group is not None: - if isinstance(group, MappedTaskGroup): - exp_input = group._expand_input - # TODO: TaskSDK This is only needed to support `dag.test()` etc until we port it over to use the - # task sdk runner. - if not hasattr(exp_input, "get_total_map_length"): - exp_input = _ExpandInputRef( - type(exp_input).EXPAND_INPUT_TYPE, - BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)), - ) - exp_input = exp_input.deref(group.dag) - yield exp_input.get_total_map_length(run_id, session=session) - group = group.parent_group - - return functools.reduce(operator.mul, iter_mapped_task_group_lengths(group)) diff --git a/airflow-core/src/airflow/models/baseoperatorlink.py b/airflow-core/src/airflow/models/baseoperatorlink.py deleted file mode 100644 index 09d21f868515d..0000000000000 --- a/airflow-core/src/airflow/models/baseoperatorlink.py +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
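Note how the ``TaskGroup`` overload above walks every enclosing mapped task group and multiplies their map lengths with ``functools.reduce``. A toy illustration of that reduction::

    import functools
    import operator

    # An outer group expanded over 3 values containing an inner group
    # expanded over 4 values yields 3 * 4 = 12 task instances.
    lengths = [3, 4]
    assert functools.reduce(operator.mul, lengths) == 12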
- -"""Re exporting the new baseoperatorlink module from Task SDK for backward compatibility.""" - -from __future__ import annotations - -from airflow.sdk.bases.operatorlink import BaseOperatorLink as BaseOperatorLink diff --git a/airflow-core/src/airflow/models/connection.py b/airflow-core/src/airflow/models/connection.py index 2727c926e0f2a..782acbf4140de 100644 --- a/airflow-core/src/airflow/models/connection.py +++ b/airflow-core/src/airflow/models/connection.py @@ -21,6 +21,7 @@ import logging import re import sys +import warnings from contextlib import suppress from json import JSONDecodeError from typing import Any @@ -33,8 +34,8 @@ from airflow.exceptions import AirflowException, AirflowNotFoundException from airflow.models.base import ID_LEN, Base from airflow.models.crypto import get_fernet +from airflow.sdk import SecretCache from airflow.sdk.execution_time.secrets_masker import mask_secret -from airflow.secrets.cache import SecretCache from airflow.utils.helpers import prune_dict from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.module_loading import import_string @@ -44,7 +45,7 @@ # the symbols #,!,-,_,.,:,\,/ and () requiring at least one match. # # You can try the regex here: https://regex101.com/r/69033B/1 -RE_SANITIZE_CONN_ID = re.compile(r"^[\w\#\!\(\)\-\.\:\/\\]{1,}$") +RE_SANITIZE_CONN_ID = re.compile(r"^[\w#!()\-.:/\\]{1,}$") # the conn ID max len should be 250 CONN_ID_MAX_LEN: int = 250 @@ -266,11 +267,20 @@ def get_uri(self) -> str: if self.host and "://" in self.host: protocol, host = self.host.split("://", 1) + # If the protocol in host matches the connection type, don't add it again + if protocol == self.conn_type: + host_to_use = self.host + protocol_to_add = None + else: + # Different protocol, add it to the URI + host_to_use = host + protocol_to_add = protocol else: - protocol, host = None, self.host + host_to_use = self.host + protocol_to_add = None - if protocol: - uri += f"{protocol}://" + if protocol_to_add: + uri += f"{protocol_to_add}://" authority_block = "" if self.login is not None: @@ -285,8 +295,8 @@ def get_uri(self) -> str: uri += authority_block host_block = "" - if host: - host_block += quote(host, safe="") + if host_to_use: + host_block += quote(host_to_use, safe="") if self.port: if host_block == "" and authority_block == "": @@ -464,10 +474,15 @@ def get_connection_from_secrets(cls, conn_id: str) -> Connection: # If this is set it means are in some kind of execution context (Task, Dag Parse or Triggerer perhaps) # and should use the Task SDK API server path if hasattr(sys.modules.get("airflow.sdk.execution_time.task_runner"), "SUPERVISOR_COMMS"): - # TODO: AIP 72: Add deprecation here once we move this module to task sdk. from airflow.sdk import Connection as TaskSDKConnection from airflow.sdk.exceptions import AirflowRuntimeError, ErrorType + warnings.warn( + "Using Connection.get_connection_from_secrets from `airflow.models` is deprecated." 
+ "Please use `from airflow.sdk import Connection` instead", + DeprecationWarning, + stacklevel=1, + ) try: conn = TaskSDKConnection.get(conn_id=conn_id) if isinstance(conn, TaskSDKConnection): @@ -478,8 +493,7 @@ def get_connection_from_secrets(cls, conn_id: str) -> Connection: return conn except AirflowRuntimeError as e: if e.error.error == ErrorType.CONNECTION_NOT_FOUND: - log.debug("Unable to retrieve connection from MetastoreBackend using Task SDK") - raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined") + raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined") from None raise # check cache first diff --git a/airflow-core/src/airflow/models/dag.py b/airflow-core/src/airflow/models/dag.py index 13a93bd7de31f..6d8d267fffe81 100644 --- a/airflow-core/src/airflow/models/dag.py +++ b/airflow-core/src/airflow/models/dag.py @@ -17,24 +17,17 @@ # under the License. from __future__ import annotations -import asyncio import copy import functools import logging import re -import sys -import time from collections import defaultdict -from collections.abc import Collection, Generator, Iterable, Sequence -from contextlib import ExitStack +from collections.abc import Callable, Collection, Generator, Iterable, Sequence from datetime import datetime, timedelta from functools import cache -from pathlib import Path -from re import Pattern from typing import ( TYPE_CHECKING, Any, - Callable, TypeVar, Union, cast, @@ -46,7 +39,6 @@ import pendulum import sqlalchemy_jsonfield from dateutil.relativedelta import relativedelta -from packaging import version as packaging_version from sqlalchemy import ( Boolean, Column, @@ -69,21 +61,20 @@ from sqlalchemy.sql import Select, expression from airflow import settings, utils +from airflow._shared.timezones import timezone from airflow.assets.evaluation import AssetEvaluator -from airflow.configuration import conf as airflow_conf, secrets_backend_list +from airflow.configuration import conf as airflow_conf from airflow.exceptions import ( AirflowException, - TaskDeferred, UnknownExecutorException, ) from airflow.executors.executor_loader import ExecutorLoader -from airflow.executors.workloads import BundleInfo +from airflow.models import Deadline from airflow.models.asset import ( AssetDagRunQueue, AssetModel, ) from airflow.models.base import Base, StringID -from airflow.models.baseoperator import BaseOperator from airflow.models.dag_version import DagVersion from airflow.models.dagrun import RUN_ID_REGEX, DagRun from airflow.models.taskinstance import ( @@ -95,10 +86,8 @@ from airflow.sdk import TaskGroup from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetUniqueKey, BaseAsset from airflow.sdk.definitions.dag import DAG as TaskSDKDag, dag as task_sdk_dag_decorator -from airflow.secrets.local_filesystem import LocalFilesystemBackend -from airflow.security import permissions +from airflow.sdk.definitions.deadline import DeadlineAlert, DeadlineReference from airflow.settings import json -from airflow.stats import Stats from airflow.timetables.base import DagRunInfo, DataInterval, TimeRestriction, Timetable from airflow.timetables.interval import CronDataIntervalTimetable, DeltaDataIntervalTimetable from airflow.timetables.simple import ( @@ -106,24 +95,25 @@ NullTimetable, OnceTimetable, ) -from airflow.utils import timezone from airflow.utils.context import Context -from airflow.utils.dag_cycle_tester import check_cycle from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.session import 
NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime, lock_rows, with_row_locks -from airflow.utils.state import DagRunState, State, TaskInstanceState +from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunTriggeredByType, DagRunType if TYPE_CHECKING: + from typing import Literal, TypeAlias + + from pydantic import NonNegativeInt from sqlalchemy.orm.query import Query from sqlalchemy.orm.session import Session from airflow.models.dagbag import DagBag - from airflow.models.operator import Operator - from airflow.sdk.definitions._internal.abstractoperator import TaskStateChangeCallback - from airflow.serialization.serialized_objects import MaybeSerializedDAG - from airflow.typing_compat import Literal + from airflow.models.mappedoperator import MappedOperator + from airflow.serialization.serialized_objects import MaybeSerializedDAG, SerializedBaseOperator + + Operator: TypeAlias = MappedOperator | SerializedBaseOperator log = logging.getLogger(__name__) @@ -132,14 +122,9 @@ TAG_MAX_LEN = 100 DagStateChangeCallback = Callable[[Context], None] -ScheduleInterval = Union[None, str, timedelta, relativedelta] +ScheduleInterval = None | str | timedelta | relativedelta -ScheduleArg = Union[ - ScheduleInterval, - Timetable, - BaseAsset, - Collection[Union["Asset", "AssetAlias"]], -] +ScheduleArg = ScheduleInterval | Timetable | BaseAsset | Collection[Union["Asset", "AssetAlias"]] class InconsistentDataInterval(AirflowException): @@ -237,13 +222,6 @@ def get_asset_triggered_next_run_info( } -def _triggerer_is_healthy(session: Session): - from airflow.jobs.triggerer_job_runner import TriggererJobRunner - - job = TriggererJobRunner.most_recent_job(session=session) - return job and job.is_alive() - - @provide_session def _create_orm_dagrun( *, @@ -257,8 +235,9 @@ def _create_orm_dagrun( state: DagRunState | None, run_type: DagRunType, creating_job_id: int | None, - backfill_id: int | None, + backfill_id: NonNegativeInt | None, triggered_by: DagRunTriggeredByType, + triggering_user_name: str | None = None, session: Session = NEW_SESSION, ) -> DagRun: bundle_version = None @@ -267,6 +246,9 @@ def _create_orm_dagrun( select(DagModel.bundle_version).where(DagModel.dag_id == dag.dag_id), ) dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) + if not dag_version: + raise AirflowException(f"Cannot create DagRun for DAG {dag.dag_id} because the dag is not serialized") + run = DagRun( dag_id=dag.dag_id, run_id=run_id, @@ -279,6 +261,7 @@ def _create_orm_dagrun( creating_job_id=creating_job_id, data_interval=data_interval, triggered_by=triggered_by, + triggering_user_name=triggering_user_name, backfill_id=backfill_id, bundle_version=bundle_version, ) @@ -291,7 +274,7 @@ def _create_orm_dagrun( run.dag = dag # create the associated task instances # state is None at the moment of creation - run.verify_integrity(session=session, dag_version_id=dag_version.id if dag_version else None) + run.verify_integrity(session=session, dag_version_id=dag_version.id) return run @@ -384,6 +367,19 @@ class DAG(TaskSDKDag, LoggingMixin): :param dagrun_timeout: Specify the duration a DagRun should be allowed to run before it times out or fails. Task instances that are running when a DagRun is timed out will be marked as skipped. :param sla_miss_callback: DEPRECATED - The SLA feature is removed in Airflow 3.0, to be replaced with a new implementation in 3.1 + :param deadline: Optional Deadline Alert for the DAG. 
+ Specifies a time by which the DAG run should be complete, either in the form of a static datetime + or calculated relative to a reference timestamp. If the deadline passes before completion, the + provided callback is triggered. + + **Example**: To set the deadline for one hour after the DAG run starts you could use :: + + DeadlineAlert( + reference=DeadlineReference.DAGRUN_LOGICAL_DATE, + interval=timedelta(hours=1), + callback=my_callback, + ) + :param catchup: Perform scheduler catchup (or only run latest)? Defaults to False :param on_failure_callback: A function or list of functions to be called when a DagRun of this dag fails. A context dictionary is passed as a single parameter to this function. @@ -465,27 +461,14 @@ def _upgrade_outdated_dag_access_control(access_control=None): """Look for outdated dag level actions in DAG access_controls and replace them with updated actions.""" if access_control is None: return None - - from airflow.providers.fab import __version__ as FAB_VERSION - updated_access_control = {} for role, perms in access_control.items(): - if packaging_version.parse(FAB_VERSION) >= packaging_version.parse("1.3.0"): - updated_access_control[role] = updated_access_control.get(role, {}) - if isinstance(perms, (set, list)): - # Support for old-style access_control where only the actions are specified - updated_access_control[role][permissions.RESOURCE_DAG] = set(perms) - else: - updated_access_control[role] = perms - elif isinstance(perms, dict): - # Not allow new access control format with old FAB versions - raise AirflowException( - "Please upgrade the FAB provider to a version >= 1.3.0 to allow " - "use the Dag Level Access Control new format." - ) + updated_access_control[role] = updated_access_control.get(role, {}) + if isinstance(perms, (set, list)): + # Support for old-style access_control where only the actions are specified + updated_access_control[role]["DAGs"] = set(perms) else: - updated_access_control[role] = set(perms) - + updated_access_control[role] = perms return updated_access_control def get_next_data_interval(self, dag_model: DagModel) -> DataInterval | None: @@ -712,15 +695,6 @@ def get_last_dagrun(self, session=NEW_SESSION, include_manually_triggered=False) self.dag_id, session=session, include_manually_triggered=include_manually_triggered ) - @provide_session - def has_dag_runs(self, session=NEW_SESSION, include_manually_triggered=True) -> bool: - return ( - get_last_dagrun( - self.dag_id, session=session, include_manually_triggered=include_manually_triggered - ) - is not None - ) - @property def dag_id(self) -> str: return self._dag_id @@ -729,10 +703,6 @@ def dag_id(self) -> str: def dag_id(self, value: str) -> None: self._dag_id = value - @property - def timetable_summary(self) -> str: - return self.timetable.summary - @provide_session def get_concurrency_reached(self, session=NEW_SESSION) -> bool: """Return a boolean indicating whether the max_active_tasks limit for this DAG has been reached.""" @@ -776,89 +746,6 @@ def get_serialized_fields(cls): """Stringified DAGs and operators contain exactly these fields.""" return TaskSDKDag.get_serialized_fields() | {"_processor_dags_folder"} - @staticmethod - @provide_session - def fetch_callback( - dag: DAG, - run_id: str, - success: bool = True, - reason: str | None = None, - *, - session: Session = NEW_SESSION, - ) -> tuple[list[TaskStateChangeCallback], Context] | None: - """ - Fetch the appropriate callbacks depending on the value of success. 
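Stepping back to the new ``deadline`` argument documented above: a hedged sketch of attaching the docstring's example alert to a DAG. The DAG import path and the callback body are illustrative assumptions; only ``DeadlineAlert``, ``DeadlineReference.DAGRUN_LOGICAL_DATE``, and the ``deadline`` parameter come from the change itself::

    from datetime import timedelta

    from airflow.sdk import DAG
    from airflow.sdk.definitions.deadline import DeadlineAlert, DeadlineReference

    def notify_late_run(**kwargs):
        # Hypothetical callback; its exact signature is not shown in this diff.
        print("DAG run missed its deadline")

    with DAG(
        dag_id="hourly_pipeline",
        schedule="@hourly",
        deadline=DeadlineAlert(
            reference=DeadlineReference.DAGRUN_LOGICAL_DATE,
            interval=timedelta(hours=1),
            callback=notify_late_run,
        ),
    ):
        ...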
- - This method gets the context of a single TaskInstance part of this DagRun and returns it along - the list of callbacks. - - :param dag: DAG object - :param run_id: The DAG run ID - :param success: Flag to specify if failure or success callback should be called - :param reason: Completion reason - :param session: Database session - """ - callbacks = dag.on_success_callback if success else dag.on_failure_callback - if callbacks: - dagrun = DAG.fetch_dagrun(dag_id=dag.dag_id, run_id=run_id, session=session) - callbacks = callbacks if isinstance(callbacks, list) else [callbacks] - tis = dagrun.get_task_instances(session=session) - # tis from a dagrun may not be a part of dag.partial_subset, - # since dag.partial_subset is a subset of the dag. - # This ensures that we will only use the accessible TI - # context for the callback. - if dag.partial: - tis = [ti for ti in tis if not ti.state == State.NONE] - # filter out removed tasks - tis = [ti for ti in tis if ti.state != TaskInstanceState.REMOVED] - ti = tis[-1] # get first TaskInstance of DagRun - ti.task = dag.get_task(ti.task_id) - context = ti.get_template_context(session=session) - context["reason"] = reason - return callbacks, context - return None - - @provide_session - def handle_callback(self, dagrun: DagRun, success=True, reason=None, session=NEW_SESSION): - """ - Triggers on_failure_callback or on_success_callback as appropriate. - - This method gets the context of a single TaskInstance part of this DagRun - and passes that to the callable along with a 'reason', primarily to - differentiate DagRun failures. - - .. note: The logs end up in - ``$AIRFLOW_HOME/logs/scheduler/latest/PROJECT/DAG_FILE.py.log`` - - :param dagrun: DagRun object - :param success: Flag to specify if failure or success callback should be called - :param reason: Completion reason - :param session: Database session - """ - callbacks, context = DAG.fetch_callback( - dag=self, run_id=dagrun.run_id, success=success, reason=reason, session=session - ) or (None, None) - - DAG.execute_callback(callbacks, context, self.dag_id) - - @classmethod - def execute_callback(cls, callbacks: list[Callable] | None, context: Context | None, dag_id: str): - """ - Triggers the callbacks with the given context. - - :param callbacks: List of callbacks to call - :param context: Context to pass to all callbacks - :param dag_id: The dag_id of the DAG to find. - """ - if callbacks and context: - for callback in callbacks: - cls.logger().info("Executing dag callback function: %s", callback) - try: - callback(context) - except Exception: - cls.logger().exception("failed to invoke dag state update callback") - Stats.incr("dag.callback_exceptions", tags={"dag_id": dag_id}) - def get_active_runs(self): """ Return a list of dag run logical dates currently running. @@ -954,7 +841,7 @@ def get_task_instances( self, start_date: datetime | None = None, end_date: datetime | None = None, - state: list[TaskInstanceState] | None = None, + state: TaskInstanceState | Sequence[TaskInstanceState] | None = None, session: Session = NEW_SESSION, ) -> list[TaskInstance]: if not start_date: @@ -1224,7 +1111,9 @@ def set_task_instance_state( """ from airflow.api.common.mark_tasks import set_state - task = self.get_task(task_id) + # TODO (GH-52141): get_task in scheduler needs to return scheduler types + # instead, but currently it inherits SDK's DAG. 
+ task = cast("Operator", self.get_task(task_id)) task.dag = self tasks_to_set_state: list[Operator | tuple[Operator, int]] @@ -1251,7 +1140,7 @@ def set_task_instance_state( # Clear downstream tasks that are in failed/upstream_failed state to resume them. # Flush the session so that the tasks marked success are reflected in the db. session.flush() - subdag = self.partial_subset( + subset = self.partial_subset( task_ids={task_id}, include_downstream=True, include_upstream=False, @@ -1273,9 +1162,9 @@ def set_task_instance_state( } if not future and not past: # Simple case 1: we're only dealing with exactly one run. clear_kwargs["run_id"] = run_id - subdag.clear(**clear_kwargs) + subset.clear(**clear_kwargs) elif future and past: # Simple case 2: we're clearing ALL runs. - subdag.clear(**clear_kwargs) + subset.clear(**clear_kwargs) else: # Complex cases: we may have more than one run, based on a date range. # Make 'future' and 'past' make some sense when multiple runs exist # for the same logical date. We order runs by their id and only @@ -1287,7 +1176,7 @@ def set_task_instance_state( else: clear_kwargs["end_date"] = logical_date exclude_run_id_stmt = exclude_run_id_stmt.where(DagRun.id < dr_id) - subdag.clear(exclude_run_ids=frozenset(session.scalars(exclude_run_id_stmt)), **clear_kwargs) + subset.clear(exclude_run_ids=frozenset(session.scalars(exclude_run_id_stmt)), **clear_kwargs) return altered @provide_session @@ -1318,9 +1207,10 @@ def set_task_group_state( :param session: new session """ from airflow.api.common.mark_tasks import set_state + from airflow.serialization.serialized_objects import SerializedBaseOperator as BaseOperator - tasks_to_set_state: list[BaseOperator | tuple[BaseOperator, int]] = [] - task_ids: list[str] = [] + tasks_to_set_state: list + task_ids: list[str] task_group_dict = self.task_group.get_task_group_dict() task_group = task_group_dict.get(group_id) @@ -1363,13 +1253,13 @@ def get_logical_date() -> datetime: # Clear downstream tasks that are in failed/upstream_failed state to resume them. # Flush the session so that the tasks marked success are reflected in the db. session.flush() - task_subset = self.partial_subset( + subset = self.partial_subset( task_ids=task_ids, include_downstream=True, include_upstream=False, ) - task_subset.clear( + subset.clear( start_date=start_date, end_date=end_date, only_failed=True, @@ -1395,6 +1285,7 @@ def clear( dag_bag: DagBag | None = None, exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), exclude_run_ids: frozenset[str] | None = frozenset(), + run_on_latest_version: bool = False, ) -> list[TaskInstance]: ... # pragma: no cover @overload @@ -1412,6 +1303,7 @@ def clear( dag_bag: DagBag | None = None, exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), exclude_run_ids: frozenset[str] | None = frozenset(), + run_on_latest_version: bool = False, ) -> int: ... # pragma: no cover @overload @@ -1430,6 +1322,7 @@ def clear( dag_bag: DagBag | None = None, exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), exclude_run_ids: frozenset[str] | None = frozenset(), + run_on_latest_version: bool = False, ) -> list[TaskInstance]: ... # pragma: no cover @overload @@ -1448,6 +1341,7 @@ def clear( dag_bag: DagBag | None = None, exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), exclude_run_ids: frozenset[str] | None = frozenset(), + run_on_latest_version: bool = False, ) -> int: ... 
# pragma: no cover @provide_session @@ -1467,6 +1361,7 @@ def clear( dag_bag: DagBag | None = None, exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(), exclude_run_ids: frozenset[str] | None = frozenset(), + run_on_latest_version: bool = False, ) -> int | Iterable[TaskInstance]: """ Clear a set of task instances associated with the current dag for a specified date range. @@ -1481,6 +1376,7 @@ def clear( :param dag_run_state: state to set DagRun to. If set to False, dagrun state will not be changed. :param dry_run: Find the tasks to clear but don't clear them. + :param run_on_latest_version: whether to run on latest serialized DAG and Bundle version :param session: The sqlalchemy session to use :param dag_bag: The DagBag used to find the dags (Optional) :param exclude_task_ids: A set of ``task_id`` or (``task_id``, ``map_index``) @@ -1525,8 +1421,8 @@ def clear( clear_task_instances( list(tis), session, - dag=self, dag_run_state=dag_run_state, + run_on_latest_version=run_on_latest_version, ) else: count = 0 @@ -1549,6 +1445,8 @@ def clear_dags( ): all_tis = [] for dag in dags: + if not isinstance(dag, DAG): + dag = DAG.from_sdk_dag(dag) tis = dag.clear( start_date=start_date, end_date=end_date, @@ -1575,6 +1473,8 @@ def clear_dags( if do_it: for dag in dags: + if not isinstance(dag, DAG): + dag = DAG.from_sdk_dag(dag) dag.clear( start_date=start_date, end_date=end_date, @@ -1589,198 +1489,6 @@ def clear_dags( print("Cancelled, nothing was cleared.") return count - def cli(self): - """Exposes a CLI specific to this DAG.""" - check_cycle(self) - - from airflow.cli import cli_parser - - parser = cli_parser.get_parser(dag_parser=True) - args = parser.parse_args() - args.func(args, self) - - @provide_session - def test( - self, - run_after: datetime | None = None, - logical_date: datetime | None = None, - run_conf: dict[str, Any] | None = None, - conn_file_path: str | None = None, - variable_file_path: str | None = None, - use_executor: bool = False, - mark_success_pattern: Pattern | str | None = None, - session: Session = NEW_SESSION, - ) -> DagRun: - """ - Execute one single DagRun for a given DAG and logical date. - - :param run_after: the datetime before which to Dag cannot run. - :param logical_date: logical date for the DAG run - :param run_conf: configuration to pass to newly created dagrun - :param conn_file_path: file path to a connection file in either yaml or json - :param variable_file_path: file path to a variable file in either yaml or json - :param use_executor: if set, uses an executor to test the DAG - :param mark_success_pattern: regex of task_ids to mark as success instead of running - :param session: database connection (optional) - """ - from airflow.serialization.serialized_objects import SerializedDAG - - def add_logger_if_needed(ti: TaskInstance): - """ - Add a formatted logger to the task instance. - - This allows all logs to surface to the command line, instead of into - a task file. Since this is a local test run, it is much better for - the user to see logs in the command line, rather than needing to - search for a log file. - - :param ti: The task instance that will receive a logger. 
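Returning to the ``clear`` API above: the new ``run_on_latest_version`` flag opts cleared task instances into the latest serialized DAG and bundle version rather than the pinned one. A hedged sketch, with ``dag``, ``start``, and ``end`` assumed to be in scope::

    cleared_count = dag.clear(
        start_date=start,
        end_date=end,
        # New in this change: rerun the cleared tasks against the latest
        # serialized DAG / bundle version.
        run_on_latest_version=True,
    )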
- """ - format = logging.Formatter("[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s") - handler = logging.StreamHandler(sys.stdout) - handler.level = logging.INFO - handler.setFormatter(format) - # only add log handler once - if not any(isinstance(h, logging.StreamHandler) for h in ti.log.handlers): - self.log.debug("Adding Streamhandler to taskinstance %s", ti.task_id) - ti.log.addHandler(handler) - - exit_stack = ExitStack() - if conn_file_path or variable_file_path: - local_secrets = LocalFilesystemBackend( - variables_file_path=variable_file_path, connections_file_path=conn_file_path - ) - secrets_backend_list.insert(0, local_secrets) - exit_stack.callback(lambda: secrets_backend_list.pop(0)) - - with exit_stack: - self.validate() - self.log.debug("Clearing existing task instances for logical date %s", logical_date) - self.clear( - start_date=logical_date, - end_date=logical_date, - dag_run_state=False, # type: ignore - session=session, - ) - self.log.debug("Getting dagrun for dag %s", self.dag_id) - logical_date = timezone.coerce_datetime(logical_date) - run_after = timezone.coerce_datetime(run_after) or timezone.coerce_datetime(timezone.utcnow()) - data_interval = ( - self.timetable.infer_manual_data_interval(run_after=logical_date) if logical_date else None - ) - scheduler_dag = SerializedDAG.deserialize_dag(SerializedDAG.serialize_dag(self)) - - dr: DagRun = _get_or_create_dagrun( - dag=scheduler_dag, - start_date=logical_date or run_after, - logical_date=logical_date, - data_interval=data_interval, - run_after=run_after, - run_id=DagRun.generate_run_id( - run_type=DagRunType.MANUAL, - logical_date=logical_date, - run_after=run_after, - ), - session=session, - conf=run_conf, - triggered_by=DagRunTriggeredByType.TEST, - ) - # Start a mock span so that one is present and not started downstream. We - # don't care about otel in dag.test and starting the span during dagrun update - # is not functioning properly in this context anyway. - dr.start_dr_spans_if_needed(tis=[]) - - tasks = self.task_dict - self.log.debug("starting dagrun") - # Instead of starting a scheduler, we run the minimal loop possible to check - # for task readiness and dependency management. - - # ``Dag.test()`` works in two different modes depending on ``use_executor``: - # - if ``use_executor`` is False, runs the task locally with no executor using ``_run_task`` - # - if ``use_executor`` is True, sends the task instances to the executor with - # ``BaseExecutor.queue_task_instance`` - if use_executor: - from airflow.models.dagbag import DagBag - - dag_bag = DagBag() - dag_bag.bag_dag(self) - - executor = ExecutorLoader.get_default_executor() - executor.start() - - while dr.state == DagRunState.RUNNING: - session.expire_all() - schedulable_tis, _ = dr.update_state(session=session) - for s in schedulable_tis: - if s.state != TaskInstanceState.UP_FOR_RESCHEDULE: - s.try_number += 1 - s.state = TaskInstanceState.SCHEDULED - s.scheduled_dttm = timezone.utcnow() - session.commit() - # triggerer may mark tasks scheduled so we read from DB - all_tis = set(dr.get_task_instances(session=session)) - scheduled_tis = {x for x in all_tis if x.state == TaskInstanceState.SCHEDULED} - ids_unrunnable = {x for x in all_tis if x.state not in State.finished} - scheduled_tis - if not scheduled_tis and ids_unrunnable: - self.log.warning("No tasks to run. 
unrunnable tasks: %s", ids_unrunnable) - time.sleep(1) - - triggerer_running = _triggerer_is_healthy(session) - for ti in scheduled_tis: - ti.task = tasks[ti.task_id] - - mark_success = ( - re.compile(mark_success_pattern).fullmatch(ti.task_id) is not None - if mark_success_pattern is not None - else False - ) - - if use_executor: - if executor.has_task(ti): - continue - # TODO: Task-SDK: This check is transitionary. Remove once all executors are ported over. - from airflow.executors import workloads - from airflow.executors.base_executor import BaseExecutor - - if executor.queue_workload.__func__ is not BaseExecutor.queue_workload: # type: ignore[attr-defined] - workload = workloads.ExecuteTask.make( - ti, - dag_rel_path=Path(self.fileloc), - generator=executor.jwt_generator, - # For the system test/debug purpose, we use the default bundle which uses - # local file system. If it turns out to be a feature people want, we could - # plumb the Bundle to use as a parameter to dag.test - bundle_info=BundleInfo(name="dags-folder"), - ) - executor.queue_workload(workload, session=session) - ti.state = TaskInstanceState.QUEUED - session.commit() - else: - # Send the task to the executor - executor.queue_task_instance(ti, ignore_ti_state=True) - else: - # Run the task locally - try: - add_logger_if_needed(ti) - _run_task( - ti=ti, - inline_trigger=not triggerer_running, - session=session, - mark_success=mark_success, - ) - except Exception: - self.log.exception("Task failed; ti=%s", ti) - if use_executor: - executor.heartbeat() - from airflow.jobs.scheduler_job_runner import SchedulerDagBag, SchedulerJobRunner - - SchedulerJobRunner.process_executor_events( - executor=executor, job_id=None, scheduler_dag_bag=SchedulerDagBag(), session=session - ) - if use_executor: - executor.end() - return dr - @provide_session def create_dagrun( self, @@ -1792,19 +1500,26 @@ def create_dagrun( conf: dict | None = None, run_type: DagRunType, triggered_by: DagRunTriggeredByType, + triggering_user_name: str | None = None, state: DagRunState, start_date: datetime | None = None, creating_job_id: int | None = None, - backfill_id: int | None = None, + backfill_id: NonNegativeInt | None = None, session: Session = NEW_SESSION, ) -> DagRun: """ Create a run for this DAG to run its tasks. - :param start_date: the date this dag run should be evaluated + :param run_id: ID of the dag_run + :param logical_date: date of execution + :param run_after: the datetime before which dag won't run :param conf: Dict containing configuration/parameters to pass to the DAG + :param triggered_by: the entity which triggers the dag_run + :param triggering_user_name: the user name who triggers the dag_run + :param start_date: the date this dag run should be evaluated :param creating_job_id: ID of the job creating this DagRun :param backfill_id: ID of the backfill run if one exists + :param session: Unused. Only added in compatibility with database isolation mode :return: The created DAG run. 
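Given the parameter docs spelled out above, a call to ``create_dagrun`` might look like the following. This is an internal API, so the sketch is purely illustrative; ``dag``, ``logical_date``, ``run_after``, and ``session`` are assumed to be in scope, and all values are hypothetical::

    from airflow.utils.state import DagRunState
    from airflow.utils.types import DagRunTriggeredByType, DagRunType

    run = dag.create_dagrun(
        run_id="manual__2025-01-01T00:00:00+00:00",  # hypothetical run id
        logical_date=logical_date,
        run_after=run_after,
        conf={"param": "value"},
        run_type=DagRunType.MANUAL,
        triggered_by=DagRunTriggeredByType.TEST,
        triggering_user_name="alice",  # new in this change
        state=DagRunState.QUEUED,
        session=session,
    )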
:meta private: @@ -1847,15 +1562,12 @@ def create_dagrun( # todo: AIP-78 add verification that if run type is backfill then we have a backfill id - if TYPE_CHECKING: - # TODO: Task-SDK: remove this assert - assert self.params # create a copy of params before validating copied_params = copy.deepcopy(self.params) if conf: copied_params.update(conf) copied_params.validate() - return _create_orm_dagrun( + orm_dagrun = _create_orm_dagrun( dag=self, run_id=run_id, logical_date=logical_date, @@ -1868,9 +1580,26 @@ def create_dagrun( creating_job_id=creating_job_id, backfill_id=backfill_id, triggered_by=triggered_by, + triggering_user_name=triggering_user_name, session=session, ) + if self.deadline and isinstance(self.deadline.reference, DeadlineReference.TYPES.DAGRUN): + session.add( + Deadline( + deadline_time=self.deadline.reference.evaluate_with( + session=session, + interval=self.deadline.interval, + dag_id=self.dag_id, + run_id=run_id, + ), + callback=self.deadline.callback, + dagrun_id=orm_dagrun.id, + ) + ) + + return orm_dagrun + @classmethod @provide_session def bulk_write_to_db( @@ -1894,7 +1623,7 @@ def bulk_write_to_db( log.info("Sync %s DAGs", len(dags)) dag_op = DagModelOperation( bundle_name=bundle_name, bundle_version=bundle_version, dags={d.dag_id: d for d in dags} - ) # type: ignore[misc] + ) orm_dags = dag_op.add_dags(session=session) dag_op.update_dags(orm_dags, session=session) @@ -1910,11 +1639,11 @@ def bulk_write_to_db( asset_op.add_dag_asset_alias_references(orm_dags, orm_asset_aliases, session=session) asset_op.add_dag_asset_name_uri_references(session=session) asset_op.add_task_asset_references(orm_dags, orm_assets, session=session) - asset_op.add_asset_trigger_references(orm_assets, session=session) asset_op.activate_assets_if_possible(orm_assets.values(), session=session) + session.flush() # Activation is needed when we add trigger references. + asset_op.add_asset_trigger_references(orm_assets, session=session) dag_op.update_dag_asset_expression(orm_dags=orm_dags, orm_assets=orm_assets) - session.flush() @provide_session @@ -2014,7 +1743,7 @@ def get_task_assets( of_type: type[AssetT] = Asset, # type: ignore[assignment] ) -> Generator[tuple[str, AssetT], None, None]: for task in self.task_dict.values(): - directions = ("inlets",) if inlets else () + directions: tuple[str, ...] = ("inlets",) if inlets else () if outlets: directions += ("outlets",) for direction in directions: @@ -2041,16 +1770,7 @@ def from_sdk_dag(cls, dag: TaskSDKDag) -> DAG: if not field.init or field.name in ["edge_info"]: continue - value = getattr(dag, field.name) - - # Handle special cases where values need conversion - if field.name == "max_consecutive_failed_dag_runs": - # SchedulerDAG requires this to be >= 0, while TaskSDKDag allows -1 - if value == -1: - # If it is -1, we get the default value from the DAG - continue - - kwargs[field.name] = value + kwargs[field.name] = getattr(dag, field.name) new_dag = cls(**kwargs) @@ -2075,7 +1795,7 @@ def create_tasks(task): if isinstance(task, TaskGroup): return task_group_map[task.group_id] - new_task = copy.deepcopy(task) + new_task = copy.copy(task) # Only overwrite the specific attributes we want to change new_task.task_id = task.task_id @@ -2149,7 +1869,7 @@ class DagModel(Base): __tablename__ = "dag" """ - These items are stored in the database for state related information + These items are stored in the database for state related information. 
""" dag_id = Column(StringID(), primary_key=True) # A DAG can be paused from the UI / DB @@ -2184,6 +1904,8 @@ class DagModel(Base): timetable_description = Column(String(1000), nullable=True) # Asset expression based on asset triggers asset_expression = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + # DAG deadline information + _deadline = Column("deadline", sqlalchemy_jsonfield.JSONField(json=json), nullable=True) # Tags for view filter tags = relationship("DagTag", cascade="all, delete, delete-orphan", backref=backref("dag")) # Dag owner links for DAGs view @@ -2231,6 +1953,10 @@ class DagModel(Base): cascade="all, delete, delete-orphan", ) schedule_assets = association_proxy("schedule_asset_references", "asset") + task_inlet_asset_references = relationship( + "TaskInletAssetReference", + cascade="all, delete, delete-orphan", + ) task_outlet_asset_references = relationship( "TaskOutletAssetReference", cascade="all, delete, delete-orphan", @@ -2277,6 +2003,16 @@ def next_dagrun_data_interval(self, value: tuple[datetime, datetime] | None) -> else: self.next_dagrun_data_interval_start, self.next_dagrun_data_interval_end = value + @property + def deadline(self): + """Get the deserialized deadline alert.""" + return DeadlineAlert.deserialize_deadline_alert(self._deadline) if self._deadline else None + + @deadline.setter + def deadline(self, value): + """Set and serialize the deadline alert.""" + self._deadline = value if isinstance(value, dict) else value.serialize_deadline_alert() + @property def timezone(self): return settings.TIMEZONE @@ -2540,47 +2276,6 @@ def get_asset_triggered_next_run_info(self, *, session=NEW_SESSION) -> dict[str, """:sphinx-autoapi-skip:""" -def _run_inline_trigger(trigger): - async def _run_inline_trigger_main(): - # We can replace it with `return await anext(trigger.run(), default=None)` - # when we drop support for Python 3.9 - try: - return await trigger.run().__anext__() - except StopAsyncIteration: - return None - - return asyncio.run(_run_inline_trigger_main()) - - -def _run_task( - *, ti: TaskInstance, inline_trigger: bool = False, mark_success: bool = False, session: Session -): - """ - Run a single task instance, and push result to Xcom for downstream tasks. - - Bypasses a lot of extra steps used in `task.run` to keep our local running as fast as - possible. This function is only meant for the `dag.test` function as a helper function. - - Args: - ti: TaskInstance to run - """ - log.info("[DAG TEST] starting task_id=%s map_index=%s", ti.task_id, ti.map_index) - while True: - try: - log.info("[DAG TEST] running task %s", ti) - ti._run_raw_task(session=session, raise_on_defer=inline_trigger, mark_success=mark_success) - break - except TaskDeferred as e: - log.info("[DAG TEST] running trigger in line") - event = _run_inline_trigger(e.trigger) - ti.next_method = e.method_name - ti.next_kwargs = {"event": event.payload} if event else e.kwargs - log.info("[DAG TEST] Trigger completed") - session.merge(ti) - session.commit() - log.info("[DAG TEST] end task task_id=%s map_index=%s", ti.task_id, ti.map_index) - - def _get_or_create_dagrun( *, dag: DAG, @@ -2590,6 +2285,7 @@ def _get_or_create_dagrun( run_after: datetime, conf: dict | None, triggered_by: DagRunTriggeredByType, + triggering_user_name: str | None, start_date: datetime, session: Session, ) -> DagRun: @@ -2604,6 +2300,7 @@ def _get_or_create_dagrun( :param logical_date: Logical date for finding an existing run. :param run_id: Run ID for the new DAG run. 
:param triggered_by: the entity which triggers the dag_run + :param triggering_user_name: the user name who triggers the dag_run :return: The newly created DAG run. """ @@ -2622,6 +2319,7 @@ def _get_or_create_dagrun( run_type=DagRunType.MANUAL, state=DagRunState.RUNNING, triggered_by=triggered_by, + triggering_user_name=triggering_user_name, start_date=start_date or logical_date, session=session, ) diff --git a/airflow-core/src/airflow/models/dag_favorite.py b/airflow-core/src/airflow/models/dag_favorite.py new file mode 100644 index 0000000000000..5dfb742fdaf80 --- /dev/null +++ b/airflow-core/src/airflow/models/dag_favorite.py @@ -0,0 +1,31 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from sqlalchemy import Column, ForeignKey + +from airflow.models.base import Base, StringID + + +class DagFavorite(Base): + """Association table model linking users to their favorite DAGs.""" + + __tablename__ = "dag_favorite" + + user_id = Column(StringID(), primary_key=True) + dag_id = Column(StringID(), ForeignKey("dag.dag_id", ondelete="CASCADE"), primary_key=True) diff --git a/airflow-core/src/airflow/models/dag_version.py b/airflow-core/src/airflow/models/dag_version.py index 529d5b7d4eefe..a51f3cb301867 100644 --- a/airflow-core/src/airflow/models/dag_version.py +++ b/airflow-core/src/airflow/models/dag_version.py @@ -22,11 +22,11 @@ import uuid6 from sqlalchemy import Column, ForeignKey, Integer, UniqueConstraint, select -from sqlalchemy.orm import relationship +from sqlalchemy.orm import joinedload, relationship from sqlalchemy_utils import UUIDType +from airflow._shared.timezones import timezone from airflow.models.base import Base, StringID -from airflow.utils import timezone from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime, with_row_locks @@ -108,11 +108,14 @@ def write_dag( ) log.debug("Writing DagVersion %s to the DB", dag_version) session.add(dag_version) + session.commit() log.debug("DagVersion %s written to the DB", dag_version) return dag_version @classmethod - def _latest_version_select(cls, dag_id: str, bundle_version: str | None = None) -> Select: + def _latest_version_select( + cls, dag_id: str, bundle_version: str | None = None, load_dag_model: bool = False + ) -> Select: """ Get the select object to get the latest version of the DAG. 
@@ -122,22 +125,34 @@ def _latest_version_select(cls, dag_id: str, bundle_version: str | None = None) query = select(cls).where(cls.dag_id == dag_id) if bundle_version: query = query.where(cls.bundle_version == bundle_version) + + if load_dag_model: + query = query.options(joinedload(cls.dag_model)) + query = query.order_by(cls.created_at.desc()).limit(1) return query @classmethod @provide_session def get_latest_version( - cls, dag_id: str, *, bundle_version: str | None = None, session: Session = NEW_SESSION + cls, + dag_id: str, + *, + bundle_version: str | None = None, + load_dag_model: bool = False, + session: Session = NEW_SESSION, ) -> DagVersion | None: """ Get the latest version of the DAG. :param dag_id: The DAG ID. :param session: The database session. + :param load_dag_model: Whether to load the DAG model. :return: The latest version of the DAG or None if not found. """ - return session.scalar(cls._latest_version_select(dag_id, bundle_version=bundle_version)) + return session.scalar( + cls._latest_version_select(dag_id, bundle_version=bundle_version, load_dag_model=load_dag_model) + ) @classmethod @provide_session diff --git a/airflow-core/src/airflow/models/dagbag.py b/airflow-core/src/airflow/models/dagbag.py index 393d01ce7a2c2..c84c1ae31ffcc 100644 --- a/airflow-core/src/airflow/models/dagbag.py +++ b/airflow-core/src/airflow/models/dagbag.py @@ -17,14 +17,17 @@ # under the License. from __future__ import annotations +import contextlib import hashlib import importlib import importlib.machinery import importlib.util import os +import signal import sys import textwrap import traceback +import warnings import zipfile from datetime import datetime, timedelta from pathlib import Path @@ -33,24 +36,27 @@ from sqlalchemy import ( Column, String, + inspect, ) +from sqlalchemy.orm import joinedload +from sqlalchemy.orm.attributes import NO_VALUE from tabulate import tabulate from airflow import settings +from airflow._shared.timezones import timezone from airflow.configuration import conf from airflow.exceptions import ( AirflowClusterPolicyError, AirflowClusterPolicySkipDag, AirflowClusterPolicyViolation, - AirflowDagCycleException, AirflowDagDuplicatedIdException, AirflowException, + AirflowTaskTimeout, ) from airflow.listeners.listener import get_listener_manager from airflow.models.base import Base, StringID +from airflow.models.dag_version import DagVersion from airflow.stats import Stats -from airflow.utils import timezone -from airflow.utils.dag_cycle_tester import check_cycle from airflow.utils.docs import get_docs_url from airflow.utils.file import ( correct_maybe_zipped, @@ -60,18 +66,43 @@ ) from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.session import NEW_SESSION, provide_session -from airflow.utils.timeout import timeout from airflow.utils.types import NOTSET -from airflow.utils.warnings import capture_with_reraise + +try: + from airflow.sdk.exceptions import AirflowDagCycleException +except ImportError: + from airflow.exceptions import AirflowDagCycleException # type: ignore[no-redef] if TYPE_CHECKING: + from collections.abc import Generator + from sqlalchemy.orm import Session + from airflow.models import DagRun from airflow.models.dag import DAG from airflow.models.dagwarning import DagWarning from airflow.utils.types import ArgNotSet +@contextlib.contextmanager +def _capture_with_reraise() -> Generator[list[warnings.WarningMessage], None, None]: + """Capture warnings in context and re-raise it on exit from the context manager.""" + 
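+    # Warnings raised inside the ``with`` block are recorded rather than emitted;
+    # on exit they are re-emitted via ``warnings.warn_explicit`` so the original
+    # filename/lineno attribution is preserved.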
captured_warnings = [] + try: + with warnings.catch_warnings(record=True) as captured_warnings: + yield captured_warnings + finally: + if captured_warnings: + for cw in captured_warnings: + warnings.warn_explicit( + message=cw.message, + category=cw.category, + filename=cw.filename, + lineno=cw.lineno, + source=cw.source, + ) + + class FileLoadStat(NamedTuple): """ Information about single file. @@ -92,6 +123,30 @@ class FileLoadStat(NamedTuple): warning_num: int +@contextlib.contextmanager +def timeout(seconds=1, error_message="Timeout"): + import logging + + log = logging.getLogger(__name__) + error_message = error_message + ", PID: " + str(os.getpid()) + + def handle_timeout(signum, frame): + """Log information and raises AirflowTaskTimeout.""" + log.error("Process timed out, PID: %s", str(os.getpid())) + raise AirflowTaskTimeout(error_message) + + try: + try: + signal.signal(signal.SIGALRM, handle_timeout) + signal.setitimer(signal.ITIMER_REAL, seconds) + except ValueError: + log.warning("timeout can't be used in the current context", exc_info=True) + yield + finally: + with contextlib.suppress(ValueError): + signal.setitimer(signal.ITIMER_REAL, 0) + + class DagBag(LoggingMixin): """ A dagbag is a collection of dags, parsed out of a folder tree and has high level configuration settings. @@ -129,7 +184,7 @@ def __init__( bundle_path: Path | None = None, ): super().__init__() - self.bundle_path: Path | None = bundle_path + self.bundle_path = bundle_path include_examples = ( include_examples if isinstance(include_examples, bool) @@ -144,6 +199,7 @@ def __init__( self.dags: dict[str, DAG] = {} # the file's last modified timestamp when we last read it self.file_last_changed: dict[str, datetime] = {} + # Store import errors with relative file paths as keys (relative to bundle_path) self.import_errors: dict[str, str] = {} self.captured_warnings: dict[str, tuple[str, ...]] = {} self.has_logged = False @@ -235,23 +291,21 @@ def get_dag(self, dag_id, session: Session = None): # If asking for a known subdag, we want to refresh the parent dag = None - root_dag_id = dag_id if dag_id in self.dags: dag = self.dags[dag_id] # If DAG Model is absent, we can't check last_expired property. Is the DAG not yet synchronized? - orm_dag = DagModel.get_current(root_dag_id, session=session) + orm_dag = DagModel.get_current(dag_id, session=session) if not orm_dag: return self.dags.get(dag_id) - # If the dag corresponding to root_dag_id is absent or expired - is_missing = root_dag_id not in self.dags + is_missing = dag_id not in self.dags is_expired = ( orm_dag.last_expired and dag and dag.last_loaded and dag.last_loaded < orm_dag.last_expired ) if is_expired: # Remove associated dags so we can re-add them. - self.dags = {key: dag for key, dag in self.dags.items()} + self.dags.pop(dag_id, None) if is_missing or is_expired: # Reprocess source file. found_dags = self.process_file( @@ -308,7 +362,7 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True): DagContext.autoregistered_dags.clear() self.captured_warnings.pop(filepath, None) - with capture_with_reraise() as captured_warnings: + with _capture_with_reraise() as captured_warnings: if filepath.endswith(".py") or not zipfile.is_zipfile(filepath): mods = self._load_modules_from_file(filepath, safe_mode) else: @@ -357,9 +411,32 @@ def get_pools(dag) -> dict[str, set[str]]: ) return warnings + def _get_relative_fileloc(self, filepath: str) -> str: + """ + Get the relative file location for a given filepath. 
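+
+        For example, with ``bundle_path=/opt/airflow/bundles/my_bundle`` (an illustrative
+        path), the file ``/opt/airflow/bundles/my_bundle/dags/example.py`` resolves to
+        ``dags/example.py``.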
+ + :param filepath: Absolute path to the file + :return: Relative path from bundle_path, or original filepath if no bundle_path + """ + if self.bundle_path: + return str(Path(filepath).relative_to(self.bundle_path)) + return filepath + def _load_modules_from_file(self, filepath, safe_mode): from airflow.sdk.definitions._internal.contextmanager import DagContext + def handler(signum, frame): + """Handle SIGSEGV signal and let the user know that the import failed.""" + msg = f"Received SIGSEGV signal while processing {filepath}." + self.log.error(msg) + relative_filepath = self._get_relative_fileloc(filepath) + self.import_errors[relative_filepath] = msg + + try: + signal.signal(signal.SIGSEGV, handler) + except ValueError: + self.log.warning("SIGSEGV signal handler registration failed. Not in the main thread") + if not might_contain_dag(filepath, safe_mode): # Don't want to spam user with skip messages if not self.has_logged: @@ -392,12 +469,13 @@ def parse(mod_name, filepath): # This would also catch `exit()` in a dag file DagContext.autoregistered_dags.clear() self.log.exception("Failed to import: %s", filepath) + relative_filepath = self._get_relative_fileloc(filepath) if self.dagbag_import_error_tracebacks: - self.import_errors[filepath] = traceback.format_exc( + self.import_errors[relative_filepath] = traceback.format_exc( limit=-self.dagbag_import_error_traceback_depth ) else: - self.import_errors[filepath] = str(e) + self.import_errors[relative_filepath] = str(e) return [] dagbag_import_timeout = settings.get_dagbag_import_timeout(filepath) @@ -457,12 +535,13 @@ def _load_modules_from_zip(self, filepath, safe_mode): DagContext.autoregistered_dags.clear() fileloc = os.path.join(filepath, zip_info.filename) self.log.exception("Failed to import: %s", fileloc) + relative_fileloc = self._get_relative_fileloc(fileloc) if self.dagbag_import_error_tracebacks: - self.import_errors[fileloc] = traceback.format_exc( + self.import_errors[relative_fileloc] = traceback.format_exc( limit=-self.dagbag_import_error_traceback_depth ) else: - self.import_errors[fileloc] = str(e) + self.import_errors[relative_fileloc] = str(e) finally: if sys.path[0] == filepath: del sys.path[0] @@ -484,10 +563,8 @@ def _process_modules(self, filepath, mods, file_last_changed_on_disk): for dag, mod in top_level_dags: dag.fileloc = mod.__file__ - if self.bundle_path: - dag.relative_fileloc = str(Path(mod.__file__).relative_to(self.bundle_path)) - else: - dag.relative_fileloc = dag.fileloc + relative_fileloc = self._get_relative_fileloc(dag.fileloc) + dag.relative_fileloc = relative_fileloc try: dag.validate() self.bag_dag(dag=dag) @@ -495,7 +572,7 @@ def _process_modules(self, filepath, mods, file_last_changed_on_disk): pass except Exception as e: self.log.exception("Failed to bag_dag: %s", dag.fileloc) - self.import_errors[dag.fileloc] = f"{type(e).__name__}: {e}" + self.import_errors[relative_fileloc] = f"{type(e).__name__}: {e}" self.file_last_changed[dag.fileloc] = file_last_changed_on_disk else: found_dags.append(dag) @@ -505,10 +582,10 @@ def bag_dag(self, dag: DAG): """ Add the DAG into the bag. - :raises: AirflowDagCycleException if a cycle is detected in this dag or its subdags. - :raises: AirflowDagDuplicatedIdException if this dag or its subdags already exists in the bag. + :raises: AirflowDagCycleException if a cycle is detected. + :raises: AirflowDagDuplicatedIdException if this dag already exists in the bag. 
""" - check_cycle(dag) # throws if a task cycle is found + dag.check_cycle() # throws exception if a task cycle is found dag.resolve_template_files() dag.last_loaded = timezone.utcnow() @@ -655,17 +732,85 @@ def sync_to_db(self, bundle_name: str, bundle_version: str | None, session: Sess else LazyDeserializedDAG(data=SerializedDAG.to_dict(dag)) for dag in self.dags.values() ] + import_errors = {(bundle_name, rel_path): error for rel_path, error in self.import_errors.items()} update_dag_parsing_results_in_db( bundle_name, bundle_version, dags, - self.import_errors, + import_errors, self.dag_warnings, session=session, ) +class DBDagBag: + """ + Internal class for retrieving and caching dags in the scheduler. + + :meta private: + """ + + def __init__(self, load_op_links: bool = True): + self._dags: dict[str, DAG] = {} # dag_version_id to dag + self.load_op_links = load_op_links + + def _get_dag(self, version_id: str, session: Session) -> DAG | None: + if dag := self._dags.get(version_id): + return dag + dag_version = session.get(DagVersion, version_id, options=[joinedload(DagVersion.serialized_dag)]) + if not dag_version: + return None + serdag = dag_version.serialized_dag + if not serdag: + return None + serdag.load_op_links = self.load_op_links + dag = serdag.dag + if not dag: + return None + self._dags[version_id] = dag + return dag + + @staticmethod + def _version_from_dag_run(dag_run, session): + if not dag_run.bundle_version: + dag_version = DagVersion.get_latest_version(dag_id=dag_run.dag_id, session=session) + if dag_version: + return dag_version + + # Check if created_dag_version relationship is already loaded to avoid DetachedInstanceError + info = inspect(dag_run) + if info.attrs.created_dag_version.loaded_value is not NO_VALUE: + # Relationship is already loaded, safe to access + return dag_run.created_dag_version + + # Relationship not loaded, fetch it explicitly from current session + return session.get(DagVersion, dag_run.created_dag_version_id) + + def get_dag_for_run(self, dag_run: DagRun, session: Session) -> DAG | None: + version = self._version_from_dag_run(dag_run=dag_run, session=session) + if not version: + return None + return self._get_dag(version_id=version.id, session=session) + + def get_latest_version_of_dag(self, dag_id: str, session: Session) -> DAG | None: + """ + Get the latest version of a DAG by its ID. + + This method retrieves the latest version of the DAG with the given ID. 
+        """
+        from airflow.models.serialized_dag import SerializedDagModel
+
+        serdag = SerializedDagModel.get(dag_id, session=session)
+        if not serdag:
+            return None
+        serdag.load_op_links = self.load_op_links
+        dag = serdag.dag
+
+        self._dags[serdag.dag_version.id] = dag
+        return dag
+
+
 def generate_md5_hash(context):
     bundle_name = context.get_current_parameters()["bundle_name"]
     relative_fileloc = context.get_current_parameters()["relative_fileloc"]
diff --git a/airflow-core/src/airflow/models/dagbundle.py b/airflow-core/src/airflow/models/dagbundle.py
index e1f99d5effcc9..f0343d9de7cd7 100644
--- a/airflow-core/src/airflow/models/dagbundle.py
+++ b/airflow-core/src/airflow/models/dagbundle.py
@@ -17,12 +17,14 @@
 from __future__ import annotations

 from sqlalchemy import Boolean, Column, String
+from sqlalchemy_utils import JSONType

 from airflow.models.base import Base, StringID
+from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.sqlalchemy import UtcDateTime


-class DagBundleModel(Base):
+class DagBundleModel(Base, LoggingMixin):
     """
     A table for storing DAG bundle metadata.

@@ -32,6 +34,8 @@ class DagBundleModel(Base):
     - active: Is the bundle currently found in configuration?
     - version: The latest version Airflow has seen for the bundle.
     - last_refreshed: When the bundle was last refreshed.
+    - signed_url_template: Signed URL template for viewing the bundle.
+    - template_params: JSON object of template parameters used to build the view URL (e.g., {"subdir": "dags"}).
     """

@@ -40,6 +44,59 @@
     active = Column(Boolean, default=True)
     version = Column(String(200), nullable=True)
     last_refreshed = Column(UtcDateTime, nullable=True)
+    signed_url_template = Column(String(200), nullable=True)
+    template_params = Column(JSONType, nullable=True)

-    def __init__(self, *, name: str):
+    def __init__(self, *, name: str, version: str | None = None):
+        super().__init__()
         self.name = name
+        self.version = version
+
+    def _unsign_url(self) -> str | None:
+        """
+        Unsign ``self.signed_url_template`` to recover the original URL template.
+
+        :return: The original URL template, or None if unsigning fails.
+        """
+        try:
+            from itsdangerous import URLSafeSerializer
+
+            from airflow.configuration import conf
+
+            serializer = URLSafeSerializer(conf.get_mandatory_value("core", "fernet_key"))
+            payload = serializer.loads(self.signed_url_template)
+            if isinstance(payload, dict) and "url" in payload and "bundle_name" in payload:
+                if payload["bundle_name"] == self.name:
+                    return payload["url"]
+
+            return None
+        except Exception:
+            # Covers itsdangerous.BadSignature as well as any deserialization error.
+            return None
+
+    def render_url(self, version: str | None = None) -> str | None:
+        """
+        Render the URL template with the given version and stored template parameters.
+
+        First unsigns the URL to get the original template, then formats it with
+        the provided version and any additional parameters.
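+
+        For example, a stored template ``https://example.com/repo/tree/{version}/{subdir}``
+        with ``template_params={"subdir": "dags"}`` and ``version="abc123"`` renders as
+        ``https://example.com/repo/tree/abc123/dags`` (illustrative values).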
+ + :param version: The version to substitute in the template + :return: The rendered URL or None if no template is available + """ + if not self.signed_url_template: + return None + + url_template = self._unsign_url() + + if url_template is None: + return None + + params = dict(self.template_params or {}) + params["version"] = version + + try: + return url_template.format(**params) + except (KeyError, ValueError) as e: + self.log.warning("Failed to render URL template for bundle %s: %s", self.name, e) + return None diff --git a/airflow-core/src/airflow/models/dagcode.py b/airflow-core/src/airflow/models/dagcode.py index 2db338f87b50e..a68885e1dfdf2 100644 --- a/airflow-core/src/airflow/models/dagcode.py +++ b/airflow-core/src/airflow/models/dagcode.py @@ -26,10 +26,10 @@ from sqlalchemy.sql.expression import literal from sqlalchemy_utils import UUIDType +from airflow._shared.timezones import timezone from airflow.configuration import conf from airflow.exceptions import DagCodeNotFound from airflow.models.base import ID_LEN, Base -from airflow.utils import timezone from airflow.utils.file import open_maybe_zipped from airflow.utils.hashlib_wrapper import md5 from airflow.utils.session import NEW_SESSION, provide_session diff --git a/airflow-core/src/airflow/models/dagrun.py b/airflow-core/src/airflow/models/dagrun.py index 929ad80f25429..9eed649f0cad3 100644 --- a/airflow-core/src/airflow/models/dagrun.py +++ b/airflow-core/src/airflow/models/dagrun.py @@ -21,17 +21,10 @@ import os import re from collections import defaultdict -from collections.abc import Iterable, Iterator, Sequence -from typing import ( - TYPE_CHECKING, - Any, - Callable, - NamedTuple, - TypeVar, - Union, - overload, -) +from collections.abc import Callable, Iterable, Iterator, Sequence +from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar, cast, overload +from natsort import natsorted from sqlalchemy import ( JSON, Column, @@ -45,6 +38,7 @@ Text, UniqueConstraint, and_, + case, func, not_, or_, @@ -54,17 +48,19 @@ from sqlalchemy.dialects import postgresql from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.associationproxy import association_proxy +from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.orm import declared_attr, joinedload, relationship, synonym, validates -from sqlalchemy.sql.expression import case, false, select +from sqlalchemy.sql.expression import false, select from sqlalchemy.sql.functions import coalesce from sqlalchemy_utils import UUIDType -from airflow.callbacks.callback_requests import DagCallbackRequest +from airflow._shared.timezones import timezone +from airflow.callbacks.callback_requests import DagCallbackRequest, DagRunContext from airflow.configuration import conf as airflow_conf from airflow.exceptions import AirflowException, TaskNotFound from airflow.listeners.listener import get_listener_manager -from airflow.models import Log +from airflow.models import Deadline, Log from airflow.models.backfill import Backfill from airflow.models.base import Base, StringID from airflow.models.taskinstance import TaskInstance as TI @@ -72,11 +68,11 @@ from airflow.models.tasklog import LogTemplate from airflow.models.taskmap import TaskMap from airflow.sdk.definitions._internal.abstractoperator import NotMapped +from airflow.sdk.definitions.deadline import DeadlineReference from airflow.stats import Stats from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.dependencies_states import SCHEDULEABLE_STATES 
from airflow.traces.tracer import EmptySpan, Trace -from airflow.utils import timezone from airflow.utils.dates import datetime_to_nano from airflow.utils.helpers import chunks, is_container, prune_dict from airflow.utils.log.logging_mixin import LoggingMixin @@ -91,19 +87,29 @@ if TYPE_CHECKING: from datetime import datetime + from typing import Literal, TypeAlias from opentelemetry.sdk.trace import Span + from pydantic import NonNegativeInt from sqlalchemy.orm import Query, Session + from sqlalchemy.sql.elements import Case - from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG from airflow.models.dag_version import DagVersion - from airflow.models.operator import Operator - from airflow.typing_compat import Literal + from airflow.models.mappedoperator import MappedOperator + from airflow.models.taskinstancekey import TaskInstanceKey + from airflow.sdk import DAG as SDKDAG + from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.utils.types import ArgNotSet + Operator: TypeAlias = MappedOperator | SerializedBaseOperator + CreatedTasks = TypeVar("CreatedTasks", Iterator["dict[str, Any]"], Iterator[TI]) + AttributeValueType = ( + str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float] + ) + RUN_ID_REGEX = r"^(?:manual|scheduled|asset_triggered)__(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\+00:00)$" @@ -123,7 +129,7 @@ def _default_run_after(ctx): def _creator_note(val): - """Creator the ``note`` association proxy.""" + """Creator for the ``note`` association proxy.""" if isinstance(val, str): return DagRunNote(content=val) if isinstance(val, dict): @@ -156,6 +162,10 @@ class DagRun(Base, LoggingMixin): triggered_by = Column( Enum(DagRunTriggeredByType, native_enum=False, length=50) ) # Airflow component that triggered the run. + triggering_user_name = Column( + String(512), + nullable=True, + ) # The user that triggered the DagRun, if applicable conf = Column(JSON().with_variant(postgresql.JSONB, "postgresql")) # These two must be either both NULL or both datetime. data_interval_start = Column(UtcDateTime) @@ -254,6 +264,13 @@ class DagRun(Base, LoggingMixin): cascade="all, delete, delete-orphan", ) + deadlines = relationship( + "Deadline", + back_populates="dagrun", + uselist=True, + cascade="all, delete, delete-orphan", + ) + created_dag_version = relationship("DagVersion", uselist=False, passive_deletes=True) """ The dag version that was active when the dag run was created, if available. @@ -290,7 +307,8 @@ def __init__( creating_job_id: int | None = None, data_interval: tuple[datetime, datetime] | None = None, triggered_by: DagRunTriggeredByType | None = None, - backfill_id: int | None = None, + triggering_user_name: str | None = None, + backfill_id: NonNegativeInt | None = None, bundle_version: str | None = None, ): # For manual runs where logical_date is None, ensure no data_interval is set. 
@@ -320,6 +338,7 @@ def __init__( self.backfill_id = backfill_id self.clear_number = 0 self.triggered_by = triggered_by + self.triggering_user_name = triggering_user_name self.scheduled_by_job_id = None self.context_carrier = {} super().__init__() @@ -348,7 +367,7 @@ def dag_versions(self) -> list[DagVersion]: """Return the DAG versions associated with the TIs of this DagRun.""" # when the dag is in a versioned bundle, we keep the dag version fixed if self.bundle_version: - return [self.created_dag_version] + return [self.created_dag_version] if self.created_dag_version is not None else [] dag_versions = [ dv for dv in dict.fromkeys(list(self._tih_dag_versions) + list(self._ti_dag_versions)) @@ -365,6 +384,26 @@ def version_number(self) -> int | None: return dag_versions[-1].version_number return None + @hybrid_property + def duration(self) -> float | None: + if self.end_date and self.start_date: + return (self.end_date - self.start_date).total_seconds() + return None + + @duration.expression # type: ignore[no-redef] + @provide_session + def duration(cls, session: Session = NEW_SESSION) -> Case: + dialect_name = session.bind.dialect.name + if dialect_name == "mysql": + return func.timestampdiff(text("SECOND"), cls.start_date, cls.end_date) + + when_condition = ( + (cls.end_date != None) & (cls.start_date != None), # noqa: E711 + func.extract("epoch", cls.end_date - cls.start_date), + ) + + return case(when_condition, else_=None) + @provide_session def check_version_id_exists_in_dr(self, dag_version_id: UUIDType, session: Session = NEW_SESSION): select_stmt = ( @@ -944,19 +983,8 @@ def set_dagrun_span_attrs(self, span: Span | EmptySpan): if self._state == DagRunState.FAILED: span.set_attribute("airflow.dag_run.error", True) - attribute_value_type = Union[ - str, - bool, - int, - float, - Sequence[str], - Sequence[bool], - Sequence[int], - Sequence[float], - ] - # Explicitly set the value type to Union[...] to avoid a mypy error. - attributes: dict[str, attribute_value_type] = { + attributes: dict[str, AttributeValueType] = { "airflow.category": "DAG runs", "airflow.dag_run.dag_id": str(self.dag_id), "airflow.dag_run.logical_date": str(self.logical_date), @@ -981,7 +1009,7 @@ def set_dagrun_span_attrs(self, span: Span | EmptySpan): def start_dr_spans_if_needed(self, tis: list[TI]): # If there is no value in active_spans, then the span hasn't already been started. - if self.active_spans is not None and self.active_spans.get(self.run_id) is None: + if self.active_spans is not None and self.active_spans.get("dr:" + str(self.id)) is None: if self.span_status == SpanStatus.NOT_STARTED or self.span_status == SpanStatus.NEEDS_CONTINUANCE: dr_span = None continue_ti_spans = False @@ -1014,7 +1042,7 @@ def start_dr_spans_if_needed(self, tis: list[TI]): self.context_carrier = carrier self.span_status = SpanStatus.ACTIVE # Set the span in a synchronized dictionary, so that the variable can be used to end the span. 
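+                    # Span-dict keys are namespaced below ("dr:<id>" / "ti:<id>") so DagRun
+                    # and TaskInstance entries cannot collide in the shared active_spans dict.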
-                    self.active_spans.set(self.run_id, dr_span)
+                    self.active_spans.set("dr:" + str(self.id), dr_span)
                     self.log.debug(
                         "DagRun span has been started and the injected context_carrier is: %s",
                         self.context_carrier,
@@ -1032,9 +1060,9 @@ def start_dr_spans_if_needed(self, tis: list[TI]):
                         ti_carrier = Trace.inject()
                         ti.context_carrier = ti_carrier
                         ti.span_status = SpanStatus.ACTIVE
-                        self.active_spans.set(ti.key, ti_span)
+                        self.active_spans.set("ti:" + ti.id, ti_span)
             else:
-                self.log.info(
+                self.log.debug(
                     "Found span_status '%s', while updating state for dag_run '%s'",
                     self.span_status,
                     self.run_id,
@@ -1042,7 +1070,7 @@ def start_dr_spans_if_needed(self, tis: list[TI]):
     def end_dr_span_if_needed(self):
         if self.active_spans is not None:
-            active_span = self.active_spans.get(self.run_id)
+            active_span = self.active_spans.get("dr:" + str(self.id))
             if active_span is not None:
                 self.log.debug(
                     "Found active span with span_id: %s, for dag_id: %s, run_id: %s, state: %s",
@@ -1055,7 +1083,7 @@ def end_dr_span_if_needed(self):
                 self.set_dagrun_span_attrs(span=active_span)
                 active_span.end(end_time=datetime_to_nano(self.end_date))
                 # Remove the span from the dict.
-                self.active_spans.delete(self.run_id)
+                self.active_spans.delete("dr:" + str(self.id))
                 self.span_status = SpanStatus.ENDED
             else:
                 if self.span_status == SpanStatus.ACTIVE:
@@ -1146,8 +1174,8 @@ def recalculate(self) -> _UnfinishedStates:
             self.set_state(DagRunState.FAILED)
             self.notify_dagrun_state_changed(msg="task_failure")

-            if execute_callbacks:
-                dag.handle_callback(self, success=False, reason="task_failure", session=session)
+            if execute_callbacks and dag.has_on_failure_callback:
+                self.handle_dag_callback(dag=dag, success=False, reason="task_failure")
             elif dag.has_on_failure_callback:
                 callback = DagCallbackRequest(
                     filepath=self.dag_model.relative_fileloc,
@@ -1155,6 +1183,10 @@ def recalculate(self) -> _UnfinishedStates:
                     run_id=self.run_id,
                     bundle_name=self.dag_model.bundle_name,
                     bundle_version=self.bundle_version,
+                    context_from_server=DagRunContext(
+                        dag_run=self,
+                        last_ti=self.get_last_ti(dag=dag, session=session),
+                    ),
                     is_failure_callback=True,
                     msg="task_failure",
                 )
@@ -1175,8 +1207,8 @@ def recalculate(self) -> _UnfinishedStates:
             self.set_state(DagRunState.SUCCESS)
             self.notify_dagrun_state_changed(msg="success")

-            if execute_callbacks:
-                dag.handle_callback(self, success=True, reason="success", session=session)
+            if execute_callbacks and dag.has_on_success_callback:
+                self.handle_dag_callback(dag=dag, success=True, reason="success")
             elif dag.has_on_success_callback:
                 callback = DagCallbackRequest(
                     filepath=self.dag_model.relative_fileloc,
@@ -1184,18 +1216,26 @@ def recalculate(self) -> _UnfinishedStates:
                     run_id=self.run_id,
                     bundle_name=self.dag_model.bundle_name,
                     bundle_version=self.bundle_version,
+                    context_from_server=DagRunContext(
+                        dag_run=self,
+                        last_ti=self.get_last_ti(dag=dag, session=session),
+                    ),
                     is_failure_callback=False,
                     msg="success",
                 )

+            if (deadline := dag.deadline) and isinstance(deadline.reference, DeadlineReference.TYPES.DAGRUN):
+                # The dagrun has succeeded. If there were any Deadlines for it which were not breached, they are no longer needed.
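+                # Note: prune_deadlines() only removes deadlines the run beat
+                # (end_date <= deadline_time); a breached deadline is left for the
+                # scheduler to handle.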
+                Deadline.prune_deadlines(session=session, conditions={DagRun.run_id: self.run_id})
+
         # if *all tasks* are deadlocked, the run failed
         elif unfinished.should_schedule and not are_runnable_tasks:
             self.log.error("Task deadlock (no runnable tasks); marking run %s failed", self)
             self.set_state(DagRunState.FAILED)
             self.notify_dagrun_state_changed(msg="all_tasks_deadlocked")

-            if execute_callbacks:
-                dag.handle_callback(self, success=False, reason="all_tasks_deadlocked", session=session)
+            if execute_callbacks and dag.has_on_failure_callback:
+                self.handle_dag_callback(dag=dag, success=False, reason="all_tasks_deadlocked")
             elif dag.has_on_failure_callback:
                 callback = DagCallbackRequest(
                     filepath=self.dag_model.relative_fileloc,
@@ -1203,6 +1243,10 @@ def recalculate(self) -> _UnfinishedStates:
                     run_id=self.run_id,
                     bundle_name=self.dag_model.bundle_name,
                     bundle_version=self.bundle_version,
+                    context_from_server=DagRunContext(
+                        dag_run=self,
+                        last_ti=self.get_last_ti(dag=dag, session=session),
+                    ),
                     is_failure_callback=True,
                     msg="all_tasks_deadlocked",
                 )
@@ -1260,7 +1304,9 @@ def _filter_tis_and_exclude_removed(dag: DAG, tis: list[TI]) -> Iterable[TI]:
         """Populate ``ti.task`` while excluding those missing one, marking them as REMOVED."""
         for ti in tis:
             try:
-                ti.task = dag.get_task(ti.task_id)
+                # TODO (GH-52141): get_task in scheduler needs to return scheduler types
+                # instead, but currently it inherits SDK's DAG.
+                ti.task = cast("Operator", dag.get_task(ti.task_id))
             except TaskNotFound:
                 if ti.state != TaskInstanceState.REMOVED:
                     self.log.error("Failed to get task for ti %s. Marking it as removed.", ti)
@@ -1315,13 +1361,101 @@ def notify_dagrun_state_changed(self, msg: str = ""):
         # we can't get all the state changes on SchedulerJob,
         # or LocalTaskJob, so we don't want to "falsely advertise" we notify about that

+    @provide_session
+    def get_last_ti(self, dag: DAG, session: Session = NEW_SESSION) -> TI | None:
+        """Get the last TI of this dag run, used to build the execution context object the server passes when running callbacks."""
+        tis = self.get_task_instances(session=session)
+        # tis from a dagrun may not be a part of dag.partial_subset,
+        # since dag.partial_subset is a subset of the dag.
+        # This ensures that we will only use the accessible TI
+        # context for the callback.
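+        # (Assumed) TIs outside a partial subset were never scheduled and still have
+        # state ``None``, which is why they are dropped when ``dag.partial`` is set.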
+ if dag.partial: + tis = [ti for ti in tis if not ti.state == State.NONE] + # filter out removed tasks + tis = natsorted( + (ti for ti in tis if ti.state != TaskInstanceState.REMOVED), + key=lambda ti: ti.task_id, + ) + if not tis: + return None + ti = tis[-1] # get last TaskInstance of DagRun + return ti + + def handle_dag_callback(self, dag: SDKDAG, success: bool = True, reason: str = "success"): + """Only needed for `dag.test` where `execute_callbacks=True` is passed to `update_state`.""" + from airflow.api_fastapi.execution_api.datamodels.taskinstance import ( + DagRun as DRDataModel, + TaskInstance as TIDataModel, + TIRunContext, + ) + from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance + + last_ti = self.get_last_ti(dag) # type: ignore[arg-type] + if last_ti: + last_ti_model = TIDataModel.model_validate(last_ti, from_attributes=True) + task = dag.get_task(last_ti.task_id) + + dag_run_data = DRDataModel( + dag_id=self.dag_id, + run_id=self.run_id, + logical_date=self.logical_date, + data_interval_start=self.data_interval_start, + data_interval_end=self.data_interval_end, + run_after=self.run_after, + start_date=self.start_date, + end_date=self.end_date, + run_type=self.run_type, + state=self.state, + conf=self.conf, + consumed_asset_events=[], + ) + + runtime_ti = RuntimeTaskInstance.model_construct( + **last_ti_model.model_dump(exclude_unset=True), + task=task, + _ti_context_from_server=TIRunContext( + dag_run=dag_run_data, + max_tries=last_ti.max_tries, + variables=[], + connections=[], + xcom_keys_to_clear=[], + ), + max_tries=last_ti.max_tries, + ) + context = runtime_ti.get_template_context() + else: + context = { + "dag": dag, + "run_id": self.run_id, + } + + context["reason"] = reason + + callbacks = dag.on_success_callback if success else dag.on_failure_callback + if not callbacks: + self.log.warning("Callback requested, but dag didn't have any for DAG: %s.", dag.dag_id) + return + callbacks = callbacks if isinstance(callbacks, list) else [callbacks] + + for callback in callbacks: + self.log.info( + "Executing on_%s dag callback: %s", + "success" if success else "failure", + callback.__name__ if hasattr(callback, "__name__") else repr(callback), + ) + try: + callback(context) + except Exception: + self.log.exception("Callback failed for %s", dag.dag_id) + Stats.incr("dag.callback_exceptions", tags={"dag_id": dag.dag_id}) + def _get_ready_tis( self, schedulable_tis: list[TI], finished_tis: list[TI], session: Session, ) -> tuple[list[TI], bool, bool]: - old_states = {} + old_states: dict[TaskInstanceKey, Any] = {} ready_tis: list[TI] = [] changed_tis = False @@ -1373,10 +1507,10 @@ def _expand_mapped_task_if_needed(ti: TI) -> Iterable[TI] | None: # Check dependencies. 
expansion_happened = False # Set of task ids for which was already done _revise_map_indexes_if_mapped - revised_map_index_task_ids = set() + revised_map_index_task_ids: set[str] = set() for schedulable in itertools.chain(schedulable_tis, additional_tis): if TYPE_CHECKING: - assert isinstance(schedulable.task, BaseOperator) + assert isinstance(schedulable.task, SerializedBaseOperator) old_state = schedulable.state if not schedulable.are_dependencies_met(session=session, dep_context=dep_context): old_states[schedulable.key] = old_state @@ -1397,7 +1531,11 @@ def _expand_mapped_task_if_needed(ti: TI) -> Iterable[TI] | None: # It's enough to revise map index once per task id, # checking the map index for each mapped task significantly slows down scheduling if schedulable.task.task_id not in revised_map_index_task_ids: - ready_tis.extend(self._revise_map_indexes_if_mapped(schedulable.task, session=session)) + ready_tis.extend( + self._revise_map_indexes_if_mapped( + schedulable.task, dag_version_id=schedulable.dag_version_id, session=session + ) + ) revised_map_index_task_ids.add(schedulable.task.task_id) ready_tis.append(schedulable) @@ -1500,9 +1638,7 @@ def _emit_duration_stats_for_finished_state(self): Stats.timing(f"dagrun.duration.{self.state}", **timer_params) @provide_session - def verify_integrity( - self, *, session: Session = NEW_SESSION, dag_version_id: UUIDType | None = None - ) -> None: + def verify_integrity(self, *, session: Session = NEW_SESSION, dag_version_id: UUIDType) -> None: """ Verify the DagRun by checking for removed tasks or tasks that are not in the database yet. @@ -1539,8 +1675,13 @@ def task_filter(task: Operator) -> bool: ) # Create the missing tasks, including mapped tasks - tasks_to_create = (task for task in dag.task_dict.values() if task_filter(task)) - tis_to_create = self._create_tasks(tasks_to_create, task_creator, session=session) + tis_to_create = self._create_tasks( + # TODO (GH-52141): task_dict in scheduler should contain scheduler + # types instead, but currently it inherits SDK's DAG. + (task for task in cast("Iterable[Operator]", dag.task_dict.values()) if task_filter(task)), + task_creator, + session=session, + ) self._create_task_instances(self.dag_id, tis_to_create, created_counts, hook_is_noop, session=session) def _check_for_removed_or_restored_tasks( @@ -1556,8 +1697,8 @@ def _check_for_removed_or_restored_tasks( :return: Task IDs in the DAG run """ - from airflow.models.baseoperator import BaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import get_mapped_ti_count tis = self.get_task_instances(session=session) @@ -1594,7 +1735,7 @@ def _check_for_removed_or_restored_tasks( except NotFullyPopulated: # What if it is _now_ dynamically mapped, but wasn't before? try: - total_length = BaseOperator.get_mapped_ti_count(task, self.run_id, session=session) + total_length = get_mapped_ti_count(task, self.run_id, session=session) except NotFullyPopulated: # Not all upstreams finished, so we can't tell what should be here. Remove everything. if ti.map_index >= 0: @@ -1632,7 +1773,7 @@ def _get_task_creator( created_counts: dict[str, int], ti_mutation_hook: Callable, hook_is_noop: Literal[True], - dag_version_id: UUIDType | None, + dag_version_id: UUIDType, ) -> Callable[[Operator, Iterable[int]], Iterator[dict[str, Any]]]: ... 
@overload @@ -1641,7 +1782,7 @@ def _get_task_creator( created_counts: dict[str, int], ti_mutation_hook: Callable, hook_is_noop: Literal[False], - dag_version_id: UUIDType | None, + dag_version_id: UUIDType, ) -> Callable[[Operator, Iterable[int]], Iterator[TI]]: ... def _get_task_creator( @@ -1649,7 +1790,7 @@ def _get_task_creator( created_counts: dict[str, int], ti_mutation_hook: Callable, hook_is_noop: Literal[True, False], - dag_version_id: UUIDType | None, + dag_version_id: UUIDType, ) -> Callable[[Operator, Iterable[int]], Iterator[dict[str, Any]] | Iterator[TI]]: """ Get the task creator function. @@ -1697,13 +1838,13 @@ def _create_tasks( :param tasks: Tasks to create jobs for in the DAG run :param task_creator: Function to create task instances """ - from airflow.models.baseoperator import BaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import get_mapped_ti_count map_indexes: Iterable[int] for task in tasks: try: - count = BaseOperator.get_mapped_ti_count(task, self.run_id, session=session) + count = get_mapped_ti_count(task, self.run_id, session=session) except (NotMapped, NotFullyPopulated): map_indexes = (-1,) else: @@ -1760,7 +1901,9 @@ def _create_task_instances( # TODO[HA]: We probably need to savepoint this so we can keep the transaction alive. session.rollback() - def _revise_map_indexes_if_mapped(self, task: Operator, *, session: Session) -> Iterator[TI]: + def _revise_map_indexes_if_mapped( + self, task: Operator, *, dag_version_id: UUIDType, session: Session + ) -> Iterator[TI]: """ Check if task increased or reduced in length and handle appropriately. @@ -1769,12 +1912,12 @@ def _revise_map_indexes_if_mapped(self, task: Operator, *, session: Session) -> we delay expansion to the "last resort". See comments at the call site for more details. """ - from airflow.models.baseoperator import BaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import get_mapped_ti_count from airflow.settings import task_instance_mutation_hook try: - total_length = BaseOperator.get_mapped_ti_count(task, self.run_id, session=session) + total_length = get_mapped_ti_count(task, self.run_id, session=session) except NotMapped: return # Not a mapped task, don't need to do anything. 
except NotFullyPopulated: @@ -1806,7 +1949,7 @@ def _revise_map_indexes_if_mapped(self, task: Operator, *, session: Session) -> for index in range(total_length): if index in existing_indexes: continue - ti = TI(task, run_id=self.run_id, map_index=index, state=None) + ti = TI(task, run_id=self.run_id, map_index=index, state=None, dag_version_id=dag_version_id) self.log.debug("Expanding TIs upserted %s", ti) task_instance_mutation_hook(ti) ti = session.merge(ti) @@ -1849,16 +1992,17 @@ def schedule_tis( """ # Get list of TI IDs that do not need to executed, these are # tasks using EmptyOperator and without on_execute_callback / on_success_callback - empty_ti_ids = [] - schedulable_ti_ids = [] + empty_ti_ids: list[str] = [] + schedulable_ti_ids: list[str] = [] for ti in schedulable_tis: if TYPE_CHECKING: - assert isinstance(ti.task, BaseOperator) + assert isinstance(ti.task, SerializedBaseOperator) if ( ti.task.inherits_from_empty_operator and not ti.task.on_execute_callback and not ti.task.on_success_callback and not ti.task.outlets + and not ti.task.inlets ): empty_ti_ids.append(ti.id) # check "start_trigger_args" to see whether the operator supports start execution from triggerer diff --git a/airflow-core/src/airflow/models/dagwarning.py b/airflow-core/src/airflow/models/dagwarning.py index b88158683eaf5..7498e615e30f7 100644 --- a/airflow-core/src/airflow/models/dagwarning.py +++ b/airflow-core/src/airflow/models/dagwarning.py @@ -22,9 +22,9 @@ from sqlalchemy import Column, ForeignKeyConstraint, Index, String, Text, delete, select, true +from airflow._shared.timezones import timezone from airflow.models.base import Base, StringID from airflow.models.dag import DagModel -from airflow.utils import timezone from airflow.utils.retries import retry_db_transaction from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime diff --git a/airflow-core/src/airflow/models/db_callback_request.py b/airflow-core/src/airflow/models/db_callback_request.py index ada61d47c7a30..f1009ca1babe4 100644 --- a/airflow-core/src/airflow/models/db_callback_request.py +++ b/airflow-core/src/airflow/models/db_callback_request.py @@ -22,8 +22,8 @@ from sqlalchemy import Column, Integer, String +from airflow._shared.timezones import timezone from airflow.models.base import Base -from airflow.utils import timezone from airflow.utils.sqlalchemy import ExtendedJSON, UtcDateTime if TYPE_CHECKING: diff --git a/airflow-core/src/airflow/models/deadline.py b/airflow-core/src/airflow/models/deadline.py index 0c27cbcfc196e..8dc48acf3146d 100644 --- a/airflow-core/src/airflow/models/deadline.py +++ b/airflow-core/src/airflow/models/deadline.py @@ -16,77 +16,390 @@ # under the License. 
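+"""Deadline model: 'need-by' times for DAG runs, plus the callbacks to run when a deadline is missed."""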
from __future__ import annotations -from datetime import datetime -from typing import TYPE_CHECKING +import logging +from abc import ABC, abstractmethod +from dataclasses import dataclass +from datetime import datetime, timedelta +from enum import Enum +from functools import cached_property +from typing import TYPE_CHECKING, Any, cast import sqlalchemy_jsonfield import uuid6 -from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, String +from sqlalchemy import Column, ForeignKey, Index, Integer, String, and_, select +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.orm import relationship from sqlalchemy_utils import UUIDType -from airflow.models.base import Base, StringID +from airflow._shared.timezones import timezone +from airflow.models import Trigger +from airflow.models.base import Base +from airflow.serialization.serde import deserialize, serialize from airflow.settings import json +from airflow.triggers.deadline import PAYLOAD_STATUS_KEY, DeadlineCallbackTrigger from airflow.utils.log.logging_mixin import LoggingMixin -from airflow.utils.session import NEW_SESSION, provide_session +from airflow.utils.session import provide_session +from airflow.utils.sqlalchemy import UtcDateTime if TYPE_CHECKING: from sqlalchemy.orm import Session + from airflow.sdk.definitions.deadline import Callback + from airflow.triggers.base import TriggerEvent -class Deadline(Base, LoggingMixin): + +logger = logging.getLogger(__name__) + + +class classproperty: + """ + Decorator that converts a method with a single cls argument into a property. + + Mypy won't let us use both @property and @classmethod together, this is a workaround + to combine the two. + + Usage: + + class Circle: + def __init__(self, radius): + self.radius = radius + + @classproperty + def pi(cls): + return 3.14159 + + print(Circle.pi) # Outputs: 3.14159 + """ + + def __init__(self, method): + self.method = method + + def __get__(self, instance, cls=None): + return self.method(cls) + + +class DeadlineCallbackState(str, Enum): + """ + All possible states of deadline callbacks once the deadline is missed. + + `None` state implies that the deadline is pending (`deadline_time` hasn't passed yet). + """ + + QUEUED = "queued" + SUCCESS = "success" + FAILED = "failed" + + +class Deadline(Base): """A Deadline is a 'need-by' date which triggers a callback if the provided time has passed.""" __tablename__ = "deadline" id = Column(UUIDType(binary=False), primary_key=True, default=uuid6.uuid7) - # If the Deadline Alert is for a DAG, store the DAG ID and Run ID from the dag_run. - dag_id = Column(StringID(), ForeignKey("dag.dag_id", ondelete="CASCADE")) + # If the Deadline Alert is for a DAG, store the DAG run ID from the dag_run. dagrun_id = Column(Integer, ForeignKey("dag_run.id", ondelete="CASCADE")) # The time after which the Deadline has passed and the callback should be triggered. - deadline = Column(DateTime, nullable=False) - # The Callback to be called when the Deadline has passed. - callback = Column(String(500), nullable=False) - # Serialized kwargs to pass to the callback. - callback_kwargs = Column(sqlalchemy_jsonfield.JSONField(json=json)) + deadline_time = Column(UtcDateTime, nullable=False) + # The (serialized) callback to be called when the Deadline has passed. 
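+    # (Written via airflow.serialization.serde.serialize() in __init__ and read back
+    # through the ``callback`` cached_property below.)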
+ _callback = Column("callback", sqlalchemy_jsonfield.JSONField(json=json), nullable=False) + # The state of the deadline callback + callback_state = Column(String(20)) - __table_args__ = (Index("deadline_idx", deadline, unique=False),) + dagrun = relationship("DagRun", back_populates="deadlines") + + # The Trigger where the callback is running + trigger_id = Column(Integer, ForeignKey("trigger.id"), nullable=True) + trigger = relationship("Trigger", back_populates="deadline") + + __table_args__ = (Index("deadline_callback_state_time_idx", callback_state, deadline_time, unique=False),) def __init__( self, - deadline: datetime, - callback: str, - callback_kwargs: dict | None = None, - dag_id: str | None = None, - dagrun_id: int | None = None, + deadline_time: datetime, + callback: Callback, + dagrun_id: int, ): super().__init__() - self.deadline = deadline - self.callback = callback - self.callback_kwargs = callback_kwargs - self.dag_id = dag_id + self.deadline_time = deadline_time + self._callback = serialize(callback) self.dagrun_id = dagrun_id def __repr__(self): def _determine_resource() -> tuple[str, str]: """Determine the type of resource based on which values are present.""" - if self.dag_id and self.dagrun_id: - # The deadline is for a dagrun: - return "DagRun", f"Dag: {self.dag_id} Run: {self.dagrun_id}" + if self.dagrun_id: + # The deadline is for a Dag run: + return "DagRun", f"Dag: {self.dagrun.dag_id} Run: {self.dagrun_id}" return "Unknown", "" resource_type, resource_details = _determine_resource() - callback_kwargs = json.dumps(self.callback_kwargs) if self.callback_kwargs else "" return ( f"[{resource_type} Deadline] {resource_details} needed by " - f"{self.deadline} or run: {self.callback}({callback_kwargs})" + f"{self.deadline_time} or run: {self.callback.path}({self.callback.kwargs or ''})" ) @classmethod - @provide_session - def add_deadline(cls, deadline: Deadline, session: Session = NEW_SESSION): - """Add the provided deadline to the table.""" - session.add(deadline) + def prune_deadlines(cls, *, session: Session, conditions: dict[Column, Any]) -> int: + """ + Remove deadlines from the table which match the provided conditions and return the number removed. + + NOTE: This should only be used to remove deadlines which are associated with + successful events (DagRuns, etc). If the deadline was missed, it will be + handled by the scheduler. + + :param conditions: Dictionary of conditions to evaluate against. + :param session: Session to use. + """ + from airflow.models import DagRun # Avoids circular import + + # Assemble the filter conditions. + filter_conditions = [column == value for column, value in conditions.items()] + if not filter_conditions: + return 0 + + try: + # Get deadlines which match the provided conditions and their associated DagRuns. + deadline_dagrun_pairs = ( + session.query(Deadline, DagRun).join(DagRun).filter(and_(*filter_conditions)).all() + ) + except AttributeError as e: + logger.exception("Error resolving deadlines: %s", e) + raise + + if not deadline_dagrun_pairs: + return 0 + + deleted_count = 0 + dagruns_to_refresh = set() + + for deadline, dagrun in deadline_dagrun_pairs: + if dagrun.end_date <= deadline.deadline_time: + # If the DagRun finished before the Deadline: + session.delete(deadline) + deleted_count += 1 + dagruns_to_refresh.add(dagrun) + session.flush() + + logger.debug("%d deadline records were deleted matching the conditions %s", deleted_count, conditions) + + # Refresh any affected DAG runs. 
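+        # session.refresh() re-reads each remaining run from the DB so its
+        # ``deadlines`` relationship reflects the deletions above.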
+ for dagrun in dagruns_to_refresh: + session.refresh(dagrun) + + return deleted_count + + @cached_property + def callback(self) -> Callback: + return cast("Callback", deserialize(self._callback)) + + def handle_miss(self, session: Session): + """Handle a missed deadline by running the callback in the appropriate host and updating the `callback_state`.""" + from airflow.sdk.definitions.deadline import AsyncCallback, SyncCallback + + if isinstance(self.callback, AsyncCallback): + callback_trigger = DeadlineCallbackTrigger( + callback_path=self.callback.path, + callback_kwargs=self.callback.kwargs, + ) + trigger_orm = Trigger.from_object(callback_trigger) + session.add(trigger_orm) + session.flush() + self.trigger = trigger_orm + + elif isinstance(self.callback, SyncCallback): + raise NotImplementedError("SyncCallback is currently not supported") + + else: + raise TypeError("Unknown Callback type") + + self.callback_state = DeadlineCallbackState.QUEUED + session.add(self) + + def handle_callback_event(self, event: TriggerEvent, session: Session): + if (status := event.payload.get(PAYLOAD_STATUS_KEY)) and status in { + DeadlineCallbackState.SUCCESS, + DeadlineCallbackState.FAILED, + }: + self.trigger = None + self.callback_state = event.payload[PAYLOAD_STATUS_KEY] + session.add(self) + else: + logger.error("Unexpected event received: %s", event.payload) + + +class ReferenceModels: + """ + Store the implementations for the different Deadline References. + + After adding the implementations here, all DeadlineReferences should be added + to the user interface in airflow.sdk.definitions.deadline.DeadlineReference + """ + + REFERENCE_TYPE_FIELD = "reference_type" + + @classmethod + def get_reference_class(cls, reference_name: str) -> type[BaseDeadlineReference]: + """ + Get a reference class by its name. + + :param reference_name: The name of the reference class to find + """ + try: + return next( + ref_class + for name, ref_class in vars(cls).items() + if isinstance(ref_class, type) + and issubclass(ref_class, cls.BaseDeadlineReference) + and ref_class.__name__ == reference_name + ) + except StopIteration: + raise ValueError(f"No reference class found with name: {reference_name}") + + class BaseDeadlineReference(LoggingMixin, ABC): + """Base class for all Deadline implementations.""" + + # Set of required kwargs - subclasses should override this. + required_kwargs: set[str] = set() + + @classproperty + def reference_name(cls: Any) -> str: + return cls.__name__ + + def evaluate_with(self, *, session: Session, interval: timedelta, **kwargs: Any) -> datetime: + """Validate the provided kwargs and evaluate this deadline with the given conditions.""" + filtered_kwargs = {k: v for k, v in kwargs.items() if k in self.required_kwargs} + + if missing_kwargs := self.required_kwargs - filtered_kwargs.keys(): + raise ValueError( + f"{self.__class__.__name__} is missing required parameters: {', '.join(missing_kwargs)}" + ) + + if extra_kwargs := kwargs.keys() - filtered_kwargs.keys(): + self.log.debug("Ignoring unexpected parameters: %s", ", ".join(extra_kwargs)) + + return self._evaluate_with(session=session, **filtered_kwargs) + interval + + @abstractmethod + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: + """Must be implemented by subclasses to perform the actual evaluation.""" + raise NotImplementedError + + @classmethod + def deserialize_reference(cls, reference_data: dict): + """ + Deserialize a reference type from its dictionary representation. 
+ + While the base implementation doesn't use reference_data, this parameter is required + for subclasses that need additional data for initialization (like FixedDatetimeDeadline + which needs a datetime value). + + :param reference_data: Dictionary containing serialized reference data. + Always includes a 'reference_type' field, and may include additional + fields needed by specific reference implementations. + """ + return cls() + + def serialize_reference(self) -> dict: + """ + Serialize this reference type into a dictionary representation. + + This method assumes that the reference doesn't require any additional data. + Override this method in subclasses if additional data is needed for serialization. + """ + return {ReferenceModels.REFERENCE_TYPE_FIELD: self.reference_name} + + @dataclass + class FixedDatetimeDeadline(BaseDeadlineReference): + """A deadline that always returns a fixed datetime.""" + + _datetime: datetime + + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: + return self._datetime + + def serialize_reference(self) -> dict: + return { + ReferenceModels.REFERENCE_TYPE_FIELD: self.reference_name, + "datetime": self._datetime.timestamp(), + } + + @classmethod + def deserialize_reference(cls, reference_data: dict): + return cls(_datetime=timezone.from_timestamp(reference_data["datetime"])) + + class DagRunLogicalDateDeadline(BaseDeadlineReference): + """A deadline that returns a DagRun's logical date.""" + + required_kwargs = {"dag_id", "run_id"} + + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: + from airflow.models import DagRun + + return _fetch_from_db(DagRun.logical_date, session=session, **kwargs) + + class DagRunQueuedAtDeadline(BaseDeadlineReference): + """A deadline that returns when a DagRun was queued.""" + + required_kwargs = {"dag_id", "run_id"} + + @provide_session + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: + from airflow.models import DagRun + + return _fetch_from_db(DagRun.queued_at, session=session, **kwargs) + + +DeadlineReferenceType = ReferenceModels.BaseDeadlineReference + + +@provide_session +def _fetch_from_db(model_reference: Column, session=None, **conditions) -> datetime: + """ + Fetch a datetime value from the database using the provided model reference and filtering conditions. + + For example, to fetch a TaskInstance's start_date: + _fetch_from_db( + TaskInstance.start_date, dag_id='example_dag', task_id='example_task', run_id='example_run' + ) + + This generates SQL equivalent to: + SELECT start_date + FROM task_instance + WHERE dag_id = 'example_dag' + AND task_id = 'example_task' + AND run_id = 'example_run' + + :param model_reference: SQLAlchemy Column to select (e.g., DagRun.logical_date, TaskInstance.start_date) + :param conditions: Filtering conditions applied as equality comparisons in the WHERE clause. + Multiple conditions are combined with AND. 
+ :param session: SQLAlchemy session (auto-provided by decorator) + """ + query = select(model_reference) + + for key, value in conditions.items(): + query = query.where(getattr(model_reference.class_, key) == value) + + compiled_query = query.compile(compile_kwargs={"literal_binds": True}) + pretty_query = "\n ".join(str(compiled_query).splitlines()) + logger.debug( + "Executing query:\n %r\nAs SQL:\n %s", + query, + pretty_query, + ) + + try: + result = session.scalar(query) + except SQLAlchemyError: + logger.exception("Database query failed.") + raise + + if result is None: + message = f"No matching record found in the database for query:\n {pretty_query}" + logger.error(message) + raise ValueError(message) + + return result diff --git a/airflow-core/src/airflow/models/errors.py b/airflow-core/src/airflow/models/errors.py index 748d56c46b462..6670df1dfaf62 100644 --- a/airflow-core/src/airflow/models/errors.py +++ b/airflow-core/src/airflow/models/errors.py @@ -19,6 +19,7 @@ from sqlalchemy import Column, Integer, String, Text +from airflow.dag_processing.bundles.manager import DagBundlesManager from airflow.models.base import Base, StringID from airflow.utils.sqlalchemy import UtcDateTime @@ -29,6 +30,11 @@ class ParseImportError(Base): __tablename__ = "import_error" id = Column(Integer, primary_key=True) timestamp = Column(UtcDateTime) - filename = Column(String(1024)) # todo AIP-66: make this bundle and relative fileloc + filename = Column(String(1024)) bundle_name = Column(StringID()) stacktrace = Column(Text) + + def full_file_path(self) -> str: + """Return the full file path of the dag.""" + bundle = DagBundlesManager().get_bundle(self.bundle_name) + return "/".join([str(bundle.path), self.filename]) diff --git a/airflow-core/src/airflow/models/expandinput.py b/airflow-core/src/airflow/models/expandinput.py index f3e6aab168076..6aa44316f6e77 100644 --- a/airflow-core/src/airflow/models/expandinput.py +++ b/airflow-core/src/airflow/models/expandinput.py @@ -20,19 +20,19 @@ import functools import operator from collections.abc import Iterable, Sized -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, ClassVar import attrs if TYPE_CHECKING: + from typing import TypeGuard + from sqlalchemy.orm import Session from airflow.models.xcom_arg import SchedulerXComArg - from airflow.typing_compat import TypeGuard from airflow.sdk.definitions._internal.expandinput import ( DictOfListsExpandInput, - ExpandInput, ListOfDictsExpandInput, MappedArgument, NotFullyPopulated, @@ -62,6 +62,8 @@ def _needs_run_time_resolution(v: OperatorExpandArgument) -> TypeGuard[MappedArg class SchedulerDictOfListsExpandInput: value: dict + EXPAND_INPUT_TYPE: ClassVar[str] = "dict-of-lists" + def _iter_parse_time_resolved_kwargs(self) -> Iterable[tuple[str, Sized]]: """Generate kwargs with values available on parse-time.""" return ((k, v) for k, v in self.value.items() if not _needs_run_time_resolution(v)) @@ -114,6 +116,8 @@ def get_total_map_length(self, run_id: str, *, session: Session) -> int: class SchedulerListOfDictsExpandInput: value: list + EXPAND_INPUT_TYPE: ClassVar[str] = "list-of-dicts" + def get_parse_time_mapped_ti_count(self) -> int: if isinstance(self.value, Sized): return len(self.value) @@ -130,11 +134,13 @@ def get_total_map_length(self, run_id: str, *, session: Session) -> int: return length -_EXPAND_INPUT_TYPES = { +_EXPAND_INPUT_TYPES: dict[str, type[SchedulerExpandInput]] = { "dict-of-lists": SchedulerDictOfListsExpandInput, "list-of-dicts": 
SchedulerListOfDictsExpandInput, } +SchedulerExpandInput = SchedulerDictOfListsExpandInput | SchedulerListOfDictsExpandInput + -def create_expand_input(kind: str, value: Any) -> ExpandInput: +def create_expand_input(kind: str, value: Any) -> SchedulerExpandInput: return _EXPAND_INPUT_TYPES[kind](value) diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py new file mode 100644 index 0000000000000..74d9e3a747b1a --- /dev/null +++ b/airflow-core/src/airflow/models/hitl.py @@ -0,0 +1,79 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import sqlalchemy_jsonfield +from sqlalchemy import Boolean, Column, ForeignKeyConstraint, String, Text +from sqlalchemy.dialects import postgresql +from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.orm import relationship + +from airflow.models.base import Base +from airflow.settings import json +from airflow.utils.sqlalchemy import UtcDateTime + + +class HITLDetail(Base): + """Human-in-the-loop request and corresponding response.""" + + __tablename__ = "hitl_detail" + ti_id = Column( + String(36).with_variant(postgresql.UUID(as_uuid=False), "postgresql"), + primary_key=True, + nullable=False, + ) + + # User Request Detail + options = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False) + subject = Column(Text, nullable=False) + body = Column(Text, nullable=True) + defaults = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + multiple = Column(Boolean, unique=False, default=False) + params = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) + + # Response Content Detail + response_at = Column(UtcDateTime, nullable=True) + user_id = Column(String(128), nullable=True) + chosen_options = Column( + sqlalchemy_jsonfield.JSONField(json=json), + nullable=True, + default=None, + ) + params_input = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) + task_instance = relationship( + "TaskInstance", + lazy="joined", + back_populates="hitl_detail", + ) + + __table_args__ = ( + ForeignKeyConstraint( + (ti_id,), + ["task_instance.id"], + name="hitl_detail_ti_fkey", + ondelete="CASCADE", + onupdate="CASCADE", + ), + ) + + @hybrid_property + def response_received(self) -> bool: + return self.response_at is not None + + @response_received.expression # type: ignore[no-redef] + def response_received(cls): + return cls.response_at.is_not(None) diff --git a/airflow-core/src/airflow/models/log.py b/airflow-core/src/airflow/models/log.py index 8669d228b2504..9347e4a17536c 100644 --- a/airflow-core/src/airflow/models/log.py +++ b/airflow-core/src/airflow/models/log.py @@ -20,9 +20,10 @@ from typing import TYPE_CHECKING from sqlalchemy import Column, Index, Integer, String, 
Text +from sqlalchemy.orm import relationship +from airflow._shared.timezones import timezone from airflow.models.base import Base, StringID -from airflow.utils import timezone from airflow.utils.sqlalchemy import UtcDateTime if TYPE_CHECKING: @@ -48,6 +49,13 @@ class Log(Base): extra = Column(Text) try_number = Column(Integer) + dag_model = relationship( + "DagModel", + viewonly=True, + foreign_keys=[dag_id], + primaryjoin="Log.dag_id == DagModel.dag_id", + ) + __table_args__ = ( Index("idx_log_dttm", dttm), Index("idx_log_event", event), diff --git a/airflow-core/src/airflow/models/mappedoperator.py b/airflow-core/src/airflow/models/mappedoperator.py index e6fb66962d1ee..7dbd3309fd07e 100644 --- a/airflow-core/src/airflow/models/mappedoperator.py +++ b/airflow-core/src/airflow/models/mappedoperator.py @@ -17,23 +17,50 @@ # under the License. from __future__ import annotations -from typing import TYPE_CHECKING +import functools +import operator +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any import attrs import structlog +from sqlalchemy.orm import Session +from airflow.exceptions import AirflowException +from airflow.sdk.bases.operator import BaseOperator as TaskSDKBaseOperator +from airflow.sdk.definitions._internal.abstractoperator import NotMapped from airflow.sdk.definitions.mappedoperator import MappedOperator as TaskSDKMappedOperator -from airflow.triggers.base import StartTriggerArgs -from airflow.utils.helpers import prevent_duplicates +from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup +from airflow.serialization.serialized_objects import DEFAULT_OPERATOR_DEPS, SerializedBaseOperator if TYPE_CHECKING: - from sqlalchemy.orm.session import Session + from collections.abc import Iterator + from airflow.models import TaskInstance + from airflow.models.dag import DAG as SchedulerDAG + from airflow.sdk import BaseOperatorLink + from airflow.sdk.definitions._internal.node import DAGNode from airflow.sdk.definitions.context import Context + from airflow.ti_deps.deps.base_ti_dep import BaseTIDep log = structlog.get_logger(__name__) +def _prevent_duplicates(kwargs1: dict[str, Any], kwargs2: Mapping[str, Any], *, fail_reason: str) -> None: + """ + Ensure *kwargs1* and *kwargs2* do not contain common keys. + + :raises TypeError: If common keys are found. + """ + duplicated_keys = set(kwargs1).intersection(kwargs2) + if not duplicated_keys: + return + if len(duplicated_keys) == 1: + raise TypeError(f"{fail_reason} argument: {duplicated_keys.pop()}") + duplicated_keys_display = ", ".join(sorted(duplicated_keys)) + raise TypeError(f"{fail_reason} arguments: {duplicated_keys_display}") + + @attrs.define( kw_only=True, # Disable custom __getstate__ and __setstate__ generation since it interacts @@ -46,9 +73,11 @@ getstate_setstate=False, repr=False, ) -class MappedOperator(TaskSDKMappedOperator): # type: ignore[misc] # It complains about weight_rule being different +class MappedOperator(TaskSDKMappedOperator): """Object representing a mapped operator in a DAG.""" + deps: frozenset[BaseTIDep] = attrs.field(init=False, default=DEFAULT_OPERATOR_DEPS) + def expand_start_from_trigger(self, *, context: Context, session: Session) -> bool: """ Get the start_from_trigger value of the current abstract operator. @@ -64,14 +93,19 @@ def expand_start_from_trigger(self, *, context: Context, session: Session) -> bo task_id=self.task_id, dag_id=self.dag_id, ) + + # This is intentional. 
start_from_trigger does not work correctly with
+        # sdk-db separation yet, so it is disabled unconditionally for now.
+        # TODO: TaskSDK: Implement this properly.
         return False

+        # start_from_trigger only makes sense when start_trigger_args exists.
         if not self.start_trigger_args:
             return False

         mapped_kwargs, _ = self._expand_mapped_kwargs(context)
         if self._disallow_kwargs_override:
-            prevent_duplicates(
+            _prevent_duplicates(
                 self.partial_kwargs,
                 mapped_kwargs,
                 fail_reason="unmappable or already specified",
@@ -82,39 +116,137 @@
             "start_from_trigger", self.partial_kwargs.get("start_from_trigger", self.start_from_trigger)
         )

-    def expand_start_trigger_args(self, *, context: Context, session: Session) -> StartTriggerArgs | None:
+    @functools.cached_property
+    def operator_extra_link_dict(self) -> dict[str, BaseOperatorLink]:
+        """Returns dictionary of all extra links for the operator."""
+        op_extra_links_from_plugin: dict[str, Any] = {}
+        from airflow import plugins_manager
+
+        plugins_manager.initialize_extra_operators_links_plugins()
+        if plugins_manager.operator_extra_links is None:
+            raise AirflowException("Can't load operators")
+        operator_class_type = self.operator_class["task_type"]  # type: ignore
+        for ope in plugins_manager.operator_extra_links:
+            if ope.operators and any(operator_class_type in cls.__name__ for cls in ope.operators):
+                op_extra_links_from_plugin.update({ope.name: ope})
+
+        operator_extra_links_all = {link.name: link for link in self.operator_extra_links}
+        # Extra links defined in Plugins override operator links defined in operator
+        operator_extra_links_all.update(op_extra_links_from_plugin)
+
+        return operator_extra_links_all
+
+    @functools.cached_property
+    def global_operator_extra_link_dict(self) -> dict[str, Any]:
+        """Returns dictionary of all global extra links."""
+        from airflow import plugins_manager
+
+        plugins_manager.initialize_extra_operators_links_plugins()
+        if plugins_manager.global_operator_extra_links is None:
+            raise AirflowException("Can't load operators")
+        return {link.name: link for link in plugins_manager.global_operator_extra_links}
+
+    @functools.cached_property
+    def extra_links(self) -> list[str]:
+        return sorted(set(self.operator_extra_link_dict).union(self.global_operator_extra_link_dict))
+
+    def get_extra_links(self, ti: TaskInstance, name: str) -> str | None:
         """
-        Get the kwargs to create the unmapped start_trigger_args.
+        For an operator, gets the URLs that the ``extra_links`` entry points to.
+
+        :meta private:
-        This method is for allowing mapped operator to start execution from triggerer.
+        :raise ValueError: The error message of a ValueError will be passed on through to
+            the frontend to show up as a tooltip on the disabled link.
+        :param ti: The TaskInstance for the URL being searched for.
+        :param name: The name of the link we're looking for the URL for. Should be
+            one of the options specified in ``extra_links``.
""" - if not self.start_trigger_args: + link = self.operator_extra_link_dict.get(name) or self.global_operator_extra_link_dict.get(name) + if not link: return None - - mapped_kwargs, _ = self._expand_mapped_kwargs(context) - if self._disallow_kwargs_override: - prevent_duplicates( - self.partial_kwargs, - mapped_kwargs, - fail_reason="unmappable or already specified", + return link.get_link(self, ti_key=ti.key) # type: ignore[arg-type] # TODO: GH-52141 - BaseOperatorLink.get_link expects BaseOperator but receives MappedOperator + + +@functools.singledispatch +def get_mapped_ti_count(task: DAGNode, run_id: str, *, session: Session) -> int: + raise NotImplementedError(f"Not implemented for {type(task)}") + + +# Still accept TaskSDKBaseOperator because some tests don't go through serialization. +# TODO (GH-52141): Rewrite tests so we can drop SDK references at some point. +@get_mapped_ti_count.register(SerializedBaseOperator) +@get_mapped_ti_count.register(TaskSDKBaseOperator) +def _(task: SerializedBaseOperator | TaskSDKBaseOperator, run_id: str, *, session: Session) -> int: + group = task.get_closest_mapped_task_group() + if group is None: + raise NotMapped() + return get_mapped_ti_count(group, run_id, session=session) + + +# Still accept TaskSDKMappedOperator because some tests don't go through serialization. +# TODO (GH-52141): Rewrite tests so we can drop SDK references at some point. +@get_mapped_ti_count.register(MappedOperator) +@get_mapped_ti_count.register(TaskSDKMappedOperator) +def _(task: MappedOperator | TaskSDKMappedOperator, run_id: str, *, session: Session) -> int: + from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef + + exp_input = task._get_specified_expand_input() + if isinstance(exp_input, _ExpandInputRef): + exp_input = exp_input.deref(task.dag) + # TODO (GH-52141): 'task' here should be scheduler-bound and returns scheduler expand input. + if not hasattr(exp_input, "get_total_map_length"): + if TYPE_CHECKING: + assert isinstance(task.dag, SchedulerDAG) + current_count = ( + _ExpandInputRef( + exp_input.EXPAND_INPUT_TYPE, + BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)), ) - - # Ordering is significant; mapped kwargs should override partial ones. - trigger_kwargs = mapped_kwargs.get( - "trigger_kwargs", - self.partial_kwargs.get("trigger_kwargs", self.start_trigger_args.trigger_kwargs), - ) - next_kwargs = mapped_kwargs.get( - "next_kwargs", - self.partial_kwargs.get("next_kwargs", self.start_trigger_args.next_kwargs), - ) - timeout = mapped_kwargs.get( - "trigger_timeout", self.partial_kwargs.get("trigger_timeout", self.start_trigger_args.timeout) - ) - return StartTriggerArgs( - trigger_cls=self.start_trigger_args.trigger_cls, - trigger_kwargs=trigger_kwargs, - next_method=self.start_trigger_args.next_method, - next_kwargs=next_kwargs, - timeout=timeout, + .deref(task.dag) + .get_total_map_length(run_id, session=session) ) + else: + current_count = exp_input.get_total_map_length(run_id, session=session) + + group = task.get_closest_mapped_task_group() + if group is None: + return current_count + parent_count = get_mapped_ti_count(group, run_id, session=session) + return parent_count * current_count + + +@get_mapped_ti_count.register +def _(group: TaskGroup, run_id: str, *, session: Session) -> int: + """ + Return the number of instances a task in this group should be mapped to at run time. 
+
+    This considers both literal and non-literal mapped arguments, and the
+    result is therefore available when all depended-on tasks have finished. The
+    return value should be identical to ``parse_time_mapped_ti_count`` if
+    all mapped arguments are literal.
+
+    If this group is inside mapped task groups, all the nested counts are
+    multiplied together and accounted for.
+
+    :raise NotFullyPopulated: If upstream tasks are not all complete yet.
+    :return: Total number of mapped TIs this task should have.
+    """
+    from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef
+
+    def iter_mapped_task_group_lengths(group) -> Iterator[int]:
+        while group is not None:
+            if isinstance(group, MappedTaskGroup):
+                exp_input = group._expand_input
+                # TODO (GH-52141): 'group' here should be scheduler-bound and return scheduler expand input.
+                if not hasattr(exp_input, "get_total_map_length"):
+                    if TYPE_CHECKING:
+                        assert isinstance(group.dag, SchedulerDAG)
+                    exp_input = _ExpandInputRef(
+                        exp_input.EXPAND_INPUT_TYPE,
+                        BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)),
+                    ).deref(group.dag)
+                yield exp_input.get_total_map_length(run_id, session=session)
+            group = group.parent_group

+    return functools.reduce(operator.mul, iter_mapped_task_group_lengths(group))
diff --git a/airflow-core/src/airflow/models/operator.py b/airflow-core/src/airflow/models/operator.py
deleted file mode 100644
index b42823e4c2189..0000000000000
--- a/airflow-core/src/airflow/models/operator.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.
See the License for the -# specific language governing permissions and limitations -# under the License. - -"""Re exporting the new param module from Task SDK for backward compatibility.""" - -from __future__ import annotations - -from airflow.sdk.definitions.param import Param as Param, ParamsDict as ParamsDict diff --git a/airflow-core/src/airflow/models/pool.py b/airflow-core/src/airflow/models/pool.py index 7fc99740461a6..e022910b5a82a 100644 --- a/airflow-core/src/airflow/models/pool.py +++ b/airflow-core/src/airflow/models/pool.py @@ -177,7 +177,7 @@ def slots_stats( pool_rows = session.execute(query) for pool_name, total_slots, include_deferred in pool_rows: if total_slots == -1: - total_slots = float("inf") # type: ignore + total_slots = float("inf") pools[pool_name] = PoolStats( total=total_slots, running=0, queued=0, open=0, deferred=0, scheduled=0 ) diff --git a/airflow-core/src/airflow/models/renderedtifields.py b/airflow-core/src/airflow/models/renderedtifields.py index c971f391d9e24..9e3fb57cb2816 100644 --- a/airflow-core/src/airflow/models/renderedtifields.py +++ b/airflow-core/src/airflow/models/renderedtifields.py @@ -48,10 +48,10 @@ from sqlalchemy.sql import FromClause from airflow.models.taskinstance import TaskInstance, TaskInstanceKey - from airflow.sdk.types import Operator + from airflow.serialization.serialized_objects import SerializedBaseOperator -def get_serialized_template_fields(task: Operator): +def get_serialized_template_fields(task: SerializedBaseOperator): """ Get and serialize the template fields for a task. @@ -125,7 +125,7 @@ def __init__(self, ti: TaskInstance, render_templates=True, rendered_fields=None ti.render_templates() if TYPE_CHECKING: - assert ti.task + assert isinstance(ti.task, SerializedBaseOperator) self.task = ti.task if os.environ.get("AIRFLOW_IS_K8S_EXECUTOR_POD", None): diff --git a/airflow-core/src/airflow/models/serialized_dag.py b/airflow-core/src/airflow/models/serialized_dag.py index 9e4c6115d92ef..e64a0e5cc895e 100644 --- a/airflow-core/src/airflow/models/serialized_dag.py +++ b/airflow-core/src/airflow/models/serialized_dag.py @@ -21,9 +21,9 @@ import logging import zlib -from collections.abc import Iterable, Iterator, Sequence +from collections.abc import Callable, Iterable, Iterator, Sequence from datetime import timedelta -from typing import TYPE_CHECKING, Any, Callable, Literal +from typing import TYPE_CHECKING, Any, Literal import sqlalchemy_jsonfield import uuid6 @@ -32,6 +32,7 @@ from sqlalchemy.sql.expression import func, literal from sqlalchemy_utils import UUIDType +from airflow._shared.timezones import timezone from airflow.exceptions import TaskNotFound from airflow.models.asset import ( AssetAliasModel, @@ -46,7 +47,6 @@ from airflow.serialization.dag_dependency import DagDependency from airflow.serialization.serialized_objects import SerializedDAG from airflow.settings import COMPRESS_SERIALIZED_DAGS, json -from airflow.utils import timezone from airflow.utils.hashlib_wrapper import md5 from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime @@ -295,13 +295,13 @@ class SerializedDagModel(Base): dag_runs = relationship( DagRun, - primaryjoin=dag_id == foreign(DagRun.dag_id), # type: ignore + primaryjoin=dag_id == foreign(DagRun.dag_id), backref=backref("serialized_dag", uselist=False, innerjoin=True), ) dag_model = relationship( DagModel, - primaryjoin=dag_id == DagModel.dag_id, # type: ignore + primaryjoin=dag_id == DagModel.dag_id, # type: ignore[has-type] 
foreign_keys=dag_id, uselist=False, innerjoin=True, @@ -416,11 +416,16 @@ def write_dag( serialized_dag_hash = session.scalars( select(cls.dag_hash).where(cls.dag_id == dag.dag_id).order_by(cls.created_at.desc()) ).first() + dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) - if serialized_dag_hash is not None and serialized_dag_hash == new_serialized_dag.dag_hash: + if ( + serialized_dag_hash == new_serialized_dag.dag_hash + and dag_version + and dag_version.bundle_name == bundle_name + ): log.debug("Serialized DAG (%s) is unchanged. Skipping writing to DB", dag.dag_id) return False - dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) + if dag_version and not dag_version.task_instances: # This is for dynamic DAGs that the hashes changes often. We should update # the serialized dag, the dag_version and the dag_code instead of a new version diff --git a/airflow-core/src/airflow/models/taskinstance.py b/airflow-core/src/airflow/models/taskinstance.py index 5cf657a36f35f..eac003c816cb6 100644 --- a/airflow-core/src/airflow/models/taskinstance.py +++ b/airflow-core/src/airflow/models/taskinstance.py @@ -23,24 +23,18 @@ import logging import math import operator -import os -import signal -import traceback +import uuid from collections import defaultdict -from collections.abc import Collection, Generator, Iterable, Mapping, Sequence +from collections.abc import Collection, Iterable from datetime import timedelta -from enum import Enum from functools import cache -from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any, cast from urllib.parse import quote import attrs import dill -import jinja2 import lazy_object_proxy import uuid6 -from jinja2 import TemplateAssertionError, UndefinedError from sqlalchemy import ( Column, Float, @@ -74,43 +68,30 @@ from sqlalchemy_utils import UUIDType from airflow import settings +from airflow._shared.timezones import timezone from airflow.assets.manager import asset_manager from airflow.configuration import conf from airflow.exceptions import ( - AirflowException, - AirflowFailException, AirflowInactiveAssetInInletOrOutletException, - AirflowRescheduleException, - AirflowSensorTimeout, - AirflowSkipException, - AirflowTaskTerminated, - AirflowTaskTimeout, TaskDeferralError, TaskDeferred, - UnmappableXComLengthPushed, - UnmappableXComTypePushed, - XComForMappingNotPushed, ) from airflow.listeners.listener import get_listener_manager -from airflow.models.asset import AssetActive, AssetEvent, AssetModel +from airflow.models.asset import AssetEvent, AssetModel from airflow.models.base import Base, StringID, TaskInstanceDependencies -from airflow.models.dagbag import DagBag + +# Import HITLDetail at runtime so SQLAlchemy can resolve the relationship +from airflow.models.hitl import HITLDetail # noqa: F401 from airflow.models.log import Log -from airflow.models.renderedtifields import get_serialized_template_fields from airflow.models.taskinstancekey import TaskInstanceKey from airflow.models.taskmap import TaskMap from airflow.models.taskreschedule import TaskReschedule -from airflow.models.xcom import LazyXComSelectSequence, XComModel -from airflow.plugins_manager import integrate_macros_plugins -from airflow.sdk.execution_time.context import context_to_airflow_vars -from airflow.sentry import Sentry +from airflow.models.xcom import XCOM_RETURN_KEY, LazyXComSelectSequence, XComModel from airflow.settings import task_instance_mutation_hook from airflow.stats import 
Stats from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.dependencies_deps import REQUEUEABLE_DEPS, RUNNING_DEPS -from airflow.utils import timezone -from airflow.utils.email import send_email -from airflow.utils.helpers import prune_dict, render_template_to_string +from airflow.utils.helpers import prune_dict from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname from airflow.utils.platform import getuser @@ -119,9 +100,6 @@ from airflow.utils.span_status import SpanStatus from airflow.utils.sqlalchemy import ExecutorConfigType, ExtendedJSON, UtcDateTime from airflow.utils.state import DagRunState, State, TaskInstanceState -from airflow.utils.task_instance_session import set_current_task_instance_session -from airflow.utils.timeout import timeout -from airflow.utils.xcom import XCOM_RETURN_KEY TR = TaskReschedule @@ -130,8 +108,7 @@ if TYPE_CHECKING: from datetime import datetime - from pathlib import PurePath - from types import TracebackType + from typing import Literal, TypeAlias import pendulum from sqlalchemy.engine import Connection as SAConnection, Engine @@ -140,33 +117,21 @@ from sqlalchemy.sql.elements import BooleanClauseList from sqlalchemy.sql.expression import ColumnOperators - from airflow.models.abstractoperator import TaskStateChangeCallback - from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG as SchedulerDAG, DagModel from airflow.models.dagrun import DagRun + from airflow.models.mappedoperator import MappedOperator from airflow.sdk.api.datamodels._generated import AssetProfile - from airflow.sdk.definitions._internal.abstractoperator import Operator from airflow.sdk.definitions.asset import AssetNameRef, AssetUniqueKey, AssetUriRef from airflow.sdk.definitions.dag import DAG - from airflow.sdk.definitions.taskgroup import MappedTaskGroup + from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup from airflow.sdk.types import RuntimeTaskInstanceProtocol - from airflow.typing_compat import Literal + from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.utils.context import Context - from airflow.utils.task_group import TaskGroup - - -PAST_DEPENDS_MET = "past_depends_met" - -class TaskReturnCode(Enum): - """ - Enum to signal manner of exit for task run command. + Operator: TypeAlias = MappedOperator | SerializedBaseOperator - :meta private: - """ - DEFERRED = 100 - """When task exits with deferral to trigger.""" +PAST_DEPENDS_MET = "past_depends_met" @provide_session @@ -191,28 +156,6 @@ def _add_log( ) -@contextlib.contextmanager -def set_current_context(context: Context) -> Generator[Context, None, None]: - """ - Set the current execution context to the provided context object. - - This method should be called once per Task execution, before calling operator.execute. - """ - from airflow.sdk.definitions._internal.contextmanager import _CURRENT_CONTEXT - - _CURRENT_CONTEXT.append(context) - try: - yield context - finally: - expected_state = _CURRENT_CONTEXT.pop() - if expected_state != context: - log.warning( - "Current context is not equal to the state at context stack. Expected=%s, got=%s", - context, - expected_state, - ) - - def _stop_remaining_tasks(*, task_instance: TaskInstance, task_teardown_map=None, session: Session): """ Stop non-teardown tasks in dag. 
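# --- Editor's note --------------------------------------------------------
# A minimal, runnable sketch (not Airflow code) of the dedup rule the
# SerializedDagModel.write_dag() hunk in serialized_dag.py above introduces:
# a serialized DAG is rewritten only when its content hash *or* its bundle
# name changed, instead of comparing the hash alone. The names StoredVersion
# and should_write are hypothetical, introduced only for this illustration.
from dataclasses import dataclass


@dataclass
class StoredVersion:
    dag_hash: str
    bundle_name: str


def should_write(new_hash: str, bundle_name: str, latest: StoredVersion | None) -> bool:
    if latest is None:
        return True  # nothing stored yet: always write
    if latest.dag_hash == new_hash and latest.bundle_name == bundle_name:
        return False  # unchanged content in the same bundle: skip the write
    return True


assert should_write("h1", "bundle-a", None)
assert not should_write("h1", "bundle-a", StoredVersion("h1", "bundle-a"))
assert should_write("h1", "bundle-b", StoredVersion("h1", "bundle-a"))  # bundle moved
# ---------------------------------------------------------------------------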
@@ -255,8 +198,8 @@ def _stop_remaining_tasks(*, task_instance: TaskInstance, task_teardown_map=None def clear_task_instances( tis: list[TaskInstance], session: Session, - dag: DAG | None = None, dag_run_state: DagRunState | Literal[False] = DagRunState.QUEUED, + run_on_latest_version: bool = False, ) -> None: """ Clear a set of task instances, but make sure the running ones get killed. @@ -271,11 +214,14 @@ def clear_task_instances( :param session: current session :param dag_run_state: state to set finished DagRuns to. If set to False, DagRuns state will not be changed. - :param dag: DAG object + :param run_on_latest_version: whether to run on latest serialized DAG and Bundle version + + :meta private: """ - # taskinstance uuids: task_instance_ids: list[str] = [] - dag_bag = DagBag(read_dags_from_db=True) + from airflow.models.dagbag import DBDagBag + + scheduler_dagbag = DBDagBag(load_op_links=False) for ti in tis: task_instance_ids.append(ti.id) @@ -285,10 +231,17 @@ def clear_task_instances( # the task is terminated and becomes eligible for retry. ti.state = TaskInstanceState.RESTARTING else: - ti_dag = dag if dag and dag.dag_id == ti.dag_id else dag_bag.get_dag(ti.dag_id, session=session) + dr = ti.dag_run + if run_on_latest_version: + ti_dag = scheduler_dagbag.get_latest_version_of_dag(ti.dag_id, session=session) + else: + ti_dag = scheduler_dagbag.get_dag_for_run(dag_run=dr, session=session) + if not ti_dag: + log.warning("No serialized dag found for dag '%s'", dr.dag_id) task_id = ti.task_id if ti_dag and ti_dag.has_task(task_id): - task = ti_dag.get_task(task_id) + # TODO (GH-52141): Make dag a db-backed object so it only returns db-backed tasks. + task = cast("Operator", ti_dag.get_task(task_id)) ti.refresh_from_task(task) if TYPE_CHECKING: assert ti.task @@ -326,6 +279,16 @@ def clear_task_instances( if dr.state in State.finished_dr_states: dr.state = dag_run_state dr.start_date = timezone.utcnow() + if run_on_latest_version: + dr_dag = scheduler_dagbag.get_latest_version_of_dag(dr.dag_id, session=session) + else: + dr_dag = scheduler_dagbag.get_dag_for_run(dag_run=dr, session=session) + if not dr_dag: + log.warning("No serialized dag found for dag '%s'", dr.dag_id) + if dr_dag and not dr_dag.disable_bundle_versioning and run_on_latest_version: + bundle_version = dr.dag_model.bundle_version + if bundle_version is not None and run_on_latest_version: + dr.bundle_version = bundle_version if dag_run_state == DagRunState.QUEUED: dr.last_scheduling_decision = None dr.start_date = None @@ -334,7 +297,7 @@ def clear_task_instances( def _creator_note(val): - """Creator the ``note`` association proxy.""" + """Creator for the ``note`` association proxy.""" if isinstance(val, str): return TaskInstanceNote(content=val) if isinstance(val, dict): @@ -342,172 +305,6 @@ def _creator_note(val): return TaskInstanceNote(*val) -@provide_session -def _record_task_map_for_downstreams( - *, - task_instance: TaskInstance, - task: Operator, - value: Any, - session: Session, -) -> None: - """ - Record the task map for downstream tasks. - - :param task_instance: the task instance - :param task: The task object - :param dag: the dag associated with the task - :param value: The value - :param session: SQLAlchemy ORM Session - - :meta private: - """ - from airflow.sdk.definitions.mappedoperator import MappedOperator, is_mappable_value - - if next(task.iter_mapped_dependants(), None) is None: # No mapped dependants, no need to validate. 
-        return
-    # TODO: We don't push TaskMap for mapped task instances because it's not
-    # currently possible for a downstream to depend on one individual mapped
-    # task instance. This will change when we implement task mapping inside
-    # a mapped task group, and we'll need to further analyze the case.
-    if isinstance(task, MappedOperator):
-        return
-    if value is None:
-        raise XComForMappingNotPushed()
-    if not is_mappable_value(value):
-        raise UnmappableXComTypePushed(value)
-    task_map = TaskMap.from_task_instance_xcom(task_instance, value)
-    max_map_length = conf.getint("core", "max_map_length", fallback=1024)
-    if task_map.length > max_map_length:
-        raise UnmappableXComLengthPushed(value, max_map_length)
-    session.merge(task_map)
-
-
-def _get_email_subject_content(
-    *,
-    task_instance: TaskInstance | RuntimeTaskInstanceProtocol,
-    exception: BaseException,
-    task: BaseOperator | None = None,
-) -> tuple[str, str, str]:
-    """
-    Get the email subject content for exceptions.
-
-    :param task_instance: the task instance
-    :param exception: the exception sent in the email
-    :param task:
-
-    :meta private:
-    """
-    # For a ti from DB (without ti.task), return the default value
-    if task is None:
-        task = getattr(task_instance, "task")
-    use_default = task is None
-    exception_html = str(exception).replace("\n", "<br>")
-
-    default_subject = "Airflow alert: {{ti}}"
-    # For reporting purposes, we report based on 1-indexed,
-    # not 0-indexed lists (i.e. Try 1 instead of
-    # Try 0 for the first attempt).
-    default_html_content = (
-        "Try {{try_number}} out of {{max_tries + 1}}<br>"
-        "Exception:<br>{{exception_html}}<br>"
-        'Log: <a href="{{ti.log_url}}">Link</a><br>'
-        "Host: {{ti.hostname}}<br>"
-        'Mark success: <a href="{{ti.mark_success_url}}">Link</a><br>'
-    )
-
-    default_html_content_err = (
-        "Try {{try_number}} out of {{max_tries + 1}}<br>"
-        "Exception:<br>Failed attempt to attach error logs<br>"
-        'Log: <a href="{{ti.log_url}}">Link</a><br>'
-        "Host: {{ti.hostname}}<br>"
-        'Mark success: <a href="{{ti.mark_success_url}}">Link</a><br>'
-    )
-
-    additional_context: dict[str, Any] = {
-        "exception": exception,
-        "exception_html": exception_html,
-        "try_number": task_instance.try_number,
-        "max_tries": task_instance.max_tries,
-    }
-
-    if use_default:
-        default_context = {"ti": task_instance, **additional_context}
-        jinja_env = jinja2.Environment(
-            loader=jinja2.FileSystemLoader(os.path.dirname(__file__)), autoescape=True
-        )
-        subject = jinja_env.from_string(default_subject).render(**default_context)
-        html_content = jinja_env.from_string(default_html_content).render(**default_context)
-        html_content_err = jinja_env.from_string(default_html_content_err).render(**default_context)
-
-    else:
-        from airflow.sdk.definitions._internal.templater import SandboxedEnvironment
-        from airflow.utils.context import context_merge
-
-        if TYPE_CHECKING:
-            assert task_instance.task
-
-        # Use the DAG's get_template_env() to set force_sandboxed. Don't add
-        # the flag to the function on task object -- that function can be
-        # overridden, and adding a flag breaks backward compatibility.
-        dag = task_instance.task.get_dag()
-        if dag:
-            jinja_env = dag.get_template_env(force_sandboxed=True)
-        else:
-            jinja_env = SandboxedEnvironment(cache_size=0)
-        jinja_context = task_instance.get_template_context()
-        context_merge(jinja_context, additional_context)
-
-        def render(key: str, content: str) -> str:
-            if conf.has_option("email", key):
-                path = conf.get_mandatory_value("email", key)
-                try:
-                    with open(path) as f:
-                        content = f.read()
-                except FileNotFoundError:
-                    log.warning("Could not find email template file '%s'. Using defaults...", path)
-                except OSError:
-                    log.exception("Error while using email template %s. Using defaults...", path)
-            return render_template_to_string(jinja_env.from_string(content), jinja_context)
-
-        subject = render("subject_template", default_subject)
-        html_content = render("html_content_template", default_html_content)
-        html_content_err = render("html_content_template", default_html_content_err)
-
-    return subject, html_content, html_content_err
-
-
-def _run_finished_callback(
-    *,
-    callbacks: None | TaskStateChangeCallback | Sequence[TaskStateChangeCallback],
-    context: Context,
-) -> None:
-    """
-    Run callback after task finishes.
-
-    :param callbacks: callbacks to run
-    :param context: callbacks context
-
-    :meta private:
-    """
-    if callbacks:
-        callbacks = callbacks if isinstance(callbacks, Sequence) else [callbacks]
-
-        def get_callback_representation(callback: TaskStateChangeCallback) -> Any:
-            with contextlib.suppress(AttributeError):
-                return callback.__name__
-            with contextlib.suppress(AttributeError):
-                return callback.__class__.__name__
-            return callback
-
-        for idx, callback in enumerate(callbacks):
-            callback_repr = get_callback_representation(callback)
-            log.info("Executing callback at index %d: %s", idx, callback_repr)
-            try:
-                callback(context)
-            except Exception:
-                log.exception("Error in callback at index %d: %s", idx, callback_repr)
-
-
 def _log_state(*, task_instance: TaskInstance, lead_msg: str = "") -> None:
     """
     Log task state.
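# --- Editor's note --------------------------------------------------------
# A self-contained illustration (not Airflow code) of how the removed
# _get_email_subject_content() above rendered its defaults: the subject and
# HTML bodies are plain Jinja2 string templates evaluated against a context
# dict. The context values here are invented for the example.
import jinja2

default_subject = "Airflow alert: {{ti}}"
default_html_content = (
    "Try {{try_number}} out of {{max_tries + 1}}<br>"
    "Exception:<br>{{exception_html}}<br>"
)

env = jinja2.Environment(cache_size=0)
context = {
    "ti": "example_dag.example_task manual__2024-01-01",
    "try_number": 1,
    "max_tries": 2,
    "exception_html": "ValueError('boom')",
}
print(env.from_string(default_subject).render(**context))
# Airflow alert: example_dag.example_task manual__2024-01-01
print(env.from_string(default_html_content).render(**context))
# Try 1 out of 3<br>Exception:<br>ValueError('boom')<br>
# ---------------------------------------------------------------------------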
@@ -629,7 +426,10 @@ class TaskInstance(Base, LoggingMixin): next_kwargs = Column(MutableDict.as_mutable(ExtendedJSON)) _task_display_property_value = Column("task_display_name", String(2000), nullable=True) - dag_version_id = Column(UUIDType(binary=False), ForeignKey("dag_version.id", ondelete="CASCADE")) + dag_version_id = Column( + UUIDType(binary=False), + ForeignKey("dag_version.id", ondelete="RESTRICT"), + ) dag_version = relationship("DagVersion", back_populates="task_instances") __table_args__ = ( @@ -669,6 +469,8 @@ class TaskInstance(Base, LoggingMixin): triggerer_job = association_proxy("trigger", "triggerer_job") dag_run = relationship("DagRun", back_populates="task_instances", lazy="joined", innerjoin=True) rendered_task_instance_fields = relationship("RenderedTaskInstanceFields", lazy="noload", uselist=False) + hitl_detail = relationship("HITLDetail", lazy="noload", uselist=False) + run_after = association_proxy("dag_run", "run_after") logical_date = association_proxy("dag_run", "logical_date") task_instance_note = relationship( @@ -693,10 +495,10 @@ class TaskInstance(Base, LoggingMixin): def __init__( self, task: Operator, + dag_version_id: UUIDType | uuid.UUID, run_id: str | None = None, state: str | None = None, map_index: int = -1, - dag_version_id: UUIDType | None = None, ): super().__init__() self.dag_id = task.dag_id @@ -706,7 +508,6 @@ def __init__( self.refresh_from_task(task) if TYPE_CHECKING: assert self.task - # init_on_load will config the log self.init_on_load() @@ -736,7 +537,7 @@ def stats_tags(self) -> dict[str, str]: @staticmethod def insert_mapping( - run_id: str, task: Operator, map_index: int, dag_version_id: UUIDType | None + run_id: str, task: Operator, map_index: int, dag_version_id: UUIDType ) -> dict[str, Any]: """ Insert mapping. 
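# --- Editor's note --------------------------------------------------------
# Toy demonstration (sqlite3; not Airflow's real schema) of what switching
# the dag_version_id foreign key above from ondelete="CASCADE" to
# ondelete="RESTRICT" buys: a dag_version row that task instances still
# reference can no longer be deleted out from under them.
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("PRAGMA foreign_keys = ON")  # SQLite enforces FKs only when enabled
con.execute("CREATE TABLE dag_version (id INTEGER PRIMARY KEY)")
con.execute(
    "CREATE TABLE task_instance ("
    " id INTEGER PRIMARY KEY,"
    " dag_version_id INTEGER REFERENCES dag_version(id) ON DELETE RESTRICT)"
)
con.execute("INSERT INTO dag_version (id) VALUES (1)")
con.execute("INSERT INTO task_instance (id, dag_version_id) VALUES (10, 1)")
try:
    con.execute("DELETE FROM dag_version WHERE id = 1")
except sqlite3.IntegrityError as exc:
    print("delete blocked:", exc)  # FOREIGN KEY constraint failed
# ---------------------------------------------------------------------------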
@@ -744,7 +545,7 @@ def insert_mapping( :meta private: """ priority_weight = task.weight_rule.get_weight( - TaskInstance(task=task, run_id=run_id, map_index=map_index) + TaskInstance(task=task, run_id=run_id, map_index=map_index, dag_version_id=dag_version_id) ) return { @@ -791,21 +592,6 @@ def rendered_map_index(self) -> str | None: return str(self.map_index) return None - @classmethod - def from_runtime_ti(cls, runtime_ti: RuntimeTaskInstanceProtocol) -> TaskInstance: - if runtime_ti.map_index is None: - runtime_ti.map_index = -1 - ti = TaskInstance( - run_id=runtime_ti.run_id, - task=runtime_ti.task, # type: ignore[arg-type] - map_index=runtime_ti.map_index, - ) - - if TYPE_CHECKING: - assert ti - assert isinstance(ti, TaskInstance) - return ti - def to_runtime_ti(self, context_from_server) -> RuntimeTaskInstanceProtocol: from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance @@ -821,162 +607,11 @@ def to_runtime_ti(self, context_from_server) -> RuntimeTaskInstanceProtocol: hostname=self.hostname, _ti_context_from_server=context_from_server, start_date=self.start_date, + dag_version_id=self.dag_version_id, ) return runtime_ti - @staticmethod - def _command_as_list( - ti: TaskInstance, - mark_success: bool = False, - ignore_all_deps: bool = False, - ignore_task_deps: bool = False, - ignore_depends_on_past: bool = False, - wait_for_past_depends_before_skipping: bool = False, - ignore_ti_state: bool = False, - local: bool = False, - raw: bool = False, - pool: str | None = None, - cfg_path: str | None = None, - ) -> list[str]: - dag: DAG | DagModel | None - # Use the dag if we have it, else fallback to the ORM dag_model, which might not be loaded - if hasattr(ti, "task") and getattr(ti.task, "dag", None) is not None: - if TYPE_CHECKING: - assert ti.task - assert isinstance(ti.task.dag, SchedulerDAG) - dag = ti.task.dag - else: - dag = ti.dag_model - - if dag is None: - raise ValueError("DagModel is empty") - - path = None - if dag.relative_fileloc: - path = Path(dag.relative_fileloc) - - if path: - if not path.is_absolute(): - path = "DAGS_FOLDER" / path - - return TaskInstance.generate_command( - ti.dag_id, - ti.task_id, - run_id=ti.run_id, - mark_success=mark_success, - ignore_all_deps=ignore_all_deps, - ignore_task_deps=ignore_task_deps, - ignore_depends_on_past=ignore_depends_on_past, - wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, - ignore_ti_state=ignore_ti_state, - local=local, - file_path=path, - raw=raw, - pool=pool, - cfg_path=cfg_path, - map_index=ti.map_index, - ) - - def command_as_list( - self, - mark_success: bool = False, - ignore_all_deps: bool = False, - ignore_task_deps: bool = False, - ignore_depends_on_past: bool = False, - wait_for_past_depends_before_skipping: bool = False, - ignore_ti_state: bool = False, - local: bool = False, - raw: bool = False, - pool: str | None = None, - cfg_path: str | None = None, - ) -> list[str]: - """ - Return a command that can be executed anywhere where airflow is installed. - - This command is part of the message sent to executors by the orchestrator. 
- """ - return TaskInstance._command_as_list( - ti=self, - mark_success=mark_success, - ignore_all_deps=ignore_all_deps, - ignore_task_deps=ignore_task_deps, - ignore_depends_on_past=ignore_depends_on_past, - wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping, - ignore_ti_state=ignore_ti_state, - local=local, - raw=raw, - pool=pool, - cfg_path=cfg_path, - ) - - @staticmethod - def generate_command( - dag_id: str, - task_id: str, - run_id: str, - mark_success: bool = False, - ignore_all_deps: bool = False, - ignore_depends_on_past: bool = False, - wait_for_past_depends_before_skipping: bool = False, - ignore_task_deps: bool = False, - ignore_ti_state: bool = False, - local: bool = False, - file_path: PurePath | str | None = None, - raw: bool = False, - pool: str | None = None, - cfg_path: str | None = None, - map_index: int = -1, - ) -> list[str]: - """ - Generate the shell command required to execute this task instance. - - :param dag_id: DAG ID - :param task_id: Task ID - :param run_id: The run_id of this task's DagRun - :param mark_success: Whether to mark the task as successful - :param ignore_all_deps: Ignore all ignorable dependencies. - Overrides the other ignore_* parameters. - :param ignore_depends_on_past: Ignore depends_on_past parameter of DAGs - (e.g. for Backfills) - :param wait_for_past_depends_before_skipping: Wait for past depends before marking the ti as skipped - :param ignore_task_deps: Ignore task-specific dependencies such as depends_on_past - and trigger rule - :param ignore_ti_state: Ignore the task instance's previous failure/success - :param local: Whether to run the task locally - :param file_path: path to the file containing the DAG definition - :param raw: raw mode (needs more details) - :param pool: the Airflow pool that the task should run in - :param cfg_path: the Path to the configuration file - :return: shell command that can be used to run the task instance - """ - cmd = ["airflow", "tasks", "run", dag_id, task_id, run_id] - if mark_success: - cmd.extend(["--mark-success"]) - if ignore_all_deps: - cmd.extend(["--ignore-all-dependencies"]) - if ignore_task_deps: - cmd.extend(["--ignore-dependencies"]) - if ignore_depends_on_past: - cmd.extend(["--depends-on-past", "ignore"]) - elif wait_for_past_depends_before_skipping: - cmd.extend(["--depends-on-past", "wait"]) - if ignore_ti_state: - cmd.extend(["--force"]) - if local: - cmd.extend(["--local"]) - if pool: - cmd.extend(["--pool", pool]) - if raw: - cmd.extend(["--raw"]) - if file_path: - cmd.extend(["--subdir", os.fspath(file_path)]) - if cfg_path: - cmd.extend(["--cfg-path", cfg_path]) - if map_index != -1: - cmd.extend(["--map-index", str(map_index)]) - return cmd - @property def log_url(self) -> str: """Log URL for TaskInstance.""" @@ -1052,8 +687,7 @@ def refresh_from_db( """ query = select( # Select the columns, not the ORM object, to bypass any session/ORM caching layer - c - for c in TaskInstance.__table__.columns + *TaskInstance.__table__.columns ).filter_by( dag_id=self.dag_id, run_id=self.run_id, @@ -1108,7 +742,7 @@ def refresh_from_task(self, task: Operator, pool_override: str | None = None) -> self.pool_slots = task.pool_slots with contextlib.suppress(Exception): # This method is called from the different places, and sometimes the TI is not fully initialized - self.priority_weight = self.task.weight_rule.get_weight(self) # type: ignore[arg-type] + self.priority_weight = self.task.weight_rule.get_weight(self) self.run_as_user = task.run_as_user # Do not set max_tries 
to task.retries here because max_tries is a cumulative # value that needs to be stored in the db. @@ -1119,36 +753,6 @@ def refresh_from_task(self, task: Operator, pool_override: str | None = None) -> # Re-apply cluster policy here so that task default do not overload previous data task_instance_mutation_hook(self) - @staticmethod - @provide_session - def _clear_xcom_data(ti: TaskInstance, session: Session = NEW_SESSION) -> None: - """ - Clear all XCom data from the database for the task instance. - - If the task is unmapped, all XComs matching this task ID in the same DAG - run are removed. If the task is mapped, only the one with matching map - index is removed. - - :param ti: The TI for which we need to clear xcoms. - :param session: SQLAlchemy ORM Session - """ - ti.log.debug("Clearing XCom data") - if ti.map_index < 0: - map_index: int | None = None - else: - map_index = ti.map_index - XComModel.clear( - dag_id=ti.dag_id, - task_id=ti.task_id, - run_id=ti.run_id, - map_index=map_index, - session=session, - ) - - @provide_session - def clear_xcom_data(self, session: Session = NEW_SESSION): - self._clear_xcom_data(ti=self, session=session) - @property def key(self) -> TaskInstanceKey: """Returns a tuple that identifies the task instance uniquely.""" @@ -1316,18 +920,7 @@ def are_dependencies_met( def get_failed_dep_statuses(self, dep_context: DepContext | None = None, session: Session = NEW_SESSION): """Get failed Dependencies.""" if TYPE_CHECKING: - assert isinstance(self.task, BaseOperator) - - if not hasattr(self.task, "deps"): - # These deps are not on BaseOperator since they are only needed and evaluated - # in the scheduler and not needed at the Runtime. - from airflow.serialization.serialized_objects import SerializedBaseOperator - - serialized_op = SerializedBaseOperator.deserialize_operator( - SerializedBaseOperator.serialize_operator(self.task) - ) - setattr(self.task, "deps", serialized_op.deps) # type: ignore[union-attr] - + assert self.task is not None dep_context = dep_context or DepContext() for dep in dep_context.deps | self.task.deps: for dep_status in dep.get_dep_statuses(self, session, dep_context): @@ -1358,12 +951,18 @@ def next_retry_datetime(self): delay = self.task.retry_delay if self.task.retry_exponential_backoff: - # If the min_backoff calculation is below 1, it will be converted to 0 via int. Thus, - # we must round up prior to converting to an int, otherwise a divide by zero error - # will occur in the modded_hash calculation. - # this probably gives unexpected results if a task instance has previously been cleared, - # because try_number can increase without bound - min_backoff = math.ceil(delay.total_seconds() * (2 ** (self.try_number - 1))) + try: + # If the min_backoff calculation is below 1, it will be converted to 0 via int. Thus, + # we must round up prior to converting to an int, otherwise a divide by zero error + # will occur in the modded_hash calculation. + # this probably gives unexpected results if a task instance has previously been cleared, + # because try_number can increase without bound + min_backoff = math.ceil(delay.total_seconds() * (2 ** (self.try_number - 1))) + except OverflowError: + min_backoff = MAX_RETRY_DELAY + self.log.warning( + "OverflowError occurred while calculating min_backoff, using MAX_RETRY_DELAY for min_backoff." + ) # In the case when delay.total_seconds() is 0, min_backoff will not be rounded up to 1. # To address this, we impose a lower bound of 1 on min_backoff. 
This effectively makes @@ -1475,7 +1074,7 @@ def _check_and_change_state_before_execution( ti: TaskInstance = task_instance task = task_instance.task if TYPE_CHECKING: - assert task + assert isinstance(task, Operator) # TODO (GH-52141): This shouldn't be needed. ti.refresh_from_task(task, pool_override=pool) ti.test_mode = test_mode ti.refresh_from_db(session=session, lock_for_update=True) @@ -1564,7 +1163,7 @@ def _check_and_change_state_before_execution( # Closing all pooled connections to prevent # "max number of connections reached" - settings.engine.dispose() # type: ignore + settings.engine.dispose() if verbose: if mark_success: cls.logger().info("Marking success for %s on %s", ti.task, ti.logical_date) @@ -1661,184 +1260,25 @@ def clear_next_method_args(self) -> None: self.next_kwargs = None @provide_session - @Sentry.enrich_errors def _run_raw_task( self, mark_success: bool = False, - test_mode: bool = False, - pool: str | None = None, - raise_on_defer: bool = False, session: Session = NEW_SESSION, - ) -> TaskReturnCode | None: - """ - Run a task, update the state upon completion, and run any appropriate callbacks. - - Immediately runs the task (without checking or changing db state - before execution) and then sets the appropriate final state after - completion and runs any post-execute callbacks. Meant to be called - only after another function changes the state to running. - - :param mark_success: Don't run the task, mark its state as success - :param test_mode: Doesn't record success or failure in the DB - :param pool: specifies the pool to use to run the task instance - :param session: SQLAlchemy ORM Session - """ - if TYPE_CHECKING: - assert self.task - - if TYPE_CHECKING: - assert isinstance(self.task, BaseOperator) - - self.test_mode = test_mode - self.refresh_from_task(self.task, pool_override=pool) - self.refresh_from_db(session=session) - self.hostname = get_hostname() - self.pid = os.getpid() - if not test_mode: - TaskInstance.save_to_db(ti=self, session=session) - actual_start_date = timezone.utcnow() - Stats.incr(f"ti.start.{self.task.dag_id}.{self.task.task_id}", tags=self.stats_tags) - # Same metric with tagging - Stats.incr("ti.start", tags=self.stats_tags) - # Initialize final state counters at zero - for state in State.task_states: - Stats.incr( - f"ti.finish.{self.task.dag_id}.{self.task.task_id}.{state}", - count=0, - tags=self.stats_tags, - ) - # Same metric with tagging - Stats.incr( - "ti.finish", - count=0, - tags={**self.stats_tags, "state": str(state)}, - ) - with set_current_task_instance_session(session=session): - self.task = self.task.prepare_for_execution() - context = self.get_template_context(ignore_param_exceptions=False, session=session) - - try: - if self.task: - from airflow.sdk.definitions.asset import Asset - - inlets = [asset.asprofile() for asset in self.task.inlets if isinstance(asset, Asset)] - outlets = [asset.asprofile() for asset in self.task.outlets if isinstance(asset, Asset)] - TaskInstance.validate_inlet_outlet_assets_activeness(inlets, outlets, session=session) - if not mark_success: - TaskInstance._execute_task_with_callbacks( - self=self, # type: ignore[arg-type] - context=context, - test_mode=test_mode, - session=session, - ) - if not test_mode: - self.refresh_from_db(lock_for_update=True, session=session, keep_local_changes=True) - self.state = TaskInstanceState.SUCCESS - except TaskDeferred as defer: - # The task has signalled it wants to defer execution based on - # a trigger. 
- if raise_on_defer: - raise - self.defer_task(exception=defer, session=session) - self.log.info( - "Pausing task as DEFERRED. dag_id=%s, task_id=%s, run_id=%s, logical_date=%s, start_date=%s", - self.dag_id, - self.task_id, - self.run_id, - _date_or_empty(task_instance=self, attr="logical_date"), - _date_or_empty(task_instance=self, attr="start_date"), - ) - return TaskReturnCode.DEFERRED - except AirflowSkipException as e: - # Recording SKIP - # log only if exception has any arguments to prevent log flooding - if e.args: - self.log.info(e) - if not test_mode: - self.refresh_from_db(lock_for_update=True, session=session, keep_local_changes=True) - self.state = TaskInstanceState.SKIPPED - _run_finished_callback(callbacks=self.task.on_skipped_callback, context=context) - TaskInstance.save_to_db(ti=self, session=session) - except AirflowRescheduleException as reschedule_exception: - self._handle_reschedule(actual_start_date, reschedule_exception, test_mode, session=session) - self.log.info("Rescheduling task, marking task as UP_FOR_RESCHEDULE") - return None - except (AirflowFailException, AirflowSensorTimeout) as e: - # If AirflowFailException is raised, task should not retry. - # If a sensor in reschedule mode reaches timeout, task should not retry. - self.handle_failure( - e, test_mode, context, force_fail=True, session=session - ) # already saves to db - raise - except (AirflowTaskTimeout, AirflowException, AirflowTaskTerminated) as e: - if not test_mode: - self.refresh_from_db(lock_for_update=True, session=session) - # for case when task is marked as success/failed externally - # or dagrun timed out and task is marked as skipped - # current behavior doesn't hit the callbacks - if self.state in State.finished: - self.clear_next_method_args() - TaskInstance.save_to_db(ti=self, session=session) - return None - self.handle_failure(e, test_mode, context, session=session) - raise - except SystemExit as e: - # We have already handled SystemExit with success codes (0 and None) in the `_execute_task`. - # Therefore, here we must handle only error codes. - msg = f"Task failed due to SystemExit({e.code})" - self.handle_failure(msg, test_mode, context, session=session) - raise AirflowException(msg) - except BaseException as e: - self.handle_failure(e, test_mode, context, session=session) - raise - finally: - # Print a marker post execution for internals of post task processing - log.info("::group::Post task execution logs") - - Stats.incr( - f"ti.finish.{self.dag_id}.{self.task_id}.{self.state}", - tags=self.stats_tags, - ) - # Same metric with tagging - Stats.incr("ti.finish", tags={**self.stats_tags, "state": str(self.state)}) - - # Recording SKIPPED or SUCCESS - self.clear_next_method_args() - self.end_date = timezone.utcnow() - _log_state(task_instance=self) - self.set_duration() - - # run on_success_callback before db committing - # otherwise, the LocalTaskJob sees the state is changed to `success`, - # but the task_runner is still running, LocalTaskJob then treats the state is set externally! 
- if self.state == TaskInstanceState.SUCCESS: - _run_finished_callback(callbacks=self.task.on_success_callback, context=context) - - if not test_mode: - _add_log(event=self.state, task_instance=self, session=session) - if self.state == TaskInstanceState.SUCCESS: - from airflow.sdk.execution_time.task_runner import ( - _build_asset_profiles, - _serialize_outlet_events, - ) - - TaskInstance.register_asset_changes_in_db( - self, - list(_build_asset_profiles(self.task.outlets)), - list(_serialize_outlet_events(context["outlet_events"])), - session=session, - ) + **kwargs: Any, + ) -> None: + """Only kept for tests.""" + from airflow.sdk.definitions.dag import _run_task - TaskInstance.save_to_db(ti=self, session=session) - if self.state == TaskInstanceState.SUCCESS: - try: - get_listener_manager().hook.on_task_instance_success( - previous_state=TaskInstanceState.RUNNING, task_instance=self - ) - except Exception: - log.exception("error calling listener") + if mark_success: + self.set_state(TaskInstanceState.SUCCESS) + log.info("[DAG TEST] Marking success for %s ", self.task_id) return None + taskrun_result = _run_task(ti=self, task=self.task) + if taskrun_result is not None and taskrun_result.error: + raise taskrun_result.error + return None + @staticmethod @provide_session def register_asset_changes_in_db( @@ -1979,252 +1419,6 @@ def update_rtif(self, rendered_fields, session: Session = NEW_SESSION): session.flush() RenderedTaskInstanceFields.delete_old_records(self.task_id, self.dag_id, session=session) - def _execute_task_with_callbacks(self, context: Context, test_mode: bool = False, *, session: Session): - """Prepare Task for Execution.""" - from airflow.sdk.execution_time.callback_runner import create_executable_runner - from airflow.sdk.execution_time.context import context_get_outlet_events - - if TYPE_CHECKING: - assert self.task - - parent_pid = os.getpid() - - def signal_handler(signum, frame): - pid = os.getpid() - - # If a task forks during execution (from DAG code) for whatever - # reason, we want to make sure that we react to the signal only in - # the process that we've spawned ourselves (referred to here as the - # parent process). - if pid != parent_pid: - os._exit(1) - return - self.log.error("Received SIGTERM. Terminating subprocesses.") - self.log.error("Stacktrace: \n%s", "".join(traceback.format_stack())) - self.task.on_kill() - raise AirflowTaskTerminated( - f"Task received SIGTERM signal {self.task_id=} {self.dag_id=} {self.run_id=} {self.map_index=}" - ) - - signal.signal(signal.SIGTERM, signal_handler) - - # Don't clear Xcom until the task is certain to execute, and check if we are resuming from deferral. - if not self.next_method: - self.clear_xcom_data() - - with ( - Stats.timer(f"dag.{self.task.dag_id}.{self.task.task_id}.duration"), - Stats.timer("task.duration", tags=self.stats_tags), - ): - # Set the validated/merged params on the task object. - self.task.params = context["params"] - - with set_current_context(context): - dag = self.task.get_dag() - if dag is not None: - jinja_env = dag.get_template_env() - else: - jinja_env = None - task_orig = self.render_templates(context=context, jinja_env=jinja_env) - - # The task is never MappedOperator at this point. - if TYPE_CHECKING: - assert isinstance(self.task, BaseOperator) - - if not test_mode: - rendered_fields = get_serialized_template_fields(task=self.task) - self.update_rtif(rendered_fields=rendered_fields) - # Export context to make it available for operators to use. 
- airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True) - os.environ.update(airflow_context_vars) - - # Log context only for the default execution method, the assumption - # being that otherwise we're resuming a deferred task (in which - # case there's no need to log these again). - if not self.next_method: - self.log.info( - "Exporting env vars: %s", - " ".join(f"{k}={v!r}" for k, v in airflow_context_vars.items()), - ) - - # Run pre_execute callback - if self.task._pre_execute_hook: - create_executable_runner( - self.task._pre_execute_hook, - context_get_outlet_events(context), - logger=self.log, - ).run(context) - create_executable_runner( - self.task.pre_execute, - context_get_outlet_events(context), - logger=self.log, - ).run(context) - - # Run on_execute callback - self._run_execute_callback(context, self.task) - - # Run on_task_instance_running event - try: - get_listener_manager().hook.on_task_instance_running( - previous_state=TaskInstanceState.QUEUED, task_instance=self - ) - except Exception: - log.exception("error calling listener") - - def _render_map_index(context: Context, *, jinja_env: jinja2.Environment | None) -> str | None: - """Render named map index if the DAG author defined map_index_template at the task level.""" - if jinja_env is None or (template := context.get("map_index_template")) is None: - return None - rendered_map_index = jinja_env.from_string(template).render(context) - log.debug("Map index rendered as %s", rendered_map_index) - return rendered_map_index - - # Execute the task. - with set_current_context(context): - try: - result = self._execute_task(context, task_orig) - except Exception: - # If the task failed, swallow rendering error so it doesn't mask the main error. - with contextlib.suppress(jinja2.TemplateSyntaxError, jinja2.UndefinedError): - self._rendered_map_index = _render_map_index(context, jinja_env=jinja_env) - raise - else: # If the task succeeded, render normally to let rendering error bubble up. - self._rendered_map_index = _render_map_index(context, jinja_env=jinja_env) - - # Run post_execute callback - if self.task._post_execute_hook: - create_executable_runner( - self.task._post_execute_hook, - context_get_outlet_events(context), - logger=self.log, - ).run(context, result) - create_executable_runner( - self.task.post_execute, - context_get_outlet_events(context), - logger=self.log, - ).run(context, result) - - Stats.incr(f"operator_successes_{self.task.task_type}", tags=self.stats_tags) - # Same metric with tagging - Stats.incr("operator_successes", tags={**self.stats_tags, "task_type": self.task.task_type}) - Stats.incr("ti_successes", tags=self.stats_tags) - - def _execute_task(self, context: Context, task_orig: Operator): - """ - Execute Task (optionally with a Timeout) and push Xcom results. 
- - :param context: Jinja2 context - :param task_orig: origin task - """ - from airflow.sdk.bases.operator import ExecutorSafeguard - from airflow.sdk.definitions.mappedoperator import MappedOperator - - task_to_execute = self.task - - if TYPE_CHECKING: - # TODO: TaskSDK this function will need 100% re-writing - # This only works with a "rich" BaseOperator, not the SDK version - assert isinstance(task_to_execute, BaseOperator) - - if isinstance(task_to_execute, MappedOperator): - raise AirflowException("MappedOperator cannot be executed.") - - # If the task has been deferred and is being executed due to a trigger, - # then we need to pick the right method to come back to, otherwise - # we go for the default execute - execute_callable_kwargs: dict[str, Any] = {} - execute_callable: Callable - if self.next_method: - execute_callable = task_to_execute.resume_execution - execute_callable_kwargs["next_method"] = self.next_method - # We don't want modifictions we make here to be tracked by SQLA - execute_callable_kwargs["next_kwargs"] = {**(self.next_kwargs or {})} - if self.next_method == "execute": - execute_callable_kwargs["next_kwargs"][f"{task_to_execute.__class__.__name__}__sentinel"] = ( - ExecutorSafeguard.sentinel_value - ) - else: - execute_callable = task_to_execute.execute - if execute_callable.__name__ == "execute": - execute_callable_kwargs[f"{task_to_execute.__class__.__name__}__sentinel"] = ( - ExecutorSafeguard.sentinel_value - ) - - def _execute_callable(context: Context, **execute_callable_kwargs): - from airflow.sdk.execution_time.callback_runner import create_executable_runner - from airflow.sdk.execution_time.context import context_get_outlet_events - - try: - # Print a marker for log grouping of details before task execution - log.info("::endgroup::") - - return create_executable_runner( - execute_callable, - context_get_outlet_events(context), - logger=log, - ).run(context=context, **execute_callable_kwargs) - except SystemExit as e: - # Handle only successful cases here. Failure cases will be handled upper - # in the exception chain. - if e.code is not None and e.code != 0: - raise - return None - - # If a timeout is specified for the task, make it fail - # if it goes beyond - if task_to_execute.execution_timeout: - # If we are coming in with a next_method (i.e. from a deferral), - # calculate the timeout from our start_date. - if self.next_method and self.start_date: - timeout_seconds = ( - task_to_execute.execution_timeout - (timezone.utcnow() - self.start_date) - ).total_seconds() - else: - timeout_seconds = task_to_execute.execution_timeout.total_seconds() - try: - # It's possible we're already timed out, so fast-fail if true - if timeout_seconds <= 0: - raise AirflowTaskTimeout() - # Run task in timeout wrapper - with timeout(timeout_seconds): - result = _execute_callable(context=context, **execute_callable_kwargs) - except AirflowTaskTimeout: - task_to_execute.on_kill() - raise - else: - result = _execute_callable(context=context, **execute_callable_kwargs) - cm = create_session() - with cm as session_or_null: - if task_to_execute.do_xcom_push: - xcom_value = result - else: - xcom_value = None - if xcom_value is not None: # If the task returns a result, push an XCom containing it. 
- if task_to_execute.multiple_outputs:
- if not isinstance(xcom_value, Mapping):
- raise AirflowException(
- f"Returned output was type {type(xcom_value)} "
- "expected dictionary for multiple_outputs"
- )
- for key in xcom_value.keys():
- if not isinstance(key, str):
- raise AirflowException(
- "Returned dictionary keys must be strings when using "
- f"multiple_outputs, found {key} ({type(key)}) instead"
- )
- for key, value in xcom_value.items():
- self.xcom_push(key=key, value=value, session=session_or_null)
- self.xcom_push(key=XCOM_RETURN_KEY, value=xcom_value, session=session_or_null)
- if TYPE_CHECKING:
- assert task_orig.dag
- _record_task_map_for_downstreams(
- task_instance=self,
- task=task_orig,
- value=xcom_value,
- session=session_or_null,
- )
- return result
-
 def update_heartbeat(self):
 with create_session() as session:
 session.execute(
@@ -2244,7 +1438,7 @@ def defer_task(self, exception: TaskDeferred | None, session: Session = NEW_SESS
 # TODO: TaskSDK add start_trigger_args to SDK definitions
 if TYPE_CHECKING:
- assert self.task is None or isinstance(self.task, BaseOperator)
+ assert self.task is not None
 timeout: timedelta | None
 if exception is not None:
@@ -2254,7 +1448,7 @@ def defer_task(self, exception: TaskDeferred | None, session: Session = NEW_SESS
 timeout = exception.timeout
 elif self.task is not None and self.task.start_trigger_args is not None:
 context = self.get_template_context()
- start_trigger_args = self.task.expand_start_trigger_args(context=context, session=session)
+ start_trigger_args = self.task.expand_start_trigger_args(context=context)
 if start_trigger_args is None:
 raise TaskDeferralError(
 "A non-'None' start_trigger_args has been changed to 'None' during expansion"
 )
@@ -2309,16 +1503,6 @@ def defer_task(self, exception: TaskDeferred | None, session: Session = NEW_SESS
 session.merge(self)
 session.commit()
- def _run_execute_callback(self, context: Context, task: BaseOperator) -> None:
- """Functions that need to be run before a Task is executed."""
- if not (callbacks := task.on_execute_callback):
- return
- for callback in callbacks if isinstance(callbacks, list) else [callbacks]:
- try:
- callback(context)
- except Exception:
- self.log.exception("Failed when executing execute callback")
-
 @provide_session
 def run(
 self,
@@ -2334,7 +1518,22 @@ def run(
 session: Session = NEW_SESSION,
 raise_on_defer: bool = False,
 ) -> None:
- """Run TaskInstance."""
+ """Run TaskInstance (only kept for tests)."""
+ # This method is only used in ti.run and dag.test and task.test.
+ # So doing the s10n/de-s10n dance to operate on the serialized task for the scheduler dep check part.
+ from airflow.serialization.serialized_objects import SerializedDAG
+
+ original_task = self.task
+ if TYPE_CHECKING:
+ assert original_task is not None
+ assert original_task.dag is not None
+
+ serialized_task = SerializedDAG.deserialize_dag(
+ SerializedDAG.serialize_dag(original_task.dag)
+ ).task_dict[original_task.task_id]
+ # TODO (GH-52141): task_dict in scheduler should contain scheduler
+ # types instead, but currently it inherits SDK's DAG.
+ self.task = cast("Operator", serialized_task) res = self.check_and_change_state_before_execution( verbose=verbose, ignore_all_deps=ignore_all_deps, @@ -2347,95 +1546,18 @@ def run( pool=pool, session=session, ) + self.task = original_task if not res: return - self._run_raw_task( - mark_success=mark_success, - test_mode=test_mode, - pool=pool, - session=session, - raise_on_defer=raise_on_defer, - ) - - def dry_run(self) -> None: - """Only Renders Templates for the TI.""" - if TYPE_CHECKING: - assert self.task - - self.task = self.task.prepare_for_execution() - self.render_templates() - if TYPE_CHECKING: - assert isinstance(self.task, BaseOperator) - self.task.dry_run() - - @provide_session - def _handle_reschedule( - self, - actual_start_date: datetime, - reschedule_exception: AirflowRescheduleException, - test_mode: bool = False, - session: Session = NEW_SESSION, - ): - # Don't record reschedule request in test mode - if test_mode: - return - - self.refresh_from_db(session) - - if TYPE_CHECKING: - assert self.task - - self.end_date = timezone.utcnow() - self.set_duration() - - # set state - self.state = TaskInstanceState.UP_FOR_RESCHEDULE - - self.clear_next_method_args() - - session.merge(self) - session.commit() - - # we add this in separate commit to reduce likelihood of deadlock - # see https://github.com/apache/airflow/pull/21362 for more info - session.add( - TaskReschedule( - self.id, - actual_start_date, - self.end_date, - reschedule_exception.reschedule_date, - ) - ) - session.commit() - return self - - @staticmethod - def get_truncated_error_traceback(error: BaseException, truncate_to: Callable) -> TracebackType | None: - """ - Truncate the traceback of an exception to the first frame called from within a given function. - - :param error: exception to get traceback from - :param truncate_to: Function to truncate TB to. Must have a ``__code__`` attribute - - :meta private: - """ - tb = error.__traceback__ - code = truncate_to.__func__.__code__ # type: ignore[attr-defined] - while tb is not None: - if tb.tb_frame.f_code is code: - return tb.tb_next - tb = tb.tb_next - return tb or error.__traceback__ + self._run_raw_task(mark_success=mark_success) @classmethod def fetch_handle_failure_context( cls, ti: TaskInstance, - error: None | str | BaseException, + error: None | str, test_mode: bool | None = None, - context: Context | None = None, - force_fail: bool = False, *, session: Session, fail_fast: bool = False, @@ -2446,17 +1568,11 @@ def fetch_handle_failure_context( :param ti: TaskInstance :param error: if specified, log the specific exception if thrown :param test_mode: doesn't record success or failure in the DB if True - :param context: Jinja2 context - :param force_fail: if True, task does not retry :param session: SQLAlchemy ORM Session :param fail_fast: if True, fail all downstream tasks """ if error: - if isinstance(error, BaseException): - tb = TaskInstance.get_truncated_error_traceback(error, truncate_to=ti._execute_task) - cls.logger().error("Task failed with exception", exc_info=(type(error), error, tb)) - else: - cls.logger().error("%s", error) + cls.logger().error("%s", error) if not test_mode: ti.refresh_from_db(session) @@ -2473,8 +1589,9 @@ def fetch_handle_failure_context( ti.clear_next_method_args() + context = None # In extreme cases (task instance heartbeat timeout in case of dag with parse error) we might _not_ have a Task. 
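The `run()` rewrite above round-trips the whole DAG through serialization so that the scheduler dep check operates on a serialized task rather than the SDK object. A minimal sketch of that round-trip, using only the `SerializedDAG` calls named in the patch (the helper name and standalone-function shape are illustrative, not part of the patch):

from airflow.serialization.serialized_objects import SerializedDAG

def scheduler_view_of(task):
    # Serialize the task's DAG to its dict form, rebuild it, and pull the
    # task back out; the rebuilt task_dict holds scheduler-side operators.
    data = SerializedDAG.serialize_dag(task.dag)
    scheduler_dag = SerializedDAG.deserialize_dag(data)
    return scheduler_dag.task_dict[task.task_id]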
- if context is None and getattr(ti, "task", None): + if getattr(ti, "task", None): context = ti.get_template_context(session) if context is not None: @@ -2492,16 +1609,15 @@ def fetch_handle_failure_context( # only mark task instance as FAILED if the next task instance # try_number exceeds the max_tries ... or if force_fail is truthy - task: BaseOperator | None = None + task: SerializedBaseOperator | None = None try: - if getattr(ti, "task", None) and context: - if TYPE_CHECKING: - assert isinstance(ti.task, BaseOperator) - task = ti.task.unmap((context, session)) + if (orig_task := getattr(ti, "task", None)) and context: + # TODO (GH-52141): Move runtime unmap into task runner. + task = orig_task.unmap((context, session)) except Exception: cls.logger().error("Unable to unmap task to determine if we need to send an alert email") - if force_fail or not ti.is_eligible_to_retry(): + if not ti.is_eligible_to_retry(): ti.state = TaskInstanceState.FAILED email_for_state = operator.attrgetter("email_on_failure") callbacks = task.on_failure_callback if task else None @@ -2545,20 +1661,16 @@ def save_to_db(ti: TaskInstance, session: Session = NEW_SESSION): @provide_session def handle_failure( self, - error: None | str | BaseException, + error: None | str, test_mode: bool | None = None, - context: Context | None = None, - force_fail: bool = False, session: Session = NEW_SESSION, ) -> None: """ Handle Failure for a task instance. :param error: if specified, log the specific exception if thrown - :param session: SQLAlchemy ORM Session :param test_mode: doesn't record success or failure in the DB if True - :param context: Jinja2 context - :param force_fail: if True, task does not retry + :param session: SQLAlchemy ORM Session """ if TYPE_CHECKING: assert self.task @@ -2570,31 +1682,28 @@ def handle_failure( if test_mode is None: test_mode = self.test_mode failure_context = TaskInstance.fetch_handle_failure_context( - ti=self, # type: ignore[arg-type] + ti=self, error=error, test_mode=test_mode, - context=context, - force_fail=force_fail, session=session, fail_fast=fail_fast, ) - _log_state(task_instance=self, lead_msg="Immediate failure requested. " if force_fail else "") + _log_state(task_instance=self) if ( - failure_context["task"] - and failure_context["email_for_state"](failure_context["task"]) - and failure_context["task"].email + (failure_task := failure_context["task"]) + and failure_context["email_for_state"](failure_task) + and (failure_email := failure_task.email) ): try: - self.email_alert(error, failure_context["task"]) - except Exception: - log.exception("Failed to send email to: %s", failure_context["task"].email) + import structlog - if failure_context["callbacks"] and failure_context["context"]: - _run_finished_callback( - callbacks=failure_context["callbacks"], - context=failure_context["context"], - ) + from airflow.sdk.execution_time.task_runner import _send_task_error_email + + log = structlog.get_logger(logger_name="task") + _send_task_error_email(failure_email, self, error, log=log) + except Exception: + log.exception("Failed to send email to: %s", failure_email) if not test_mode: TaskInstance.save_to_db(failure_context["ti"], session) @@ -2615,6 +1724,7 @@ def is_eligible_to_retry(self) -> bool: return bool(self.task.retries and self.try_number <= self.max_tries) + # TODO (GH-52141): We should remove this entire function (only makes sense at runtime). 
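The `is_eligible_to_retry` predicate above compresses the whole retry policy into one line. A worked example with hypothetical numbers (a task configured with `retries=3` normally ends up with `max_tries=3`):

def is_eligible_to_retry(retries: int, try_number: int, max_tries: int) -> bool:
    # Mirrors the predicate above: retry only if retries were configured at
    # all and the current attempt has not yet exhausted max_tries.
    return bool(retries and try_number <= max_tries)

assert is_eligible_to_retry(retries=3, try_number=2, max_tries=3)      # retries
assert not is_eligible_to_retry(retries=3, try_number=4, max_tries=3)  # exhausted
assert not is_eligible_to_retry(retries=0, try_number=1, max_tries=0)  # retries disabled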
def get_template_context( self, session: Session | None = None, @@ -2626,22 +1736,17 @@ def get_template_context( :param session: SQLAlchemy ORM Session :param ignore_param_exceptions: flag to suppress value exceptions while initializing the ParamsDict """ - if TYPE_CHECKING: - assert self.task - assert isinstance(self.task.dag, SchedulerDAG) - # Do not use provide_session here -- it expunges everything on exit! if not session: session = settings.Session() - from airflow import macros - from airflow.models.abstractoperator import NotMapped - from airflow.models.baseoperator import BaseOperator + from airflow.models.mappedoperator import get_mapped_ti_count from airflow.sdk.api.datamodels._generated import ( DagRun as DagRunSDK, PrevSuccessfulDagRunResponse, TIRunContext, ) + from airflow.sdk.definitions._internal.abstractoperator import NotMapped from airflow.sdk.definitions.param import process_params from airflow.sdk.execution_time.context import InletEventsAccessors from airflow.utils.context import ( @@ -2650,13 +1755,7 @@ def get_template_context( VariableAccessor, ) - integrate_macros_plugins() - - task = self.task if TYPE_CHECKING: - assert self.task - assert task - assert task.dag assert session def _get_dagrun(session: Session) -> DagRun: @@ -2676,11 +1775,11 @@ def _get_dagrun(session: Session) -> DagRun: return dag_run return session.merge(dag_run, load=False) + task: Any = self.task + dag = task.dag dag_run = _get_dagrun(session) - validated_params = process_params( - self.task.dag, task, dag_run.conf, suppress_exception=ignore_param_exceptions - ) + validated_params = process_params(dag, task, dag_run.conf, suppress_exception=ignore_param_exceptions) ti_context_from_server = TIRunContext( dag_run=DagRunSDK.model_validate(dag_run, from_attributes=True), max_tries=self.max_tries, @@ -2727,7 +1826,6 @@ def get_triggering_events() -> dict[str, list[AssetEvent]]: { "outlet_events": OutletEventAccessors(), "inlet_events": InletEventsAccessors(task.inlets), - "macros": macros, "params": validated_params, "prev_data_interval_start_success": get_prev_data_interval_start_success(), "prev_data_interval_end_success": get_prev_data_interval_end_success(), @@ -2747,9 +1845,7 @@ def get_triggering_events() -> dict[str, list[AssetEvent]]: ) try: - expanded_ti_count: int | None = BaseOperator.get_mapped_ti_count( - task, self.run_id, session=session - ) + expanded_ti_count: int | None = get_mapped_ti_count(task, self.run_id, session=session) context["expanded_ti_count"] = expanded_ti_count if expanded_ti_count: setattr( @@ -2757,7 +1853,7 @@ def get_triggering_events() -> dict[str, list[AssetEvent]]: "_upstream_map_indexes", { upstream.task_id: self.get_relevant_upstream_map_indexes( - upstream, + cast("Operator", upstream), expanded_ti_count, session=session, ) @@ -2769,50 +1865,9 @@ def get_triggering_events() -> dict[str, list[AssetEvent]]: return context - @provide_session - def get_rendered_template_fields(self, session: Session = NEW_SESSION) -> None: - """ - Update task with rendered template fields for presentation in UI. - - If task has already run, will fetch from DB; otherwise will render. 
- """ - from airflow.models.renderedtifields import RenderedTaskInstanceFields - - if TYPE_CHECKING: - assert isinstance(self.task, BaseOperator) - - rendered_task_instance_fields = RenderedTaskInstanceFields.get_templated_fields(self, session=session) - if rendered_task_instance_fields: - self.task = self.task.unmap(None) - for field_name, rendered_value in rendered_task_instance_fields.items(): - setattr(self.task, field_name, rendered_value) - return - - try: - # If we get here, either the task hasn't run or the RTIF record was purged. - from airflow.sdk.execution_time.secrets_masker import redact - - self.render_templates() - for field_name in self.task.template_fields: - rendered_value = getattr(self.task, field_name) - setattr(self.task, field_name, redact(rendered_value, field_name)) - except (TemplateAssertionError, UndefinedError) as e: - raise AirflowException( - "Webserver does not have access to User-defined Macros or Filters " - "when Dag Serialization is enabled. Hence for the task that have not yet " - "started running, please use 'airflow tasks render' for debugging the " - "rendering of template_fields." - ) from e - - def overwrite_params_with_dag_run_conf(self, params: dict, dag_run: DagRun): - """Overwrite Task Params with DagRun.conf.""" - if dag_run and dag_run.conf: - self.log.debug("Updating task params (%s) with DagRun.conf (%s)", params, dag_run.conf) - params.update(dag_run.conf) - - def render_templates( - self, context: Context | None = None, jinja_env: jinja2.Environment | None = None - ) -> Operator: + # TODO (GH-52141): We should remove this entire function (only makes sense at runtime). + # This is intentionally left untyped so Mypy complains less about this dead code. + def render_templates(self, context=None, jinja_env=None): """ Render templates in the operator fields. @@ -2826,49 +1881,16 @@ def render_templates( context = self.get_template_context() original_task = self.task - ti = context["ti"] - - if TYPE_CHECKING: - assert original_task - assert self.task - assert ti.task - # If self.task is mapped, this call replaces self.task to point to the # unmapped BaseOperator created by this function! This is because the # MappedOperator is useless for template rendering, and we need to be # able to access the unmapped task instead. original_task.render_template_fields(context, jinja_env) if isinstance(self.task, MappedOperator): - self.task = context["ti"].task # type: ignore[assignment] + self.task = context["ti"].task return original_task - def get_email_subject_content( - self, exception: BaseException, task: BaseOperator | None = None - ) -> tuple[str, str, str]: - """ - Get the email subject content for exceptions. - - :param exception: the exception sent in the email - :param task: - """ - return _get_email_subject_content(task_instance=self, exception=exception, task=task) - - def email_alert(self, exception, task: BaseOperator) -> None: - """ - Send alert email with exception information. 
- - :param exception: the exception - :param task: task related to the exception - """ - subject, html_content, html_content_err = self.get_email_subject_content(exception, task=task) - if TYPE_CHECKING: - assert task.email - try: - send_email(task.email, subject, html_content) - except Exception: - send_email(task.email, subject, html_content_err) - def set_duration(self) -> None: """Set task instance duration.""" if self.end_date and self.start_date: @@ -3149,10 +2171,10 @@ def tg2(inp): :return: Specific map index or map indexes to pull, or ``None`` if we want to "whole" return value (i.e. no mapped task groups involved). """ - from airflow.models.baseoperator import BaseOperator + from airflow.models.mappedoperator import get_mapped_ti_count if TYPE_CHECKING: - assert self.task + assert self.task is not None # This value should never be None since we already know the current task # is in a mapped task group, and should have been expanded, despite that, @@ -3174,7 +2196,7 @@ def tg2(inp): # should use a "partial" value. Let's break down the mapped ti count # between the ancestor and further expansion happened inside it. - ancestor_ti_count = BaseOperator.get_mapped_ti_count(common_ancestor, self.run_id, session=session) + ancestor_ti_count = get_mapped_ti_count(common_ancestor, self.run_id, session=session) ancestor_map_index = self.map_index * ancestor_ti_count // ti_count # If the task is NOT further expanded inside the common ancestor, we @@ -3254,60 +2276,6 @@ def duration_expression_update( } ) - @staticmethod - def validate_inlet_outlet_assets_activeness( - inlets: list[AssetProfile], outlets: list[AssetProfile], session: Session - ) -> None: - from airflow.sdk.definitions.asset import AssetUniqueKey - - if not (inlets or outlets): - return - - all_asset_unique_keys = { - AssetUniqueKey.from_asset(inlet_or_outlet) # type: ignore - for inlet_or_outlet in itertools.chain(inlets, outlets) - } - inactive_asset_unique_keys = TaskInstance._get_inactive_asset_unique_keys( - all_asset_unique_keys, session - ) - if inactive_asset_unique_keys: - raise AirflowInactiveAssetInInletOrOutletException(inactive_asset_unique_keys) - - @staticmethod - def _get_inactive_asset_unique_keys( - asset_unique_keys: set[AssetUniqueKey], session: Session - ) -> set[AssetUniqueKey]: - from airflow.sdk.definitions.asset import AssetUniqueKey - - active_asset_unique_keys = { - AssetUniqueKey(name, uri) - for name, uri in session.execute( - select(AssetActive.name, AssetActive.uri).where( - tuple_(AssetActive.name, AssetActive.uri).in_( - attrs.astuple(key) for key in asset_unique_keys - ) - ) - ) - } - return asset_unique_keys - active_asset_unique_keys - - def get_first_reschedule_date(self, context: Context) -> datetime | None: - """Get the first reschedule date for the task instance.""" - if TYPE_CHECKING: - assert isinstance(self.task, BaseOperator) - - with create_session() as session: - start_date = session.scalar( - select(TaskReschedule) - .where( - TaskReschedule.ti_id == str(self.id), - ) - .order_by(TaskReschedule.id.asc()) - .with_only_columns(TaskReschedule.start_date) - .limit(1) - ) - return start_date - def _find_common_ancestor_mapped_group(node1: Operator, node2: Operator) -> MappedTaskGroup | None: """Given two operators, find their innermost common mapped task group.""" @@ -3333,97 +2301,6 @@ def _is_further_mapped_inside(operator: Operator, container: TaskGroup) -> bool: return False -# State of the task instance. -# Stores string version of the task state. 
-TaskInstanceStateType = tuple[TaskInstanceKey, TaskInstanceState] - - -class SimpleTaskInstance: - """ - Simplified Task Instance. - - Used to send data between processes via Queues. - """ - - def __init__( - self, - dag_id: str, - task_id: str, - run_id: str, - queued_dttm: datetime | None, - start_date: datetime | None, - end_date: datetime | None, - try_number: int, - map_index: int, - state: str, - executor: str | None, - executor_config: Any, - pool: str, - queue: str, - key: TaskInstanceKey, - run_as_user: str | None = None, - priority_weight: int | None = None, - parent_context_carrier: dict | None = None, - context_carrier: dict | None = None, - span_status: str | None = None, - ): - self.dag_id = dag_id - self.task_id = task_id - self.run_id = run_id - self.map_index = map_index - self.queued_dttm = queued_dttm - self.start_date = start_date - self.end_date = end_date - self.try_number = try_number - self.state = state - self.executor = executor - self.executor_config = executor_config - self.run_as_user = run_as_user - self.pool = pool - self.priority_weight = priority_weight - self.queue = queue - self.key = key - self.parent_context_carrier = parent_context_carrier - self.context_carrier = context_carrier - self.span_status = span_status - - def __repr__(self) -> str: - attrs = ", ".join(f"{k}={v!r}" for k, v in self.__dict__.items()) - return f"SimpleTaskInstance({attrs})" - - def __eq__(self, other) -> bool: - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - return NotImplemented - - @classmethod - def from_ti(cls, ti: TaskInstance) -> SimpleTaskInstance: - return cls( - dag_id=ti.dag_id, - task_id=ti.task_id, - run_id=ti.run_id, - map_index=ti.map_index, - queued_dttm=ti.queued_dttm, - start_date=ti.start_date, - end_date=ti.end_date, - try_number=ti.try_number, - state=ti.state, - executor=ti.executor, - executor_config=ti.executor_config, - pool=ti.pool, - queue=ti.queue, - key=ti.key, - run_as_user=ti.run_as_user if hasattr(ti, "run_as_user") else None, - priority_weight=ti.priority_weight if hasattr(ti, "priority_weight") else None, - # Inspect the ti, to check if the 'dag_run' relationship is loaded. 
- parent_context_carrier=ti.dag_run.context_carrier - if "dag_run" not in inspect(ti).unloaded - else None, - context_carrier=ti.context_carrier if hasattr(ti, "context_carrier") else None, - span_status=ti.span_status, - ) - - class TaskInstanceNote(Base): """For storage of arbitrary notes concerning the task instance.""" diff --git a/airflow-core/src/airflow/models/taskinstancehistory.py b/airflow-core/src/airflow/models/taskinstancehistory.py index c7c6eb79a6008..b5932b28c58a3 100644 --- a/airflow-core/src/airflow/models/taskinstancehistory.py +++ b/airflow-core/src/airflow/models/taskinstancehistory.py @@ -38,8 +38,8 @@ from sqlalchemy.orm import relationship from sqlalchemy_utils import UUIDType +from airflow._shared.timezones import timezone from airflow.models.base import Base, StringID -from airflow.utils import timezone from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.span_status import SpanStatus from airflow.utils.sqlalchemy import ( @@ -52,6 +52,7 @@ if TYPE_CHECKING: from sqlalchemy.orm.session import Session + from airflow.models import DagRun from airflow.models.taskinstance import TaskInstance @@ -113,6 +114,13 @@ class TaskInstanceHistory(Base): foreign_keys=[dag_version_id], ) + dag_run = relationship( + "DagRun", + primaryjoin="and_(TaskInstanceHistory.run_id == DagRun.run_id, DagRun.dag_id == TaskInstanceHistory.dag_id)", + viewonly=True, + foreign_keys=[run_id, dag_id], + ) + def __init__( self, ti: TaskInstance, @@ -154,6 +162,11 @@ def __init__( Index("idx_tih_dag_run", dag_id, run_id), ) + @property + def id(self) -> str: + """Alias for primary key field to support TaskInstance.""" + return self.task_instance_id + @staticmethod @provide_session def record_ti(ti: TaskInstance, session: Session = NEW_SESSION) -> None: @@ -176,3 +189,8 @@ def record_ti(ti: TaskInstance, session: Session = NEW_SESSION) -> None: ti.set_duration() ti_history = TaskInstanceHistory(ti, state=ti_history_state) session.add(ti_history) + + @provide_session + def get_dagrun(self, session: Session = NEW_SESSION) -> DagRun: + """Return the DagRun for this TaskInstanceHistory, matching TaskInstance.""" + return self.dag_run diff --git a/airflow-core/src/airflow/models/tasklog.py b/airflow-core/src/airflow/models/tasklog.py index d55eb94a266d7..d9a5c57c30ac5 100644 --- a/airflow-core/src/airflow/models/tasklog.py +++ b/airflow-core/src/airflow/models/tasklog.py @@ -19,8 +19,8 @@ from sqlalchemy import Column, Integer, Text +from airflow._shared.timezones import timezone from airflow.models.base import Base -from airflow.utils import timezone from airflow.utils.sqlalchemy import UtcDateTime diff --git a/airflow-core/src/airflow/models/taskmap.py b/airflow-core/src/airflow/models/taskmap.py index f0fd4c0231b70..e48c53aa4034e 100644 --- a/airflow-core/src/airflow/models/taskmap.py +++ b/airflow-core/src/airflow/models/taskmap.py @@ -36,7 +36,9 @@ from sqlalchemy.orm import Session from airflow.models.dag import DAG as SchedulerDAG + from airflow.models.mappedoperator import MappedOperator from airflow.models.taskinstance import TaskInstance + from airflow.serialization.serialized_objects import SerializedBaseOperator class TaskMapVariant(enum.Enum): @@ -122,7 +124,13 @@ def variant(self) -> TaskMapVariant: return TaskMapVariant.DICT @classmethod - def expand_mapped_task(cls, task, run_id: str, *, session: Session) -> tuple[Sequence[TaskInstance], int]: + def expand_mapped_task( + cls, + task: SerializedBaseOperator | MappedOperator, + run_id: str, + *, + session: 
Session, + ) -> tuple[Sequence[TaskInstance], int]: """ Create the mapped task instances for mapped task. @@ -130,20 +138,19 @@ def expand_mapped_task(cls, task, run_id: str, *, session: Session) -> tuple[Seq :return: The newly created mapped task instances (if any) in ascending order by map index, and the maximum map index value. """ - from airflow.models.baseoperator import BaseOperator as DBBaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import MappedOperator, get_mapped_ti_count from airflow.models.taskinstance import TaskInstance - from airflow.sdk.bases.operator import BaseOperator - from airflow.sdk.definitions.mappedoperator import MappedOperator + from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.settings import task_instance_mutation_hook - if not isinstance(task, (BaseOperator, MappedOperator)): + if not isinstance(task, (MappedOperator, SerializedBaseOperator)): raise RuntimeError( f"cannot expand unrecognized operator type {type(task).__module__}.{type(task).__name__}" ) try: - total_length: int | None = DBBaseOperator.get_mapped_ti_count(task, run_id, session=session) + total_length: int | None = get_mapped_ti_count(task, run_id, session=session) except NotFullyPopulated as e: if not task.dag or not task.dag.partial: task.log.error( diff --git a/airflow-core/src/airflow/models/taskmixin.py b/airflow-core/src/airflow/models/taskmixin.py index 7aa8f63ba3c9d..61494c70f19f2 100644 --- a/airflow-core/src/airflow/models/taskmixin.py +++ b/airflow-core/src/airflow/models/taskmixin.py @@ -19,7 +19,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from airflow.typing_compat import TypeAlias + from typing import TypeAlias import airflow.sdk.definitions._internal.mixins import airflow.sdk.definitions._internal.node diff --git a/airflow-core/src/airflow/models/trigger.py b/airflow-core/src/airflow/models/trigger.py index 8bcff1d470e82..ded88d1d2f899 100644 --- a/airflow-core/src/airflow/models/trigger.py +++ b/airflow-core/src/airflow/models/trigger.py @@ -25,25 +25,24 @@ from typing import TYPE_CHECKING, Any from sqlalchemy import Column, Integer, String, Text, delete, func, or_, select, update -from sqlalchemy.orm import relationship, selectinload +from sqlalchemy.orm import Session, relationship, selectinload from sqlalchemy.sql.functions import coalesce +from airflow._shared.timezones import timezone from airflow.assets.manager import AssetManager from airflow.models.asset import asset_trigger_association_table from airflow.models.base import Base from airflow.models.taskinstance import TaskInstance -from airflow.triggers import base as events -from airflow.utils import timezone +from airflow.triggers.base import BaseTaskEndEvent from airflow.utils.retries import run_with_db_retries from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime, with_row_locks from airflow.utils.state import TaskInstanceState if TYPE_CHECKING: - from sqlalchemy.orm import Session from sqlalchemy.sql import Select - from airflow.triggers.base import BaseTrigger + from airflow.triggers.base import BaseTrigger, TriggerEvent TRIGGER_FAIL_REPR = "__fail__" """String value to represent trigger failure. 
@@ -107,6 +106,8 @@ class Trigger(Base): assets = relationship("AssetModel", secondary=asset_trigger_association_table, back_populates="triggers") + deadline = relationship("Deadline", back_populates="trigger", uselist=False) + def __init__( self, classpath: str, @@ -188,10 +189,15 @@ def bulk_fetch(cls, ids: Iterable[int], session: Session = NEW_SESSION) -> dict[ @classmethod @provide_session - def fetch_trigger_ids_with_asset(cls, session: Session = NEW_SESSION) -> set[str]: - """Fetch all the trigger IDs associated with at least one asset.""" - query = select(asset_trigger_association_table.columns.trigger_id) - return {trigger_id for trigger_id in session.scalars(query)} + def fetch_trigger_ids_with_non_task_associations(cls, session: Session = NEW_SESSION) -> set[str]: + """Fetch all trigger IDs actively associated with non-task entities like assets and deadlines.""" + from airflow.models import Deadline + + query = select(asset_trigger_association_table.columns.trigger_id).union_all( + select(Deadline.trigger_id).where(Deadline.trigger_id.is_not(None)) + ) + + return set(session.scalars(query)) @classmethod @provide_session @@ -213,10 +219,10 @@ def clean_unused(cls, session: Session = NEW_SESSION) -> None: .values(trigger_id=None) ) - # Get all triggers that have no task instances and assets depending on them and delete them + # Get all triggers that have no task instances, assets, or deadlines depending on them and delete them ids = ( select(cls.id) - .where(~cls.assets.any()) + .where(~cls.assets.any(), ~cls.deadline.has()) .join(TaskInstance, cls.id == TaskInstance.trigger_id, isouter=True) .group_by(cls.id) .having(func.count(TaskInstance.trigger_id) == 0) @@ -230,7 +236,7 @@ def clean_unused(cls, session: Session = NEW_SESSION) -> None: @classmethod @provide_session - def submit_event(cls, trigger_id, event: events.TriggerEvent, session: Session = NEW_SESSION) -> None: + def submit_event(cls, trigger_id, event: TriggerEvent, session: Session = NEW_SESSION) -> None: """ Fire an event. @@ -256,6 +262,8 @@ def submit_event(cls, trigger_id, event: events.TriggerEvent, session: Session = extra={"from_trigger": True, "payload": event.payload}, session=session, ) + if trigger.deadline: + trigger.deadline.handle_callback_event(event, session) @classmethod @provide_session @@ -348,31 +356,37 @@ def get_sorted_triggers(cls, capacity: int, alive_triggerer_ids: list[int] | Sel :param alive_triggerer_ids: The alive triggerer ids as a list or a select query. :param session: The database session. 
""" - query = with_row_locks( + result: list[int] = [] + + # Add triggers associated to deadlines first, then tasks, then assets + # It prioritizes deadline triggers, then DAGs over event driven scheduling which is fair + queries = [ + # Deadline triggers + select(cls.id).where(cls.deadline.has()).order_by(cls.created_date), + # Task Instance triggers select(cls.id) + .prefix_with("STRAIGHT_JOIN", dialect="mysql") .join(TaskInstance, cls.id == TaskInstance.trigger_id, isouter=False) .where(or_(cls.triggerer_id.is_(None), cls.triggerer_id.not_in(alive_triggerer_ids))) - .order_by(coalesce(TaskInstance.priority_weight, 0).desc(), cls.created_date) - .limit(capacity), - session, - skip_locked=True, - ) - ti_triggers = session.execute(query).all() + .order_by(coalesce(TaskInstance.priority_weight, 0).desc(), cls.created_date), + # Asset triggers + select(cls.id).where(cls.assets.any()).order_by(cls.created_date), + ] - query = with_row_locks( - select(cls.id).where(cls.assets.any()).order_by(cls.created_date).limit(capacity), - session, - skip_locked=True, - ) - asset_triggers = session.execute(query).all() + # Process each query while avoiding unnecessary queries when capacity is reached + for query in queries: + remaining_capacity = capacity - len(result) + if remaining_capacity <= 0: + break + + locked_query = with_row_locks(query.limit(remaining_capacity), session, skip_locked=True) + result.extend(session.execute(locked_query).all()) - # Add triggers associated to assets after triggers associated to tasks - # It prioritizes DAGs over event driven scheduling which is fair - return ti_triggers + asset_triggers + return result @singledispatch -def handle_event_submit(event: events.TriggerEvent, *, task_instance: TaskInstance, session: Session) -> None: +def handle_event_submit(event: TriggerEvent, *, task_instance: TaskInstance, session: Session) -> None: """ Handle the submit event for a given task instance. @@ -403,10 +417,8 @@ def handle_event_submit(event: events.TriggerEvent, *, task_instance: TaskInstan session.flush() -@handle_event_submit.register(events.BaseTaskEndEvent) -def _process_BaseTaskEndEvent( - event: events.BaseTaskEndEvent, *, task_instance: TaskInstance, session: Session -) -> None: +@handle_event_submit.register +def _(event: BaseTaskEndEvent, *, task_instance: TaskInstance, session: Session) -> None: """ Submit event for the given task instance. diff --git a/airflow-core/src/airflow/models/variable.py b/airflow-core/src/airflow/models/variable.py index 5cf9538aa8e75..2c9138f7c695d 100644 --- a/airflow-core/src/airflow/models/variable.py +++ b/airflow-core/src/airflow/models/variable.py @@ -31,8 +31,8 @@ from airflow.configuration import ensure_secrets_loaded from airflow.models.base import ID_LEN, Base from airflow.models.crypto import get_fernet +from airflow.sdk import SecretCache from airflow.sdk.execution_time.secrets_masker import mask_secret -from airflow.secrets.cache import SecretCache from airflow.secrets.metastore import MetastoreBackend from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.session import create_session @@ -186,7 +186,7 @@ def set( """ Set a value for an Airflow Variable with a given Key. - This operation overwrites an existing variable. + This operation overwrites an existing variable using the session's dialect-specific upsert operation. 
:param key: Variable Key :param value: Value to set for the Variable @@ -231,9 +231,47 @@ def set( ctx = create_session() with ctx as session: - Variable.delete(key, session=session) - session.add(Variable(key=key, val=stored_value, description=description)) - session.flush() + new_variable = Variable(key=key, val=stored_value, description=description) + + val = new_variable._val + is_encrypted = new_variable.is_encrypted + + # Import dialect-specific insert function + if (dialect_name := session.get_bind().dialect.name) == "postgresql": + from sqlalchemy.dialects.postgresql import insert + elif dialect_name == "mysql": + from sqlalchemy.dialects.mysql import insert + else: + from sqlalchemy.dialects.sqlite import insert + + # Create the insert statement (common for all dialects) + stmt = insert(Variable).values( + key=key, + val=val, + description=description, + is_encrypted=is_encrypted, + ) + + # Apply dialect-specific upsert + if dialect_name == "mysql": + # MySQL: ON DUPLICATE KEY UPDATE + stmt = stmt.on_duplicate_key_update( + val=val, + description=description, + is_encrypted=is_encrypted, + ) + else: + # PostgreSQL and SQLite: ON CONFLICT DO UPDATE + stmt = stmt.on_conflict_do_update( + index_elements=["key"], + set_=dict( + val=val, + description=description, + is_encrypted=is_encrypted, + ), + ) + + session.execute(stmt) # invalidate key in cache for faster propagation # we cannot save the value set because it's possible that it's shadowed by a custom backend # (see call to check_for_write_conflict above) diff --git a/airflow-core/src/airflow/models/xcom.py b/airflow-core/src/airflow/models/xcom.py index f828d9b9343c5..a7301578ed2bd 100644 --- a/airflow-core/src/airflow/models/xcom.py +++ b/airflow-core/src/airflow/models/xcom.py @@ -39,21 +39,14 @@ from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.orm import Query, relationship +from airflow._shared.timezones import timezone from airflow.models.base import COLLATION_ARGS, ID_LEN, TaskInstanceDependencies -from airflow.utils import timezone from airflow.utils.db import LazySelectSequence from airflow.utils.helpers import is_container from airflow.utils.json import XComDecoder, XComEncoder from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime -# XCom constants below are needed for providers backward compatibility, -# which should import the constants directly after apache-airflow>=2.6.0 -from airflow.utils.xcom import ( - MAX_XCOM_SIZE, # noqa: F401 - XCOM_RETURN_KEY, -) - log = logging.getLogger(__name__) if TYPE_CHECKING: @@ -62,6 +55,9 @@ from sqlalchemy.sql.expression import Select, TextClause +XCOM_RETURN_KEY = "return_value" + + class XComModel(TaskInstanceDependencies): """XCom model class. Contains table and some utilities.""" @@ -180,6 +176,9 @@ def set( """ from airflow.models.dagrun import DagRun + if not key: + raise ValueError(f"XCom key must be a non-empty string. Received: {key!r}") + if not run_id: raise ValueError(f"run_id must be passed. Passed run_id={run_id}") @@ -278,6 +277,9 @@ def get_many( """ from airflow.models.dagrun import DagRun + if key is not None and not key: + raise ValueError(f"XCom key must be a non-empty string. Received: {key!r}") + if not run_id: raise ValueError(f"run_id must be passed. 
Passed run_id={run_id}") diff --git a/airflow-core/src/airflow/models/xcom_arg.py b/airflow-core/src/airflow/models/xcom_arg.py index cfda9295cec26..1109f03bb1f99 100644 --- a/airflow-core/src/airflow/models/xcom_arg.py +++ b/airflow-core/src/airflow/models/xcom_arg.py @@ -25,6 +25,7 @@ from sqlalchemy import func, or_, select from sqlalchemy.orm import Session +from airflow.models.xcom import XCOM_RETURN_KEY from airflow.sdk.definitions._internal.types import ArgNotSet from airflow.sdk.definitions.mappedoperator import MappedOperator from airflow.sdk.definitions.xcom_arg import ( @@ -33,7 +34,6 @@ from airflow.utils.db import exists_query from airflow.utils.state import State from airflow.utils.types import NOTSET -from airflow.utils.xcom import XCOM_RETURN_KEY __all__ = ["XComArg", "get_task_map_length"] diff --git a/airflow-core/src/airflow/notifications/__init__.py b/airflow-core/src/airflow/notifications/__init__.py new file mode 100644 index 0000000000000..e5acbbb18ea3b --- /dev/null +++ b/airflow-core/src/airflow/notifications/__init__.py @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
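Stepping back to the `Variable.set` rewrite above: the old delete-then-insert is replaced with a single dialect-specific upsert. A minimal runnable sketch of the SQLite flavor (the toy table and values are illustrative, not Airflow's model):

from sqlalchemy import Column, String, create_engine
from sqlalchemy.dialects.sqlite import insert
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Var(Base):
    __tablename__ = "variable"
    key = Column(String, primary_key=True)
    val = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Emits INSERT ... ON CONFLICT(key) DO UPDATE SET val = excluded.val,
    # so a second call with the same key updates in place instead of failing.
    stmt = insert(Var).values(key="k", val="v2")
    stmt = stmt.on_conflict_do_update(index_elements=["key"], set_={"val": stmt.excluded.val})
    session.execute(stmt)
    session.commit()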
+"""Airflow Notifiers.""" + +from __future__ import annotations + +from airflow.utils.deprecation_tools import add_deprecated_classes + +__deprecated_classes = { + "basenotifier": { + "BaseNotifier": "airflow.sdk.bases.notifier.BaseNotifier", + }, +} +add_deprecated_classes(__deprecated_classes, __name__) diff --git a/airflow-core/src/airflow/operators/__init__.py b/airflow-core/src/airflow/operators/__init__.py index df46ec18164aa..70d2da8f06b57 100644 --- a/airflow-core/src/airflow/operators/__init__.py +++ b/airflow-core/src/airflow/operators/__init__.py @@ -35,6 +35,7 @@ "ExternalPythonOperator": "airflow.providers.standard.operators.python.ExternalPythonOperator", "BranchExternalPythonOperator": "airflow.providers.standard.operators.python.BranchExternalPythonOperator", "BranchPythonVirtualenvOperator": "airflow.providers.standard.operators.python.BranchPythonVirtualenvOperator", + "get_current_context": "airflow.sdk.get_current_context", }, "bash":{ "BashOperator": "airflow.providers.standard.operators.bash.BashOperator", @@ -57,6 +58,9 @@ "empty": { "EmptyOperator": "airflow.providers.standard.operators.empty.EmptyOperator", }, + "email": { + "EmailOperator": "airflow.providers.smtp.operators.smtp.EmailOperator", + }, "smooth": { "SmoothOperator": "airflow.providers.standard.operators.smooth.SmoothOperator", }, diff --git a/airflow-core/src/airflow/plugins_manager.py b/airflow-core/src/airflow/plugins_manager.py index 34d8f100d7546..20377e6165cd8 100644 --- a/airflow-core/src/airflow/plugins_manager.py +++ b/airflow-core/src/airflow/plugins_manager.py @@ -69,6 +69,8 @@ flask_blueprints: list[Any] | None = None fastapi_apps: list[Any] | None = None fastapi_root_middlewares: list[Any] | None = None +external_views: list[Any] | None = None +react_apps: list[Any] | None = None menu_links: list[Any] | None = None flask_appbuilder_views: list[Any] | None = None flask_appbuilder_menu_links: list[Any] | None = None @@ -90,6 +92,8 @@ "flask_blueprints", "fastapi_apps", "fastapi_root_middlewares", + "external_views", + "react_apps", "menu_links", "appbuilder_views", "appbuilder_menu_items", @@ -129,7 +133,7 @@ class EntryPointSource(AirflowPluginSource): """Class used to define Plugins loaded from entrypoint.""" def __init__(self, entrypoint: metadata.EntryPoint, dist: metadata.Distribution): - self.dist = dist.metadata["Name"] + self.dist = dist.metadata["Name"] # type: ignore[index] self.version = dist.version self.entrypoint = str(entrypoint) @@ -154,6 +158,8 @@ class AirflowPlugin: flask_blueprints: list[Any] = [] fastapi_apps: list[Any] = [] fastapi_root_middlewares: list[Any] = [] + external_views: list[Any] = [] + react_apps: list[Any] = [] menu_links: list[Any] = [] appbuilder_views: list[Any] = [] appbuilder_menu_items: list[Any] = [] @@ -365,8 +371,66 @@ def ensure_plugins_loaded(): log.debug("Loading %d plugin(s) took %.2f seconds", len(plugins), timer.duration) +def initialize_ui_plugins(): + """Collect extension points for the UI.""" + global plugins + global external_views + global react_apps + + if external_views is not None and react_apps is not None: + return + + ensure_plugins_loaded() + + if plugins is None: + raise AirflowPluginException("Can't load plugins.") + + log.debug("Initialize UI plugin") + + seen_url_route = {} + external_views = [] + react_apps = [] + + for plugin in plugins: + for external_view in plugin.external_views: + url_route = external_view["url_route"] + if url_route is not None and url_route in seen_url_route: + log.warning( + "Plugin '%s' has an 
external view with a URL route '%s' "
+ "that conflicts with another plugin '%s'. The view will not be loaded.",
+ plugin.name,
+ url_route,
+ seen_url_route[url_route],
+ )
+ # Mutate in place the plugin's external views to remove the conflicting view
+ # because some functions still access the plugin's external views and not the
+ # global `external_views` variable. (get_plugin_info, for example)
+ plugin.external_views.remove(external_view)
+ continue
+ external_views.append(external_view)
+ seen_url_route[url_route] = plugin.name
+
+ for react_app in plugin.react_apps:
+ url_route = react_app["url_route"]
+ if url_route is not None and url_route in seen_url_route:
+ log.warning(
+ "Plugin '%s' has a React App with a URL route '%s' "
+ "that conflicts with another plugin '%s'. The React App will not be loaded.",
+ plugin.name,
+ url_route,
+ seen_url_route[url_route],
+ )
+ # Mutate in place the plugin's React Apps to remove the conflicting app
+ # because some functions still access the plugin's React Apps and not the
+ # global `react_apps` variable. (get_plugin_info, for example)
+ plugin.react_apps.remove(react_app)
+ continue
+ react_apps.append(react_app)
+ seen_url_route[url_route] = plugin.name
+
+
 def initialize_flask_plugins():
- """Collect extension points for WEB UI."""
+ """Collect flask extension points for WEB UI (legacy)."""
 global plugins
 global flask_blueprints
 global flask_appbuilder_views
@@ -384,7 +448,7 @@ def initialize_flask_plugins():
 if plugins is None:
 raise AirflowPluginException("Can't load plugins.")
- log.debug("Initialize Web UI plugin")
+ log.debug("Initialize legacy Web UI plugin")
 flask_blueprints = []
 flask_appbuilder_views = []
@@ -507,7 +571,7 @@ def integrate_macros_plugins() -> None:
 global plugins
 global macros_modules
- from airflow import macros
+ from airflow.sdk.execution_time import macros
 if macros_modules is not None:
 return
@@ -517,7 +581,7 @@ def integrate_macros_plugins() -> None:
 if plugins is None:
 raise AirflowPluginException("Can't load plugins.")
- log.debug("Integrate DAG plugins")
+ log.debug("Integrate Macros plugins")
 macros_modules = []
@@ -525,7 +589,7 @@ def integrate_macros_plugins() -> None:
 if plugin.name is None:
 raise AirflowPluginException("Invalid plugin name")
- macros_module = make_module(f"airflow.macros.{plugin.name}", plugin.macros)
+ macros_module = make_module(f"airflow.sdk.execution_time.macros.{plugin.name}", plugin.macros)
 if macros_module:
 macros_modules.append(macros_module)
@@ -560,6 +624,7 @@ def get_plugin_info(attrs_to_dump: Iterable[str] | None = None) -> list[dict[str
 integrate_macros_plugins()
 initialize_flask_plugins()
 initialize_fastapi_plugins()
+ initialize_ui_plugins()
 initialize_extra_operators_links_plugins()
 if not attrs_to_dump:
 attrs_to_dump = PLUGINS_ATTRIBUTES_TO_DUMP
diff --git a/airflow-core/src/airflow/policies.py b/airflow-core/src/airflow/policies.py
index 6e995733ca0d1..933ccaa24522c 100644
--- a/airflow-core/src/airflow/policies.py
+++ b/airflow-core/src/airflow/policies.py
@@ -27,9 +27,9 @@
 __all__: list[str] = ["hookimpl"]
 if TYPE_CHECKING:
- from airflow.models.baseoperator import BaseOperator
 from airflow.models.dag import DAG
 from airflow.models.taskinstance import TaskInstance
+ from airflow.serialization.serialized_objects import SerializedBaseOperator as BaseOperator
 @local_settings_hookspec
diff --git a/airflow-core/src/airflow/provider.yaml.schema.json b/airflow-core/src/airflow/provider.yaml.schema.json
index 75ba892569b4e..c35e0d9de25e7 100644
---
a/airflow-core/src/airflow/provider.yaml.schema.json +++ b/airflow-core/src/airflow/provider.yaml.schema.json @@ -467,6 +467,18 @@ } } }, + "queues": { + "type": "array", + "description": "Message Queues exposed by the provider", + "items": { + "name": { + "type": "string" + }, + "message-queue-class": { + "type": "string" + } + } + }, "source-date-epoch": { "type": "integer", "description": "Source date epoch - seconds since epoch (gmtime) when the release documentation was prepared. Used to generate reproducible package builds with flint.", diff --git a/airflow-core/src/airflow/provider_info.schema.json b/airflow-core/src/airflow/provider_info.schema.json index 1785ba02ed623..3ca9756dfb2f6 100644 --- a/airflow-core/src/airflow/provider_info.schema.json +++ b/airflow-core/src/airflow/provider_info.schema.json @@ -416,6 +416,18 @@ "description": "Class to instantiate the plugin" } } + }, + "queues": { + "type": "array", + "description": "Message Queues exposed by the provider", + "items": { + "name": { + "type": "string" + }, + "message-queue-class": { + "type": "string" + } + } } }, "definitions": { diff --git a/airflow-core/src/airflow/providers_manager.py b/airflow-core/src/airflow/providers_manager.py index 85062e9f75e63..20fc79bd7fac5 100644 --- a/airflow-core/src/airflow/providers_manager.py +++ b/airflow-core/src/airflow/providers_manager.py @@ -26,19 +26,16 @@ import logging import traceback import warnings -from collections.abc import MutableMapping +from collections.abc import Callable, MutableMapping from dataclasses import dataclass from functools import wraps from importlib.resources import files as resource_files from time import perf_counter -from typing import TYPE_CHECKING, Any, Callable, NamedTuple, TypeVar +from typing import TYPE_CHECKING, Any, NamedTuple, ParamSpec, TypeVar from packaging.utils import canonicalize_name from airflow.exceptions import AirflowOptionalProviderFeatureException -from airflow.providers.standard.hooks.filesystem import FSHook -from airflow.providers.standard.hooks.package_index import PackageIndexHook -from airflow.typing_compat import ParamSpec from airflow.utils.entry_points import entry_points_with_dist from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.module_loading import import_string @@ -85,7 +82,7 @@ def ensure_prefix(field): if TYPE_CHECKING: from urllib.parse import SplitResult - from airflow.hooks.base import BaseHook + from airflow.sdk import BaseHook from airflow.sdk.bases.decorator import TaskDecorator from airflow.sdk.definitions.asset import Asset @@ -395,13 +392,11 @@ def __init__(self): self._initialized_cache: dict[str, bool] = {} # Keeps dict of providers keyed by module name self._provider_dict: dict[str, ProviderInfo] = {} - # Keeps dict of hooks keyed by connection type - self._hooks_dict: dict[str, HookInfo] = {} self._fs_set: set[str] = set() self._asset_uri_handlers: dict[str, Callable[[SplitResult], SplitResult]] = {} self._asset_factories: dict[str, Callable[..., Asset]] = {} self._asset_to_openlineage_converters: dict[str, Callable] = {} - self._taskflow_decorators: dict[str, Callable] = LazyDictWithCache() # type: ignore[assignment] + self._taskflow_decorators: dict[str, Callable] = LazyDictWithCache() # keeps mapping between connection_types and hook class, package they come from self._hook_provider_dict: dict[str, HookClassProvider] = {} self._dialect_provider_dict: dict[str, DialectInfo] = {} @@ -416,6 +411,7 @@ def __init__(self): self._auth_manager_class_name_set: set[str] = set() 
self._secrets_backend_class_name_set: set[str] = set() self._executor_class_name_set: set[str] = set() + self._queue_class_name_set: set[str] = set() self._provider_configs: dict[str, dict[str, Any]] = {} self._trigger_info_set: set[TriggerInfo] = set() self._notification_info_set: set[NotificationInfo] = set() @@ -442,19 +438,17 @@ def _init_airflow_core_hooks(self): connection_type=None, connection_testable=False, ) - for cls in [FSHook, PackageIndexHook]: - package_name = cls.__module__ - hook_class_name = f"{cls.__module__}.{cls.__name__}" - hook_info = self._import_hook( + for conn_type, class_name in ( + ("fs", "airflow.providers.standard.hooks.filesystem.FSHook"), + ("package_index", "airflow.providers.standard.hooks.package_index.PackageIndexHook"), + ): + self._hooks_lazy_dict[conn_type] = functools.partial( + self._import_hook, connection_type=None, + package_name="apache-airflow-providers-standard", + hook_class_name=class_name, provider_info=None, - hook_class_name=hook_class_name, - package_name=package_name, - ) - self._hook_provider_dict[hook_info.connection_type] = HookClassProvider( - hook_class_name=hook_class_name, package_name=package_name ) - self._hooks_lazy_dict[hook_info.connection_type] = hook_info @provider_info_cache("list") def initialize_providers_list(self): @@ -486,6 +480,7 @@ def _verify_all_providers_all_compatible(self): @provider_info_cache("hooks") def initialize_providers_hooks(self): """Lazy initialization of providers hooks.""" + self._init_airflow_core_hooks() self.initialize_providers_list() self._discover_hooks() self._hook_provider_dict = dict(sorted(self._hook_provider_dict.items())) @@ -533,6 +528,12 @@ def initialize_providers_executors(self): self.initialize_providers_list() self._discover_executors() + @provider_info_cache("queues") + def initialize_providers_queues(self): + """Lazy initialization of providers queue information.""" + self.initialize_providers_list() + self._discover_queues() + @provider_info_cache("notifications") def initialize_providers_notifications(self): """Lazy initialization of providers notifications information.""" @@ -585,6 +586,8 @@ def _discover_all_providers_from_packages(self) -> None: and verifies only the subset of fields that are needed at runtime. 
""" for entry_point, dist in entry_points_with_dist("apache_airflow_provider"): + if not dist.metadata: + continue package_name = canonicalize_name(dist.metadata["name"]) if package_name in self._provider_dict: continue @@ -1091,6 +1094,14 @@ def _discover_executors(self) -> None: if _correctness_check(provider_package, executors_class_name, provider): self._executor_class_name_set.add(executors_class_name) + def _discover_queues(self) -> None: + """Retrieve all queues defined in the providers.""" + for provider_package, provider in self._provider_dict.items(): + if provider.data.get("queues"): + for queue_class_name in provider.data["queues"]: + if _correctness_check(provider_package, queue_class_name, provider): + self._queue_class_name_set.add(queue_class_name) + def _discover_config(self) -> None: """Retrieve all configs defined in the providers.""" for provider_package, provider in self._provider_dict.items(): @@ -1221,6 +1232,11 @@ def executor_class_names(self) -> list[str]: self.initialize_providers_executors() return sorted(self._executor_class_name_set) + @property + def queue_class_names(self) -> list[str]: + self.initialize_providers_queues() + return sorted(self._queue_class_name_set) + @property def filesystem_module_names(self) -> list[str]: self.initialize_providers_filesystems() @@ -1255,7 +1271,6 @@ def already_initialized_provider_configs(self) -> list[tuple[str, dict[str, Any] def _cleanup(self): self._initialized_cache.clear() self._provider_dict.clear() - self._hooks_dict.clear() self._fs_set.clear() self._taskflow_decorators.clear() self._hook_provider_dict.clear() @@ -1268,9 +1283,11 @@ def _cleanup(self): self._auth_manager_class_name_set.clear() self._secrets_backend_class_name_set.clear() self._executor_class_name_set.clear() + self._queue_class_name_set.clear() self._provider_configs.clear() self._trigger_info_set.clear() self._notification_info_set.clear() self._plugins_set.clear() + self._initialized = False self._initialization_stack_trace = None diff --git a/airflow-core/src/airflow/secrets/__init__.py b/airflow-core/src/airflow/secrets/__init__.py index b502f1385f8b0..5ff034b247ec1 100644 --- a/airflow-core/src/airflow/secrets/__init__.py +++ b/airflow-core/src/airflow/secrets/__init__.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. """ -Secrets framework provides means of getting connection objects from various sources. +The Secrets framework provides a means of getting connection objects from various sources. 
The following sources are available: @@ -27,6 +27,8 @@ from __future__ import annotations +from airflow.utils.deprecation_tools import add_deprecated_classes + __all__ = ["BaseSecretsBackend", "DEFAULT_SECRETS_SEARCH_PATH", "DEFAULT_SECRETS_SEARCH_PATH_WORKERS"] from airflow.secrets.base_secrets import BaseSecretsBackend @@ -39,3 +41,11 @@ DEFAULT_SECRETS_SEARCH_PATH_WORKERS = [ "airflow.secrets.environment_variables.EnvironmentVariablesBackend", ] + + +__deprecated_classes = { + "cache": { + "SecretCache": "airflow.sdk.execution_time.cache.SecretCache", + }, +} +add_deprecated_classes(__deprecated_classes, __name__) diff --git a/airflow-core/src/airflow/secrets/local_filesystem.py b/airflow-core/src/airflow/secrets/local_filesystem.py index 60e78f59e5f89..e3d1a98d68947 100644 --- a/airflow-core/src/airflow/secrets/local_filesystem.py +++ b/airflow-core/src/airflow/secrets/local_filesystem.py @@ -21,17 +21,19 @@ import json import logging -import os from collections import defaultdict from inspect import signature from json import JSONDecodeError +from pathlib import Path from typing import TYPE_CHECKING, Any from airflow.exceptions import ( AirflowException, AirflowFileParseException, + AirflowUnsupportedFileTypeException, ConnectionNotUnique, FileSyntaxError, + VariableNotUnique, ) from airflow.secrets.base_secrets import BaseSecretsBackend from airflow.utils import yaml @@ -61,6 +63,7 @@ def _parse_env_file(file_path: str) -> tuple[dict[str, list[str]], list[FileSynt :param file_path: The location of the file that will be processed. :return: Tuple with mapping of key and list of values and list of syntax errors + :raises FileSyntaxError: If the file has syntax errors. """ with open(file_path) as f: content = f.read() @@ -86,13 +89,22 @@ def _parse_env_file(file_path: str) -> tuple[dict[str, list[str]], list[FileSynt ) continue - if not value: + if not key: errors.append( FileSyntaxError( line_no=line_no, message="Invalid line format. Key is empty.", ) ) + continue + if not value: + errors.append( + FileSyntaxError( + line_no=line_no, + message="Invalid line format. Value is empty.", + ) + ) + continue secrets[key].append(value) return secrets, errors @@ -156,20 +168,17 @@ def _parse_secret_file(file_path: str) -> dict[str, Any]: :param file_path: The location of the file that will be processed. :return: Map of secret key (e.g. connection ID) and value. + :raises AirflowUnsupportedFileTypeException: If the file type is not supported. + :raises AirflowFileParseException: If the file has syntax errors. """ - if not os.path.exists(file_path): - raise AirflowException( - f"File {file_path} was not found. Check the configuration of your Secrets backend." - ) - log.debug("Parsing file: %s", file_path) - ext = file_path.rsplit(".", 2)[-1].lower() + ext = Path(file_path).suffix.lstrip(".").lower() if ext not in FILE_PARSERS: - raise AirflowException( - "Unsupported file format. The file must have one of the following extensions: " - ".env .json .yaml .yml" + extensions = " ".join([f".{ext}" for ext in sorted(FILE_PARSERS.keys())]) + raise AirflowUnsupportedFileTypeException( + f"Unsupported file format. 
The file must have one of the following extensions: {extensions}" ) secrets, parse_errors = FILE_PARSERS[ext](file_path) @@ -235,7 +244,7 @@ def load_variables(file_path: str) -> dict[str, str]: secrets = _parse_secret_file(file_path) invalid_keys = [key for key, values in secrets.items() if isinstance(values, list) and len(values) != 1] if invalid_keys: - raise AirflowException(f'The "{file_path}" file contains multiple values for keys: {invalid_keys}') + raise VariableNotUnique(f'The "{file_path}" file contains multiple values for keys: {invalid_keys}') variables = {key: values[0] if isinstance(values, list) else values for key, values in secrets.items()} log.debug("Loaded %d variables: ", len(variables)) return variables diff --git a/airflow-core/src/airflow/security/permissions.py b/airflow-core/src/airflow/security/permissions.py index 647bcf0b0c6e3..6ae47faefbecc 100644 --- a/airflow-core/src/airflow/security/permissions.py +++ b/airflow-core/src/airflow/security/permissions.py @@ -16,8 +16,18 @@ # under the License. from __future__ import annotations +import warnings from typing import TypedDict +from airflow.exceptions import RemovedInAirflow4Warning + +warnings.warn( + "The airflow.security.permissions module is deprecated; please see https://airflow.apache.org/docs/apache-airflow/stable/security/deprecated_permissions.html", + RemovedInAirflow4Warning, + stacklevel=2, +) + + # Resource Constants RESOURCE_ACTION = "Permissions" RESOURCE_ADMIN_MENU = "Admin" @@ -39,6 +49,7 @@ RESOURCE_ASSET_ALIAS = "Asset Aliases" RESOURCE_DOCS = "Documentation" RESOURCE_DOCS_MENU = "Docs" +RESOURCE_HITL_DETAIL = "HITL Detail" RESOURCE_IMPORT_ERROR = "ImportError" RESOURCE_JOB = "Jobs" RESOURCE_MY_PASSWORD = "My Password" @@ -95,13 +106,7 @@ class ResourceDetails(TypedDict): def resource_name(root_dag_id: str, resource: str) -> str: - """ - Return the resource name for a DAG id. - - Note that since a sub-DAG should follow the permission of its - parent DAG, you should pass ``DagModel.root_dag_id`` to this function, - for a subdag. A normal dag should pass the ``DagModel.dag_id``. - """ + """Return the resource name for a DAG id.""" if root_dag_id in RESOURCE_DETAILS_MAP.keys(): return root_dag_id if root_dag_id.startswith(tuple(PREFIX_RESOURCES_MAP.keys())): @@ -113,10 +118,6 @@ def resource_name_for_dag(root_dag_id: str) -> str: """ Return the resource name for a DAG id. - Note that since a sub-DAG should follow the permission of its - parent DAG, you should pass ``DagModel.root_dag_id`` to this function, - for a subdag. A normal dag should pass the ``DagModel.dag_id``. - Note: This function is kept for backwards compatibility. 
""" if root_dag_id == RESOURCE_DAG: diff --git a/airflow-core/src/airflow/sensors/__init__.py b/airflow-core/src/airflow/sensors/__init__.py index f174fcb55ed36..db378f4550324 100644 --- a/airflow-core/src/airflow/sensors/__init__.py +++ b/airflow-core/src/airflow/sensors/__init__.py @@ -26,13 +26,17 @@ from airflow.utils.deprecation_tools import add_deprecated_classes -# TODO: Add definition from Task SDK here and remove `base.py` file __deprecated_classes = { + "base": { + "BaseSensorOperator": "airflow.sdk.bases.sensor.BaseSensorOperator", + "PokeReturnValue": "airflow.sdk.bases.sensor.PokeReturnValue", + "poke_mode_only": "airflow.sdk.bases.sensor.poke_mode_only", + }, "python":{ "PythonSensor": "airflow.providers.standard.sensors.python.PythonSensor", }, "bash":{ - "BashSensor": "airflow.providers.standard.sensor.bash.BashSensor", + "BashSensor": "airflow.providers.standard.sensors.bash.BashSensor", }, "date_time":{ "DateTimeSensor": "airflow.providers.standard.sensors.date_time.DateTimeSensor", diff --git a/airflow-core/src/airflow/sensors/base.py b/airflow-core/src/airflow/sensors/base.py deleted file mode 100644 index 71ae006f53437..0000000000000 --- a/airflow-core/src/airflow/sensors/base.py +++ /dev/null @@ -1,24 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -from airflow.sdk.bases.sensor import ( - BaseSensorOperator as BaseSensorOperator, - PokeReturnValue as PokeReturnValue, - poke_mode_only as poke_mode_only, -) diff --git a/airflow-core/src/airflow/serialization/enums.py b/airflow-core/src/airflow/serialization/enums.py index ab7d59cb21682..11c37b3bd1a4e 100644 --- a/airflow-core/src/airflow/serialization/enums.py +++ b/airflow-core/src/airflow/serialization/enums.py @@ -62,16 +62,10 @@ class DagAttributeTypes(str, Enum): ASSET_REF = "asset_ref" ASSET_UNIQUE_KEY = "asset_unique_key" ASSET_ALIAS_UNIQUE_KEY = "asset_alias_unique_key" - SIMPLE_TASK_INSTANCE = "simple_task_instance" - BASE_JOB = "Job" - TASK_INSTANCE = "task_instance" - DAG_RUN = "dag_run" - DAG_MODEL = "dag_model" - LOG_TEMPLATE = "log_template" CONNECTION = "connection" TASK_CONTEXT = "task_context" ARG_NOT_SET = "arg_not_set" TASK_CALLBACK_REQUEST = "task_callback_request" DAG_CALLBACK_REQUEST = "dag_callback_request" TASK_INSTANCE_KEY = "task_instance_key" - TRIGGER = "trigger" + DEADLINE_ALERT = "deadline_alert" diff --git a/airflow-core/src/airflow/serialization/schema.json b/airflow-core/src/airflow/serialization/schema.json index 0670acd588cbe..aa13f618f844d 100644 --- a/airflow-core/src/airflow/serialization/schema.json +++ b/airflow-core/src/airflow/serialization/schema.json @@ -187,6 +187,12 @@ }, "dag_display_name": { "type" : "string"}, "description": { "type" : "string"}, + "deadline": { + "anyOf": [ + { "$ref": "#/definitions/dict" }, + { "type": "null" } + ] + }, "_concurrency": { "type" : "number"}, "max_active_tasks": { "type" : "number"}, "max_active_runs": { "type" : "number"}, diff --git a/airflow-core/src/airflow/serialization/serde.py b/airflow-core/src/airflow/serialization/serde.py index 0268ad91206d8..6faedd88417a7 100644 --- a/airflow-core/src/airflow/serialization/serde.py +++ b/airflow-core/src/airflow/serialization/serde.py @@ -26,12 +26,13 @@ from fnmatch import fnmatch from importlib import import_module from re import Pattern -from typing import TYPE_CHECKING, Any, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast import attr import airflow.serialization.serializers from airflow.configuration import conf +from airflow.serialization.typing import is_pydantic_model from airflow.stats import Stats from airflow.utils.module_loading import import_string, iter_namespace, qualname @@ -52,12 +53,13 @@ OLD_SOURCE = "__source" OLD_DATA = "__var" OLD_DICT = "dict" +PYDANTIC_MODEL_QUALNAME = "pydantic.main.BaseModel" DEFAULT_VERSION = 0 T = TypeVar("T", bool, float, int, dict, list, str, tuple, set) -U = Union[bool, float, int, dict, list, str, tuple, set] -S = Union[list, tuple, set] +U = bool | float | int | dict | list | str | tuple | set +S = list | tuple | set _serializers: dict[str, ModuleType] = {} _deserializers: dict[str, ModuleType] = {} @@ -118,13 +120,6 @@ def serialize(o: object, depth: int = 0) -> U | None: if o is None: return o - # primitive types are returned as is - if isinstance(o, _primitives): - if isinstance(o, enum.Enum): - return o.value - - return o - if isinstance(o, list): return [serialize(d, depth + 1) for d in o] @@ -145,12 +140,25 @@ def serialize(o: object, depth: int = 0) -> U | None: qn = "builtins.tuple" classname = qn + if is_pydantic_model(o): + # to match the generic Pydantic serializer and deserializer in _serializers and _deserializers + qn = PYDANTIC_MODEL_QUALNAME + # the actual Pydantic model class to encode + classname = qualname(o) + # if there is a 
builtin serializer available use that if qn in _serializers: data, serialized_classname, version, is_serialized = _serializers[qn].serialize(o) if is_serialized: return encode(classname or serialized_classname, version, serialize(data, depth + 1)) + # primitive types are returned as is + if isinstance(o, _primitives): + if isinstance(o, enum.Enum): + return o.value + + return o + # custom serializers dct = { CLASSNAME: qn, @@ -256,7 +264,10 @@ def deserialize(o: T | None, full=True, type_hint: Any = None) -> object: # registered deserializer if classname in _deserializers: - return _deserializers[classname].deserialize(classname, version, deserialize(value)) + return _deserializers[classname].deserialize(cls, version, deserialize(value)) + if is_pydantic_model(cls): + if PYDANTIC_MODEL_QUALNAME in _deserializers: + return _deserializers[PYDANTIC_MODEL_QUALNAME].deserialize(cls, version, deserialize(value)) # class has deserialization function if hasattr(cls, "deserialize"): @@ -273,7 +284,12 @@ def deserialize(o: T | None, full=True, type_hint: Any = None) -> object: class_version, ) - return cls(**deserialize(value)) + deserialize_value = deserialize(value) + if not isinstance(deserialize_value, dict): + raise TypeError( + f"deserialized value for {classname} is not a dict, got {type(deserialize_value)}" + ) + return cls(**deserialize_value) # type: ignore[operator] # no deserializer available raise TypeError(f"No deserializer found for {classname}") diff --git a/airflow-core/src/airflow/serialization/serialized_objects.py b/airflow-core/src/airflow/serialization/serialized_objects.py index 8428765e2be0c..a8a766e553b31 100644 --- a/airflow-core/src/airflow/serialization/serialized_objects.py +++ b/airflow-core/src/airflow/serialization/serialized_objects.py @@ -25,51 +25,49 @@ import enum import itertools import logging +import math import weakref -from collections.abc import Collection, Generator, Iterable, Mapping +from collections.abc import Collection, Generator, Iterable, Iterator, Mapping, Sequence from functools import cache, cached_property from inspect import signature from textwrap import dedent -from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, TypeAlias, TypeVar, Union, cast, overload import attrs import lazy_object_proxy +import methodtools import pydantic from dateutil import relativedelta from pendulum.tz.timezone import FixedTimezone, Timezone from airflow import macros +from airflow._shared.timezones.timezone import from_timestamp, parse_timezone from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest from airflow.exceptions import AirflowException, SerializationError, TaskDeferred -from airflow.models.baseoperator import BaseOperator from airflow.models.connection import Connection from airflow.models.dag import DAG, _get_model_data_interval from airflow.models.expandinput import ( create_expand_input, ) -from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance from airflow.models.taskinstancekey import TaskInstanceKey from airflow.models.xcom import XComModel from airflow.models.xcom_arg import SchedulerXComArg, deserialize_xcom_arg -from airflow.providers_manager import ProvidersManager -from airflow.sdk.bases.operator import BaseOperator as TaskSDKBaseOperator +from airflow.sdk import Asset, AssetAlias, AssetAll, AssetAny, AssetWatcher, BaseOperator, XComArg +from airflow.sdk.bases.operator import OPERATOR_DEFAULTS # TODO: Copy 
this into the scheduler? from airflow.sdk.definitions._internal.expandinput import EXPAND_INPUT_EMPTY +from airflow.sdk.definitions._internal.node import DAGNode from airflow.sdk.definitions.asset import ( - Asset, - AssetAlias, AssetAliasEvent, AssetAliasUniqueKey, - AssetAll, - AssetAny, AssetRef, AssetUniqueKey, - AssetWatcher, BaseAsset, ) +from airflow.sdk.definitions.deadline import DeadlineAlert from airflow.sdk.definitions.mappedoperator import MappedOperator from airflow.sdk.definitions.param import Param, ParamsDict from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup -from airflow.sdk.definitions.xcom_arg import XComArg, serialize_xcom_arg +from airflow.sdk.definitions.xcom_arg import serialize_xcom_arg from airflow.sdk.execution_time.context import OutletEventAccessor, OutletEventAccessors from airflow.serialization.dag_dependency import DagDependency from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding @@ -81,6 +79,11 @@ airflow_priority_weight_strategies, airflow_priority_weight_strategies_classes, ) +from airflow.ti_deps.deps.mapped_task_upstream_dep import MappedTaskUpstreamDep +from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep +from airflow.ti_deps.deps.not_previously_skipped_dep import NotPreviouslySkippedDep +from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep +from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep from airflow.triggers.base import BaseTrigger, StartTriggerArgs from airflow.utils.code_utils import get_python_source from airflow.utils.context import ( @@ -93,20 +96,23 @@ from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.module_loading import import_string, qualname from airflow.utils.operator_resources import Resources -from airflow.utils.timezone import from_timestamp, parse_timezone from airflow.utils.types import NOTSET, ArgNotSet if TYPE_CHECKING: from inspect import Parameter + from sqlalchemy.orm import Session + from airflow.models import DagRun - from airflow.models.expandinput import ExpandInput - from airflow.sdk import BaseOperatorLink - from airflow.sdk.definitions._internal.node import DAGNode - from airflow.sdk.types import Operator + from airflow.models.expandinput import SchedulerExpandInput + from airflow.models.mappedoperator import MappedOperator as SchedulerMappedOperator + from airflow.models.taskinstance import TaskInstance + from airflow.sdk import DAG as SdkDag, BaseOperatorLink from airflow.serialization.json_schema import Validator from airflow.timetables.base import DagRunInfo, DataInterval, Timetable from airflow.triggers.base import BaseEventTrigger + from airflow.typing_compat import Self + from airflow.utils.trigger_rule import TriggerRule HAS_KUBERNETES: bool try: @@ -116,27 +122,20 @@ except ImportError: pass -log = logging.getLogger(__name__) - -_OPERATOR_EXTRA_LINKS: set[str] = { - "airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunLink", - "airflow.providers.standard.sensors.external_task.ExternalDagLink", - # Deprecated names, so that existing serialized dags load straight away. 
- "airflow.providers.standard.sensors.external_task.ExternalTaskSensorLink", - "airflow.operators.dagrun_operator.TriggerDagRunLink", - "airflow.providers.standard.sensors.external_task_sensor.ExternalTaskSensorLink", -} - + SchedulerOperator: TypeAlias = "SchedulerMappedOperator | SerializedBaseOperator" + SdkOperator: TypeAlias = BaseOperator | MappedOperator -@cache -def get_operator_extra_links() -> set[str]: - """ - Get the operator extra links. +DEFAULT_OPERATOR_DEPS = frozenset( + ( + NotInRetryPeriodDep(), + PrevDagrunDep(), + TriggerRuleDep(), + NotPreviouslySkippedDep(), + MappedTaskUpstreamDep(), + ) +) - This includes both the built-in ones, and those come from the providers. - """ - _OPERATOR_EXTRA_LINKS.update(ProvidersManager().extra_links_class_names) - return _OPERATOR_EXTRA_LINKS +log = logging.getLogger(__name__) @cache @@ -168,7 +167,7 @@ def encode_relativedelta(var: relativedelta.relativedelta) -> dict[str, Any]: def decode_relativedelta(var: dict[str, Any]) -> relativedelta.relativedelta: """Dencode a relativedelta object.""" if "weekday" in var: - var["weekday"] = relativedelta.weekday(*var["weekday"]) # type: ignore + var["weekday"] = relativedelta.weekday(*var["weekday"]) return relativedelta.relativedelta(**var) @@ -251,12 +250,26 @@ def __str__(self) -> str: def _encode_trigger(trigger: BaseEventTrigger | dict): + def _ensure_serialized(d): + """ + Make sure the kwargs dict is JSON-serializable. + + This is done with BaseSerialization logic. A simple check is added to + ensure we don't double-serialize, which is possible when a trigger goes + through multiple serialization layers. + """ + if isinstance(d, dict) and Encoding.TYPE in d: + return d + return BaseSerialization.serialize(d) + if isinstance(trigger, dict): - return trigger - classpath, kwargs = trigger.serialize() + classpath = trigger["classpath"] + kwargs = trigger["kwargs"] + else: + classpath, kwargs = trigger.serialize() return { "classpath": classpath, - "kwargs": kwargs, + "kwargs": {k: _ensure_serialized(v) for k, v in kwargs.items()}, } @@ -323,6 +336,19 @@ def decode_asset_condition(var: dict[str, Any]) -> BaseAsset: raise ValueError(f"deserialization not implemented for DAT {dat!r}") +def smart_decode_trigger_kwargs(d): + """ + Slightly clean up kwargs for display or execution. + + This detects one level of BaseSerialization and tries to deserialize the + content, removing some __type __var ugliness when the value is displayed + in UI to the user and/or while execution. 
+ if not isinstance(d, dict) or Encoding.TYPE not in d: + return d + return BaseSerialization.deserialize(d) + + def decode_asset(var: dict[str, Any]): watchers = var.get("watchers", []) return Asset( @@ -331,7 +357,14 @@ def decode_asset(var: dict[str, Any]): group=var["group"], extra=var["extra"], watchers=[ - SerializedAssetWatcher(name=watcher["name"], trigger=watcher["trigger"]) for watcher in watchers + SerializedAssetWatcher( + name=watcher["name"], + trigger={ + "classpath": watcher["trigger"]["classpath"], + "kwargs": smart_decode_trigger_kwargs(watcher["trigger"]["kwargs"]), + }, + ) + for watcher in watchers ], ) @@ -369,14 +402,14 @@ def encode_outlet_event_accessors(var: OutletEventAccessors) -> dict[str, Any]: "__type": DAT.ASSET_EVENT_ACCESSORS, "_dict": [ {"key": BaseSerialization.serialize(k), "value": encode_outlet_event_accessor(v)} - for k, v in var._dict.items() # type: ignore[attr-defined] + for k, v in var._dict.items() ], } def decode_outlet_event_accessors(var: dict[str, Any]) -> OutletEventAccessors: - d = OutletEventAccessors() # type: ignore[assignment] - d._dict = { # type: ignore[attr-defined] + d = OutletEventAccessors() + d._dict = { BaseSerialization.deserialize(row["key"]): decode_outlet_event_accessor(row["value"]) for row in var["_dict"] } @@ -507,20 +540,13 @@ def deref(self, dag: DAG) -> SchedulerXComArg: # the type declarations in expandinput.py so we always remember to update # serialization logic when adding new ExpandInput variants. If you add things to # the unions, be sure to update _ExpandInputRef to match. -_ExpandInputOriginalValue = Union[ - # For .expand(**kwargs). - Mapping[str, Any], - # For expand_kwargs(arg). - XComArg, - Collection[Union[XComArg, Mapping[str, Any]]], -] -_ExpandInputSerializedValue = Union[ - # For .expand(**kwargs). - Mapping[str, Any], - # For expand_kwargs(arg). - _XComRef, - Collection[Union[_XComRef, Mapping[str, Any]]], -] +# Mapping[str, Any] is for .expand(**kwargs). +# XComArg is for expand_kwargs(arg). +_ExpandInputOriginalValue = Mapping[str, Any] | XComArg | Collection[XComArg | Mapping[str, Any]] + +# Mapping[str, Any] is for .expand(**kwargs). +# _XComRef is for expand_kwargs(arg). +_ExpandInputSerializedValue = Mapping[str, Any] | _XComRef | Collection[_XComRef | Mapping[str, Any]] class _ExpandInputRef(NamedTuple): @@ -544,7 +570,7 @@ def validate_expand_input_value(cls, value: _ExpandInputOriginalValue) -> None: possible ExpandInput cases. """ - def deref(self, dag: DAG) -> ExpandInput: + def deref(self, dag: DAG) -> SchedulerExpandInput: """ De-reference into a concrete ExpandInput object.
@@ -585,12 +611,12 @@ class BaseSerialization: SERIALIZER_VERSION = 2 @classmethod - def to_json(cls, var: DAG | BaseOperator | dict | list | set | tuple) -> str: + def to_json(cls, var: DAG | SerializedBaseOperator | dict | list | set | tuple) -> str: """Stringify DAGs and operators contained by var and returns a JSON string of var.""" return json.dumps(cls.to_dict(var), ensure_ascii=True) @classmethod - def to_dict(cls, var: DAG | BaseOperator | dict | list | set | tuple) -> dict: + def to_dict(cls, var: DAG | SerializedBaseOperator | dict | list | set | tuple) -> dict: """Stringify DAGs and operators contained by var and returns a dict of var.""" # Don't call on this class directly - only SerializedDAG or # SerializedBaseOperator should be used as the "entrypoint" @@ -645,7 +671,8 @@ def _is_excluded(cls, var: Any, attrname: str, instance: Any) -> bool: @classmethod def serialize_to_json( cls, - object_to_serialize: TaskSDKBaseOperator | MappedOperator | DAG, + # TODO (GH-52141): When can we remove SerializedBaseOperator here? + object_to_serialize: BaseOperator | MappedOperator | SerializedBaseOperator | SdkDag, decorated_fields: set, ) -> dict[str, Any]: """Serialize an object to JSON.""" @@ -697,6 +724,9 @@ def serialize( # enum.IntEnum is an int instance, it causes json dumps error so we use its value. if isinstance(var, enum.Enum): return var.value + # These are not allowed in JSON. https://datatracker.ietf.org/doc/html/rfc8259#section-6 + if isinstance(var, float) and (math.isnan(var) or math.isinf(var)): + return str(var) return var elif isinstance(var, dict): return cls._encode( @@ -725,11 +755,13 @@ def serialize( ) elif isinstance(var, DAG): return cls._encode(SerializedDAG.serialize_dag(var), type_=DAT.DAG) + elif isinstance(var, DeadlineAlert): + return cls._encode(DeadlineAlert.serialize_deadline_alert(var), type_=DAT.DEADLINE_ALERT) elif isinstance(var, Resources): return var.to_dict() elif isinstance(var, MappedOperator): return cls._encode(SerializedBaseOperator.serialize_mapped_operator(var), type_=DAT.OP) - elif isinstance(var, TaskSDKBaseOperator): + elif isinstance(var, BaseOperator): var._needs_expansion = var.get_needs_expansion() return cls._encode(SerializedBaseOperator.serialize_operator(var), type_=DAT.OP) elif isinstance(var, cls._datetime_types): @@ -807,11 +839,6 @@ def serialize( return cls._encode(serialized_asset, type_=serialized_asset.pop("__type")) elif isinstance(var, AssetRef): return cls._encode(attrs.asdict(var), type_=DAT.ASSET_REF) - elif isinstance(var, SimpleTaskInstance): - return cls._encode( - cls.serialize(var.__dict__, strict=strict), - type_=DAT.SIMPLE_TASK_INSTANCE, - ) elif isinstance(var, Connection): return cls._encode(var.to_dict(validate=True), type_=DAT.CONNECTION) elif isinstance(var, TaskCallbackRequest): @@ -924,8 +951,6 @@ def deserialize(cls, encoded_var: Any) -> Any: return AssetAll(*(decode_asset_condition(x) for x in var["objects"])) elif type_ == DAT.ASSET_REF: return Asset.ref(**var) - elif type_ == DAT.SIMPLE_TASK_INSTANCE: - return SimpleTaskInstance(**cls.deserialize(var)) elif type_ == DAT.CONNECTION: return Connection(**var) elif type_ == DAT.TASK_CALLBACK_REQUEST: @@ -936,6 +961,8 @@ def deserialize(cls, encoded_var: Any) -> Any: return TaskInstanceKey(**var) elif type_ == DAT.ARG_NOT_SET: return NOTSET + elif type_ == DAT.DEADLINE_ALERT: + return DeadlineAlert.deserialize_deadline_alert(var) else: raise TypeError(f"Invalid type {type_!s} in deserialization.") @@ -1061,11 +1088,15 @@ class DependencyDetector: """ 
@staticmethod - def detect_task_dependencies(task: Operator) -> list[DagDependency]: + def detect_task_dependencies(task: SdkOperator) -> list[DagDependency]: """Detect dependencies caused by tasks.""" from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator from airflow.providers.standard.sensors.external_task import ExternalTaskSensor + # TODO (GH-52141): Separate MappedOperator implementation to get rid of this. + if TYPE_CHECKING: + assert isinstance(task.operator_class, type) + deps = [] if isinstance(task, TriggerDagRunOperator): deps.append( @@ -1079,7 +1110,7 @@ def detect_task_dependencies(task: Operator) -> list[DagDependency]: ) elif ( isinstance(task, MappedOperator) - and issubclass(cast("type[BaseOperator]", task.operator_class), TriggerDagRunOperator) + and issubclass(task.operator_class, TriggerDagRunOperator) and "trigger_dag_id" in task.partial_kwargs ): deps.append( @@ -1103,7 +1134,7 @@ def detect_task_dependencies(task: Operator) -> list[DagDependency]: ) elif ( isinstance(task, MappedOperator) - and issubclass(cast("type[BaseOperator]", task.operator_class), ExternalTaskSensor) + and issubclass(task.operator_class, ExternalTaskSensor) and "external_dag_id" in task.partial_kwargs ): deps.append( @@ -1133,14 +1164,15 @@ def detect_task_dependencies(task: Operator) -> list[DagDependency]: return deps @staticmethod - def detect_dag_dependencies(dag: DAG | None) -> Iterable[DagDependency]: + def detect_dag_dependencies(dag: SdkDag | None) -> Iterable[DagDependency]: """Detect dependencies set directly on the DAG object.""" if not dag: return yield from dag.timetable.asset_condition.iter_dag_dependencies(source="", target=dag.dag_id) -class SerializedBaseOperator(BaseOperator, BaseSerialization): +# TODO (GH-52141): Duplicate DAGNode in the scheduler. +class SerializedBaseOperator(DAGNode, BaseSerialization): """ A JSON serializable representation of operator. 
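To make the detection logic above concrete, a small sketch of what ``detect_task_dependencies`` reports for a DAG that triggers another DAG (DAG and task ids are hypothetical; the exact ``DagDependency`` fields are abbreviated):

    from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
    from airflow.sdk import DAG
    from airflow.serialization.serialized_objects import DependencyDetector

    with DAG(dag_id="upstream_dag"):
        trigger = TriggerDagRunOperator(task_id="kick_off", trigger_dag_id="downstream_dag")

    # Each entry is a DagDependency linking this DAG to "downstream_dag".
    # The mapped branch covers the same operator created via .partial().expand(),
    # reading trigger_dag_id out of partial_kwargs instead.
    deps = DependencyDetector.detect_task_dependencies(trigger)
    print([d.dependency_type for d in deps])  # expected: ["trigger"]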
@@ -1159,15 +1191,55 @@ class SerializedBaseOperator(BaseOperator, BaseSerialization): _CONSTRUCTOR_PARAMS = { k: v.default - for k, v in itertools.chain( - signature(BaseOperator.__init__).parameters.items(), - signature(TaskSDKBaseOperator.__init__).parameters.items(), - ) + for k, v in signature(BaseOperator.__init__).parameters.items() if v.default is not v.empty } - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + _can_skip_downstream: bool + _is_empty: bool + _needs_expansion: bool + _task_display_name: str | None + depends_on_past: bool + email: str | Sequence[str] | None + execution_timeout: datetime.timedelta | None + executor: str | None + executor_config: dict | None + ignore_first_depends_on_past: bool + inlets: Sequence + is_setup: bool + is_teardown: bool + on_execute_callback: Sequence + on_failure_callback: Sequence + on_retry_callback: Sequence + on_success_callback: Sequence + outlets: Sequence + pool: str + pool_slots: int + priority_weight: int + queue: str + retries: int | None + run_as_user: str | None + start_from_trigger: bool + start_trigger_args: StartTriggerArgs + trigger_rule: TriggerRule + wait_for_downstream: bool + weight_rule: PriorityWeightStrategy + + is_mapped = False + + def __init__( + self, + *, + task_id: str, + params: Mapping[str, Any] | None = None, + _airflow_from_mapped: bool = False, + ) -> None: + super().__init__() + self.__dict__.update(self._CONSTRUCTOR_PARAMS) + self.__dict__.update(OPERATOR_DEFAULTS) + self._BaseOperator__from_mapped = _airflow_from_mapped + self.task_id = task_id + self.params = ParamsDict(params) # task_type is used by UI to display the correct class type, because UI only # receives BaseOperator from deserialized DAGs. self._task_type = "BaseOperator" @@ -1177,26 +1249,30 @@ def __init__(self, *args, **kwargs): self.template_ext = BaseOperator.template_ext self.template_fields = BaseOperator.template_fields self.operator_extra_links = BaseOperator.operator_extra_links - self._operator_name = None + self.deps = DEFAULT_OPERATOR_DEPS + self._operator_name: str | None = None - @cached_property - def operator_extra_link_dict(self) -> dict[str, BaseOperatorLink]: - """Returns dictionary of all extra links for the operator.""" - op_extra_links_from_plugin: dict[str, Any] = {} - from airflow import plugins_manager + @property + def node_id(self) -> str: + return self.task_id - plugins_manager.initialize_extra_operators_links_plugins() - if plugins_manager.operator_extra_links is None: - raise AirflowException("Can't load operators") - for ope in plugins_manager.operator_extra_links: - if ope.operators and self.operator_class in ope.operators: - op_extra_links_from_plugin.update({ope.name: ope}) + def get_dag(self) -> SdkDag | None: + return self.dag + + @property + def roots(self) -> Sequence[DAGNode]: + """Required by DAGNode.""" + return [self] - operator_extra_links_all = {link.name: link for link in self.operator_extra_links} - # Extra links defined in Plugins overrides operator links defined in operator - operator_extra_links_all.update(op_extra_links_from_plugin) + @property + def leaves(self) -> Sequence[DAGNode]: + """Required by DAGNode.""" + return [self] - return operator_extra_links_all + @cached_property + def operator_extra_link_dict(self) -> dict[str, BaseOperatorLink]: + """Returns dictionary of all extra links for the operator.""" + return {link.name: link for link in self.operator_extra_links} @cached_property def global_operator_extra_link_dict(self) -> dict[str, Any]: @@ -1227,7 
+1303,7 @@ def get_extra_links(self, ti: TaskInstance, name: str) -> str | None: link = self.operator_extra_link_dict.get(name) or self.global_operator_extra_link_dict.get(name) if not link: return None - return link.get_link(self.unmap(None), ti_key=ti.key) + return link.get_link(self.unmap(None), ti_key=ti.key) # type: ignore[arg-type] # TODO: GH-52141 - BaseOperatorLink.get_link expects BaseOperator but receives SerializedBaseOperator @property def task_type(self) -> str: @@ -1250,6 +1326,31 @@ def operator_name(self) -> str: def operator_name(self, operator_name: str): self._operator_name = operator_name + @property + def task_display_name(self) -> str: + return self._task_display_name or self.task_id + + # TODO (GH-52141): For compatibility... can we just rename this? + @property + def on_failure_fail_dagrun(self): + return self._on_failure_fail_dagrun + + @on_failure_fail_dagrun.setter + def on_failure_fail_dagrun(self, value): + self._on_failure_fail_dagrun = value + + def expand_start_trigger_args(self, *, context: Context) -> StartTriggerArgs | None: + return self.start_trigger_args + + def __getattr__(self, name): + # Handle missing attributes with task_type instead of SerializedBaseOperator + # Don't intercept special methods that Python internals might check + if name.startswith("__") and name.endswith("__"): + # For special methods, raise the original error + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + # For regular attributes, use task_type in the error message + raise AttributeError(f"'{self.task_type}' object has no attribute '{name}'") + @classmethod def serialize_mapped_operator(cls, op: MappedOperator) -> dict[str, Any]: serialized_op = cls._serialize_node(op) @@ -1277,11 +1378,11 @@ def serialize_mapped_operator(cls, op: MappedOperator) -> dict[str, Any]: return serialized_op @classmethod - def serialize_operator(cls, op: TaskSDKBaseOperator | MappedOperator) -> dict[str, Any]: + def serialize_operator(cls, op: BaseOperator | MappedOperator) -> dict[str, Any]: return cls._serialize_node(op) @classmethod - def _serialize_node(cls, op: TaskSDKBaseOperator | MappedOperator) -> dict[str, Any]: + def _serialize_node(cls, op: BaseOperator | MappedOperator) -> dict[str, Any]: """Serialize operator into a JSON object.""" serialize_op = cls.serialize_to_json(op, cls._decorated_fields) @@ -1341,7 +1442,11 @@ def _serialize_node(cls, op: TaskSDKBaseOperator | MappedOperator) -> dict[str, return serialize_op @classmethod - def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None: + def populate_operator( + cls, + op: SchedulerMappedOperator | SerializedBaseOperator, + encoded_op: dict[str, Any], + ) -> None: """ Populate operator attributes with serialized values. 
@@ -1438,7 +1543,7 @@ def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None: if v is False: raise RuntimeError("_is_sensor=False should never have been serialized!") - object.__setattr__(op, "deps", op.deps | {ReadyToRescheduleDep()}) # type: ignore[union-attr] + object.__setattr__(op, "deps", op.deps | {ReadyToRescheduleDep()}) continue elif ( k in cls._decorated_fields @@ -1481,7 +1586,7 @@ def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None: setattr(op, "start_from_trigger", bool(encoded_op.get("start_from_trigger", False))) @staticmethod - def set_task_dag_references(task: Operator, dag: DAG) -> None: + def set_task_dag_references(task: SchedulerOperator, dag: DAG) -> None: """ Handle DAG references on an operator. @@ -1505,23 +1610,24 @@ def set_task_dag_references(task: Operator, dag: DAG) -> None: dag.task_dict[task_id].upstream_task_ids.add(task.task_id) @classmethod - def deserialize_operator(cls, encoded_op: dict[str, Any]) -> Operator: + def deserialize_operator( + cls, + encoded_op: dict[str, Any], + ) -> SchedulerMappedOperator | SerializedBaseOperator: """Deserializes an operator from a JSON object.""" - op: Operator + op: SchedulerMappedOperator | SerializedBaseOperator if encoded_op.get("_is_mapped", False): # Most of these will be loaded later, these are just some stand-ins. - op_data = { - k: v for k, v in encoded_op.items() if k in TaskSDKBaseOperator.get_serialized_fields() - } + op_data = {k: v for k, v in encoded_op.items() if k in BaseOperator.get_serialized_fields()} - from airflow.models.mappedoperator import MappedOperator as MappedOperatorWithDB + from airflow.models.mappedoperator import MappedOperator as SchedulerMappedOperator try: operator_name = encoded_op["_operator_name"] except KeyError: operator_name = encoded_op["task_type"] - op = MappedOperatorWithDB( + op = SchedulerMappedOperator( operator_class=op_data, expand_input=EXPAND_INPUT_EMPTY, partial_kwargs={}, @@ -1555,7 +1661,7 @@ def deserialize_operator(cls, encoded_op: dict[str, Any]) -> Operator: return op @classmethod - def detect_dependencies(cls, op: Operator) -> set[DagDependency]: + def detect_dependencies(cls, op: SdkOperator) -> set[DagDependency]: """Detect between DAG dependencies for the operator.""" dependency_detector = DependencyDetector() deps = set(dependency_detector.detect_task_dependencies(op)) @@ -1635,6 +1741,141 @@ def serialize(cls, var: Any, *, strict: bool = False) -> Any: def deserialize(cls, encoded_var: Any) -> Any: return BaseSerialization.deserialize(encoded_var=encoded_var) + def serialize_for_task_group(self) -> tuple[DAT, Any]: + """Serialize; required by DAGNode.""" + return DAT.OP, self.task_id + + @property + def inherits_from_empty_operator(self) -> bool: + return self._is_empty + + @property + def inherits_from_skipmixin(self) -> bool: + return self._can_skip_downstream + + def expand_start_from_trigger(self, *, context: Context, session: Session) -> bool: + """ + Get the start_from_trigger value of the current abstract operator. + + Since a BaseOperator is not mapped to begin with, this simply returns + the original value of start_from_trigger. + + :meta private: + """ + return self.start_from_trigger + + def get_serialized_fields(self): + return BaseOperator.get_serialized_fields() + + def unmap(self, resolve: None) -> Self: + return self + + def _iter_all_mapped_downstreams(self) -> Iterator[MappedOperator | MappedTaskGroup]: + """ + Return mapped nodes that are direct dependencies of the current task. 
+ + For now, this walks the entire DAG to find mapped nodes that have this + current task as an upstream. We cannot use ``downstream_list`` since it + only contains operators, not task groups. In the future, we should + provide a way to record all of a DAG node's downstream nodes instead. + + Note that this does not guarantee the returned tasks actually use the + current task for task mapping, but only checks that those tasks are mapped + operators and are downstream of the current task. + + To get a list of tasks that use the current task for task mapping, use + :meth:`iter_mapped_dependants` instead. + """ + + def _walk_group(group: TaskGroup) -> Iterable[tuple[str, DAGNode]]: + """ + Recursively walk children in a task group. + + This yields all direct children (including both tasks and task + groups), and all children of any task groups. + """ + for key, child in group.children.items(): + yield key, child + if isinstance(child, TaskGroup): + yield from _walk_group(child) + + if not (dag := self.dag): + raise RuntimeError("Cannot check for mapped dependants when not attached to a DAG") + for key, child in _walk_group(dag.task_group): + if key == self.node_id: + continue + if not isinstance(child, MappedOperator | MappedTaskGroup): + continue + if self.node_id in child.upstream_task_ids: + yield child + + def iter_mapped_dependants(self) -> Iterator[MappedOperator | MappedTaskGroup]: + """ + Return mapped nodes that depend on the current task for expansion. + + For now, this walks the entire DAG to find mapped nodes that have this + current task as an upstream. We cannot use ``downstream_list`` since it + only contains operators, not task groups. In the future, we should + provide a way to record all of a DAG node's downstream nodes instead. + """ + return ( + downstream + for downstream in self._iter_all_mapped_downstreams() + if any(p.node_id == self.node_id for p in downstream.iter_mapped_dependencies()) + ) + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live. + def iter_mapped_task_groups(self) -> Iterator[MappedTaskGroup]: + """ + Return mapped task groups this task belongs to. + + Groups are returned from the innermost to the outermost. + + :meta private: + """ + if (group := self.task_group) is None: + return + yield from group.iter_mapped_task_groups() + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live. + def get_closest_mapped_task_group(self) -> MappedTaskGroup | None: + """ + Get the mapped task group "closest" to this task in the DAG. + + :meta private: + """ + return next(self.iter_mapped_task_groups(), None) + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live. + def get_needs_expansion(self) -> bool: + """ + Return true if the task is a MappedOperator or is in a mapped task group. + + :meta private: + """ + return self._needs_expansion + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live. + @methodtools.lru_cache(maxsize=1) + def get_parse_time_mapped_ti_count(self) -> int: + """ + Return the number of mapped task instances that can be created on DAG run creation. + + This only considers literal mapped arguments; ``NotFullyPopulated`` is + raised when any non-literal values are used for mapping. + + :raise NotFullyPopulated: If non-literal mapped arguments are encountered. + :raise NotMapped: If the operator is neither mapped, nor has any parent + mapped task groups. + :return: Total number of mapped TIs this task should have.
+ """ + from airflow.sdk.definitions._internal.abstractoperator import NotMapped + + group = self.get_closest_mapped_task_group() + if group is None: + raise NotMapped() + return group.get_parse_time_mapped_ti_count() + class SerializedDAG(DAG, BaseSerialization): """ @@ -1664,7 +1905,7 @@ def __get_constructor_defaults(): _json_schema = lazy_object_proxy.Proxy(load_dag_schema) @classmethod - def serialize_dag(cls, dag: DAG) -> dict: + def serialize_dag(cls, dag: SdkDag) -> dict: """Serialize a DAG into a JSON object.""" try: serialized_dag = cls.serialize_to_json(dag, cls._decorated_fields) @@ -1680,6 +1921,8 @@ def serialize_dag(cls, dag: DAG) -> dict: serialized_dag["dag_dependencies"] = [x.__dict__ for x in sorted(dag_deps)] serialized_dag["task_group"] = TaskGroupSerialization.serialize_task_group(dag.task_group) + serialized_dag["deadline"] = dag.deadline.serialize_deadline_alert() if dag.deadline else None + # Edge info in the JSON exactly matches our internal structure serialized_dag["edge_info"] = dag.edge_info serialized_dag["params"] = cls._serialize_params_dict(dag.params) @@ -1745,7 +1988,9 @@ def deserialize_dag(cls, encoded_dag: dict[str, Any]) -> SerializedDAG: tg = TaskGroupSerialization.deserialize_task_group( encoded_dag["task_group"], None, - dag.task_dict, + # TODO (GH-52141): SerializedDAG's task_dict should contain + # scheduler types instead, but currently it inherits SDK's DAG. + cast("dict[str, SchedulerOperator]", dag.task_dict), dag, ) object.__setattr__(dag, "task_group", tg) @@ -1762,12 +2007,17 @@ def deserialize_dag(cls, encoded_dag: dict[str, Any]) -> SerializedDAG: if "has_on_failure_callback" in encoded_dag: dag.has_on_failure_callback = True + if "deadline" in encoded_dag and encoded_dag["deadline"] is not None: + dag.deadline = DeadlineAlert.deserialize_deadline_alert(encoded_dag["deadline"]) + keys_to_set_none = dag.get_serialized_fields() - encoded_dag.keys() - cls._CONSTRUCTOR_PARAMS.keys() for k in keys_to_set_none: setattr(dag, k, None) + # TODO (GH-52141): SerializedDAG's task_dict should contain scheduler + # types instead, but currently it inherits SDK's DAG. 
for task in dag.task_dict.values(): - SerializedBaseOperator.set_task_dag_references(task, dag) + SerializedBaseOperator.set_task_dag_references(cast("SchedulerOperator", task), dag) return dag @@ -1969,7 +2219,7 @@ def deserialize_task_group( cls, encoded_group: dict[str, Any], parent_group: TaskGroup | None, - task_dict: dict[str, Operator], + task_dict: dict[str, SchedulerOperator], dag: SerializedDAG, ) -> TaskGroup: """Deserializes a TaskGroup from a JSON object.""" @@ -1992,7 +2242,7 @@ def deserialize_task_group( **kwargs, ) - def set_ref(task: Operator) -> Operator: + def set_ref(task: SchedulerOperator) -> SchedulerOperator: task.task_group = weakref.proxy(group) return task @@ -2098,7 +2348,11 @@ def timetable(self) -> Timetable: @property def has_task_concurrency_limits(self) -> bool: return any( - task[Encoding.VAR].get("max_active_tis_per_dag") is not None for task in self.data["dag"]["tasks"] + task[Encoding.VAR].get("max_active_tis_per_dag") is not None + or task[Encoding.VAR].get("max_active_tis_per_dagrun") is not None + or task[Encoding.VAR].get("partial_kwargs", {}).get("max_active_tis_per_dag") is not None + or task[Encoding.VAR].get("partial_kwargs", {}).get("max_active_tis_per_dagrun") is not None + for task in self.data["dag"]["tasks"] ) @property @@ -2127,7 +2381,7 @@ def get_task_assets( ports_getter = self._get_mapped_operator_ports else: ports_getter = self._get_base_operator_ports - directions = ("inlets",) if inlets else () + directions: tuple[str, ...] = ("inlets",) if inlets else () if outlets: directions += ("outlets",) for direction in directions: @@ -2140,16 +2394,14 @@ def get_task_assets( if isinstance(obj, of_type): yield task["task_id"], obj - def get_run_data_interval(self, run: DagRun) -> DataInterval: + def get_run_data_interval(self, run: DagRun) -> DataInterval | None: """Get the data interval of this run.""" if run.dag_id is not None and run.dag_id != self.dag_id: raise ValueError(f"Arguments refer to different DAGs: {self.dag_id} != {run.dag_id}") data_interval = _get_model_data_interval(run, "data_interval_start", "data_interval_end") - # the older implementation has call to infer_automated_data_interval if data_interval is None, do we want to keep that or raise - # an exception? - if data_interval is None: - raise ValueError(f"Cannot calculate data interval for run {run}") + if data_interval is None and run.logical_date is not None: + data_interval = self._real_dag.timetable.infer_manual_data_interval(run_after=run.logical_date) return data_interval @@ -2187,3 +2439,27 @@ def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str: ) return "" return XComModel.deserialize_value(value) + + +@overload +def create_scheduler_operator(op: BaseOperator | SerializedBaseOperator) -> SerializedBaseOperator: ... + + +@overload +def create_scheduler_operator(op: MappedOperator | SchedulerMappedOperator) -> SchedulerMappedOperator: ... 
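The overloads above pair with the implementation that follows; a brief usage sketch, written as comments since the operator class is purely illustrative:

    # Illustrative only: converting a Task SDK operator into its scheduler-side
    # twin via a serialize/deserialize round trip.
    #
    #   op = EmptyOperator(task_id="noop")             # an SDK BaseOperator subclass
    #   scheduler_op = create_scheduler_operator(op)   # -> SerializedBaseOperator
    #   create_scheduler_operator(scheduler_op) is scheduler_op  # already converted; returned as-is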
+
+
+def create_scheduler_operator(
+    op: BaseOperator | MappedOperator | SerializedBaseOperator | SchedulerMappedOperator,
+) -> SerializedBaseOperator | SchedulerMappedOperator:
+    from airflow.models.mappedoperator import MappedOperator as SchedulerMappedOperator
+
+    if isinstance(op, (SchedulerMappedOperator, SerializedBaseOperator)):
+        return op
+    if isinstance(op, BaseOperator):
+        d = SerializedBaseOperator.serialize_operator(op)
+    elif isinstance(op, MappedOperator):
+        d = SerializedBaseOperator.serialize_mapped_operator(op)
+    else:
+        raise TypeError(type(op).__name__)
+    return SerializedBaseOperator.deserialize_operator(d)
diff --git a/airflow-core/src/airflow/serialization/serializers/bignum.py b/airflow-core/src/airflow/serialization/serializers/bignum.py
index 769e78491e9e4..5bb89cb386c6d 100644
--- a/airflow-core/src/airflow/serialization/serializers/bignum.py
+++ b/airflow-core/src/airflow/serialization/serializers/bignum.py
@@ -47,13 +47,13 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
     return float(o), name, __version__, True
 
 
-def deserialize(classname: str, version: int, data: object) -> decimal.Decimal:
+def deserialize(cls: type, version: int, data: object) -> decimal.Decimal:
     from decimal import Decimal
 
     if version > __version__:
-        raise TypeError(f"serialized {version} of {classname} > {__version__}")
+        raise TypeError(f"serialized {version} of {qualname(cls)} > {__version__}")
 
-    if classname != qualname(Decimal):
-        raise TypeError(f"{classname} != {qualname(Decimal)}")
+    if cls is not Decimal:
+        raise TypeError(f"do not know how to deserialize {qualname(cls)}")
 
     return Decimal(str(data))
diff --git a/airflow-core/src/airflow/serialization/serializers/builtin.py b/airflow-core/src/airflow/serialization/serializers/builtin.py
index b0ee8cb713dfb..076831a05dabc 100644
--- a/airflow-core/src/airflow/serialization/serializers/builtin.py
+++ b/airflow-core/src/airflow/serialization/serializers/builtin.py
@@ -35,20 +35,20 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
     return list(cast("list", o)), qualname(o), __version__, True
 
 
-def deserialize(classname: str, version: int, data: list) -> tuple | set | frozenset:
+def deserialize(cls: type, version: int, data: list) -> tuple | set | frozenset:
     if version > __version__:
-        raise TypeError("serialized version is newer than class version")
+        raise TypeError(f"serialized version {version} is newer than class version {__version__}")
 
-    if classname == qualname(tuple):
+    if cls is tuple:
         return tuple(data)
 
-    if classname == qualname(set):
+    if cls is set:
         return set(data)
 
-    if classname == qualname(frozenset):
+    if cls is frozenset:
         return frozenset(data)
 
-    raise TypeError(f"do not know how to deserialize {classname}")
+    raise TypeError(f"do not know how to deserialize {qualname(cls)}")
 
 
 def stringify(classname: str, version: int, data: list) -> str:
diff --git a/airflow-core/src/airflow/serialization/serializers/datetime.py b/airflow-core/src/airflow/serialization/serializers/datetime.py
index 69058b8c02a8b..b5fe17a2e8b52 100644
--- a/airflow-core/src/airflow/serialization/serializers/datetime.py
+++ b/airflow-core/src/airflow/serialization/serializers/datetime.py
@@ -19,12 +19,12 @@
 
 from typing import TYPE_CHECKING
 
+from airflow._shared.timezones.timezone import parse_timezone
 from airflow.serialization.serializers.timezone import (
     deserialize as deserialize_timezone,
     serialize as serialize_timezone,
 )
 from airflow.utils.module_loading import qualname
-from airflow.utils.timezone import parse_timezone
 
 if TYPE_CHECKING:
     import datetime
@@ -59,7 +59,7 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
     return "", "", 0, False
 
 
-def deserialize(classname: str, version: int, data: dict | str) -> datetime.date | datetime.timedelta:
+def deserialize(cls: type, version: int, data: dict | str) -> datetime.date | datetime.timedelta:
     import datetime
 
     from pendulum import DateTime
@@ -86,16 +86,16 @@ def deserialize(classname: str, version: int, data: dict | str) -> datetime.date
         else None
     )
 
-    if classname == qualname(datetime.datetime) and isinstance(data, dict):
+    if cls is datetime.datetime and isinstance(data, dict):
         return datetime.datetime.fromtimestamp(float(data[TIMESTAMP]), tz=tz)
 
-    if classname == qualname(DateTime) and isinstance(data, dict):
+    if cls is DateTime and isinstance(data, dict):
         return DateTime.fromtimestamp(float(data[TIMESTAMP]), tz=tz)
 
-    if classname == qualname(datetime.timedelta) and isinstance(data, (str, float)):
+    if cls is datetime.timedelta and isinstance(data, str | float):
         return datetime.timedelta(seconds=float(data))
 
-    if classname == qualname(datetime.date) and isinstance(data, str):
+    if cls is datetime.date and isinstance(data, str):
         return datetime.date.fromisoformat(data)
 
-    raise TypeError(f"unknown date/time format {classname}")
+    raise TypeError(f"unknown date/time format {qualname(cls)}")
diff --git a/airflow-core/src/airflow/serialization/serializers/deltalake.py b/airflow-core/src/airflow/serialization/serializers/deltalake.py
index 60456baf8007c..a79b231788134 100644
--- a/airflow-core/src/airflow/serialization/serializers/deltalake.py
+++ b/airflow-core/src/airflow/serialization/serializers/deltalake.py
@@ -55,7 +55,7 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
     return data, qualname(o), __version__, True
 
 
-def deserialize(classname: str, version: int, data: dict):
+def deserialize(cls: type, version: int, data: dict):
     from deltalake.table import DeltaTable
 
     from airflow.models.crypto import get_fernet
@@ -63,7 +63,7 @@ def deserialize(classname: str, version: int, data: dict):
     if version > __version__:
         raise TypeError("serialized version is newer than class version")
 
-    if classname == qualname(DeltaTable):
+    if cls is DeltaTable:
         fernet = get_fernet()
         properties = {}
         for k, v in data["storage_options"].items():
@@ -76,4 +76,4 @@ def deserialize(classname: str, version: int, data: dict):
 
         return DeltaTable(data["table_uri"], version=data["version"], storage_options=storage_options)
 
-    raise TypeError(f"do not know how to deserialize {classname}")
+    raise TypeError(f"do not know how to deserialize {qualname(cls)}")
diff --git a/airflow-core/src/airflow/serialization/serializers/iceberg.py b/airflow-core/src/airflow/serialization/serializers/iceberg.py
index 3b03381fef389..018732c29fe35 100644
--- a/airflow-core/src/airflow/serialization/serializers/iceberg.py
+++ b/airflow-core/src/airflow/serialization/serializers/iceberg.py
@@ -55,7 +55,7 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
     return data, qualname(o), __version__, True
 
 
-def deserialize(classname: str, version: int, data: dict):
+def deserialize(cls: type, version: int, data: dict):
     from pyiceberg.catalog import load_catalog
     from pyiceberg.table import Table
 
@@ -64,7 +64,7 @@ def deserialize(classname: str, version: int, data: dict):
     if version > __version__:
         raise TypeError("serialized version is newer than class version")
 
-    if classname == qualname(Table):
+    if cls is Table:
         fernet = get_fernet()
         properties = {}
         for k, v in data["catalog_properties"].items():
@@ -73,4 +73,4 @@ def deserialize(classname: str, version: int, data: dict):
         catalog = load_catalog(data["identifier"][0], **properties)
         return catalog.load_table((data["identifier"][1], data["identifier"][2]))
 
-    raise TypeError(f"do not know how to deserialize {classname}")
+    raise TypeError(f"do not know how to deserialize {qualname(cls)}")
diff --git a/airflow-core/src/airflow/serialization/serializers/numpy.py b/airflow-core/src/airflow/serialization/serializers/numpy.py
index 603f4df44a44c..40d891113f84b 100644
--- a/airflow-core/src/airflow/serialization/serializers/numpy.py
+++ b/airflow-core/src/airflow/serialization/serializers/numpy.py
@@ -53,40 +53,39 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
         return "", "", 0, False
 
     name = qualname(o)
+    metadata = (name, __version__, True)
     if isinstance(
         o,
-        (
-            np.int_,
-            np.intc,
-            np.intp,
-            np.int8,
-            np.int16,
-            np.int32,
-            np.int64,
-            np.uint8,
-            np.uint16,
-            np.uint32,
-            np.uint64,
-        ),
+        np.int_
+        | np.intc
+        | np.intp
+        | np.int8
+        | np.int16
+        | np.int32
+        | np.int64
+        | np.uint8
+        | np.uint16
+        | np.uint32
+        | np.uint64,
     ):
-        return int(o), name, __version__, True
+        return int(o), *metadata
 
     if isinstance(o, np.bool_):
-        return bool(np), name, __version__, True
+        return bool(o), *metadata
 
-    if isinstance(
-        o, (np.float_, np.float16, np.float32, np.float64, np.complex_, np.complex64, np.complex128)
-    ):
-        return float(o), name, __version__, True
+    if isinstance(o, (np.float16, np.float32, np.float64, np.complex64, np.complex128)):
+        return float(o), *metadata
 
     return "", "", 0, False
 
 
-def deserialize(classname: str, version: int, data: str) -> Any:
+def deserialize(cls: type, version: int, data: str) -> Any:
     if version > __version__:
         raise TypeError("serialized version is newer than class version")
 
-    if classname not in deserializers:
-        raise TypeError(f"unsupported {classname} found for numpy deserialization")
+    allowed_deserialize_classes = [import_string(classname) for classname in deserializers]
+
+    if cls not in allowed_deserialize_classes:
+        raise TypeError(f"unsupported {qualname(cls)} found for numpy deserialization")
 
-    return import_string(classname)(data)
+    return cls(data)
diff --git a/airflow-core/src/airflow/serialization/serializers/pandas.py b/airflow-core/src/airflow/serialization/serializers/pandas.py
index d805e4b95c01e..73f64ce86b4fa 100644
--- a/airflow-core/src/airflow/serialization/serializers/pandas.py
+++ b/airflow-core/src/airflow/serialization/serializers/pandas.py
@@ -53,17 +53,22 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
     return buf.getvalue().hex().decode("utf-8"), qualname(o), __version__, True
 
 
-def deserialize(classname: str, version: int, data: object) -> pd.DataFrame:
+def deserialize(cls: type, version: int, data: object) -> pd.DataFrame:
     if version > __version__:
-        raise TypeError(f"serialized {version} of {classname} > {__version__}")
+        raise TypeError(f"serialized {version} of {qualname(cls)} > {__version__}")
 
-    from pyarrow import parquet as pq
+    import pandas as pd
+
+    if cls is not pd.DataFrame:
+        raise TypeError(f"do not know how to deserialize {qualname(cls)}")
 
     if not isinstance(data, str):
-        raise TypeError(f"serialized {classname} has wrong data type {type(data)}")
+        raise TypeError(f"serialized {qualname(cls)} has wrong data type {type(data)}")
 
     from io import BytesIO
 
+    from pyarrow import parquet as pq
+
     with BytesIO(bytes.fromhex(data)) as buf:
         df = pq.read_table(buf).to_pandas()
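All of the serializer hunks above migrate the serde deserializer contract from a string `classname` to a resolved `cls: type`, so modules can use identity checks (`cls is Decimal`) instead of string comparison. A minimal sketch of a custom serializer module following the new contract — the `Money` type is illustrative and not part of this diff, and the module would still need to be wired into serde's registry like the built-in modules above:

from __future__ import annotations

from dataclasses import dataclass
from typing import Any

__version__ = 1


@dataclass
class Money:  # illustrative payload type, not part of the diff
    amount: str
    currency: str


serializers = [f"{Money.__module__}.Money"]  # qualnames this module can serialize
deserializers = serializers


def serialize(o: object) -> tuple[Any, str, int, bool]:
    if not isinstance(o, Money):
        return "", "", 0, False  # not handled: empty payload, is_serialized=False
    return {"amount": o.amount, "currency": o.currency}, serializers[0], __version__, True


def deserialize(cls: type, version: int, data: dict) -> Any:
    if version > __version__:
        raise TypeError(f"serialized {version} of {cls.__name__} > {__version__}")
    if cls is not Money:  # identity check against the resolved class, not a string compare
        raise TypeError(f"do not know how to deserialize {cls.__name__}")
    return Money(**data)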
diff --git a/airflow-core/src/airflow/serialization/serializers/pydantic.py b/airflow-core/src/airflow/serialization/serializers/pydantic.py
new file mode 100644
index 0000000000000..91db381264315
--- /dev/null
+++ b/airflow-core/src/airflow/serialization/serializers/pydantic.py
@@ -0,0 +1,75 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.serialization.typing import is_pydantic_model
+from airflow.utils.module_loading import qualname
+
+if TYPE_CHECKING:
+    from airflow.serialization.serde import U
+
+serializers = [
+    "pydantic.main.BaseModel",
+]
+deserializers = serializers
+
+__version__ = 1
+
+
+def serialize(o: object) -> tuple[U, str, int, bool]:
+    """
+    Serialize a Pydantic BaseModel instance into a dict of built-in types.
+
+    Returns a tuple of:
+    - serialized data (as built-in types)
+    - the qualified class name of the model, used for deserializer lookup
+    - version number
+    - is_serialized flag (True if handled)
+    """
+    if not is_pydantic_model(o):
+        return "", "", 0, False
+
+    data = o.model_dump()  # type: ignore
+
+    return data, qualname(o), __version__, True
+
+
+def deserialize(cls: type, version: int, data: dict):
+    """
+    Deserialize a Pydantic class.
+
+    Pydantic models can be serialized into a Python dictionary via `pydantic.main.BaseModel.model_dump`
+    and the dictionary can be deserialized through `pydantic.main.BaseModel.model_validate`. This function
+    can deserialize arbitrary Pydantic models that are in `allowed_deserialization_classes`.
+
+    :param cls: The actual model class
+    :param version: Serialization version (must not exceed __version__)
+    :param data: Dictionary with built-in types, typically from model_dump()
+    :return: An instance of the actual Pydantic model
+    """
+    if version > __version__:
+        raise TypeError(f"Serialized version {version} is newer than the supported version {__version__}")
+
+    if not is_pydantic_model(cls):
+        # no deserializer available
+        raise TypeError(f"No deserializer found for {qualname(cls)}")
+
+    # Perform validation-based reconstruction
+    return cls.model_validate(data)  # type: ignore
diff --git a/airflow-core/src/airflow/serialization/serializers/timezone.py b/airflow-core/src/airflow/serialization/serializers/timezone.py
index 9f2ef7cef65ac..3a67dd8a95ea4 100644
--- a/airflow-core/src/airflow/serialization/serializers/timezone.py
+++ b/airflow-core/src/airflow/serialization/serializers/timezone.py
@@ -67,18 +67,18 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
     return "", "", 0, False
 
 
-def deserialize(classname: str, version: int, data: object) -> Any:
-    from airflow.utils.timezone import parse_timezone
+def deserialize(cls: type, version: int, data: object) -> Any:
+    from zoneinfo import ZoneInfo
+
+    from airflow._shared.timezones.timezone import parse_timezone
 
     if not isinstance(data, (str, int)):
         raise TypeError(f"{data} is not of type int or str but of {type(data)}")
 
     if version > __version__:
-        raise TypeError(f"serialized {version} of {classname} > {__version__}")
-
-    if classname == "backports.zoneinfo.ZoneInfo" and isinstance(data, str):
-        from zoneinfo import ZoneInfo
+        raise TypeError(f"serialized {version} of {qualname(cls)} > {__version__}")
 
+    if cls is ZoneInfo and isinstance(data, str):
         return ZoneInfo(data)
 
     return parse_timezone(data)
@@ -97,6 +97,6 @@ def _get_tzinfo_name(tzinfo: datetime.tzinfo | None) -> str | None:
         return tzinfo.name
     if hasattr(tzinfo, "zone"):
         # pytz timezone
-        return tzinfo.zone  # type: ignore[no-any-return]
+        return tzinfo.zone
 
     return None
diff --git a/airflow-core/src/airflow/serialization/typing.py b/airflow-core/src/airflow/serialization/typing.py
new file mode 100644
index 0000000000000..a6169b23a78d5
--- /dev/null
+++ b/airflow-core/src/airflow/serialization/typing.py
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Any
+
+
+def is_pydantic_model(cls: Any) -> bool:
+    """
+    Return True if the class is a pydantic.main.BaseModel.
+
+    Checking is done by attributes as it is significantly faster than
+    using isinstance.
+    """
+    # __pydantic_fields__ is always present on Pydantic V2 models and is a dict[str, FieldInfo]
+    # __pydantic_validator__ is an internal validator object, always set after model build
+    return hasattr(cls, "__pydantic_fields__") and hasattr(cls, "__pydantic_validator__")
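Taken together, the two new files give serde a model_dump/model_validate round trip for Pydantic v2 models. A usage sketch, assuming the `airflow.serialization.serde` entry points `serialize`/`deserialize` that these modules plug into, and noting that deserialization of a custom class is still gated by the `allowed_deserialization_classes` setting mentioned in the docstring:

from pydantic import BaseModel

from airflow.serialization.serde import deserialize, serialize


class Endpoint(BaseModel):  # illustrative model, not part of the diff
    host: str
    port: int


# serialize() dispatches to the new pydantic serializer via is_pydantic_model()
encoded = serialize(Endpoint(host="localhost", port=5432))
# deserialize() resolves the stored class name and calls model_validate() on it
restored = deserialize(encoded)
assert restored == Endpoint(host="localhost", port=5432)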
diff --git a/airflow-core/src/airflow/settings.py b/airflow-core/src/airflow/settings.py
index ce36f1e676a18..db5ce959b1faf 100644
--- a/airflow-core/src/airflow/settings.py
+++ b/airflow-core/src/airflow/settings.py
@@ -24,8 +24,9 @@
 import os
 import sys
 import warnings
+from collections.abc import Callable
 from importlib import metadata
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Literal
 
 import pluggy
 from packaging.version import Version
@@ -35,12 +36,12 @@
 from sqlalchemy.pool import NullPool
 
 from airflow import __version__ as airflow_version, policies
+from airflow._shared.timezones.timezone import local_timezone, parse_timezone, utc
 from airflow.configuration import AIRFLOW_HOME, conf
 from airflow.exceptions import AirflowInternalRuntimeError
 from airflow.logging_config import configure_logging
 from airflow.utils.orm_event_handlers import setup_event_handlers
 from airflow.utils.sqlalchemy import is_sqlalchemy_v1
-from airflow.utils.timezone import local_timezone, parse_timezone, utc
 
 if TYPE_CHECKING:
     from sqlalchemy.engine import Engine
@@ -87,6 +88,7 @@
 SIMPLE_LOG_FORMAT = conf.get("logging", "simple_log_format")
 
 SQL_ALCHEMY_CONN: str | None = None
+SQL_ALCHEMY_CONN_ASYNC: str | None = None
 PLUGINS_FOLDER: str | None = None
 LOGGING_CLASS_PATH: str | None = None
 DONOT_MODIFY_HANDLERS: bool | None = None
@@ -164,7 +166,7 @@ def replace_showwarning(replacement):
 original_show_warning = replace_showwarning(custom_show_warning)
 atexit.register(functools.partial(replace_showwarning, original_show_warning))
 
-POLICY_PLUGIN_MANAGER: Any = None  # type: ignore
+POLICY_PLUGIN_MANAGER: Any = None
 
 
 def task_policy(task):
@@ -223,8 +225,12 @@ def configure_vars():
     global DAGS_FOLDER
     global PLUGINS_FOLDER
     global DONOT_MODIFY_HANDLERS
-    SQL_ALCHEMY_CONN = conf.get("database", "SQL_ALCHEMY_CONN")
-    SQL_ALCHEMY_CONN_ASYNC = _get_async_conn_uri_from_sync(sync_uri=SQL_ALCHEMY_CONN)
+
+    SQL_ALCHEMY_CONN = conf.get("database", "sql_alchemy_conn")
+    if conf.has_option("database", "sql_alchemy_conn_async"):
+        SQL_ALCHEMY_CONN_ASYNC = conf.get("database", "sql_alchemy_conn_async")
+    else:
+        SQL_ALCHEMY_CONN_ASYNC = _get_async_conn_uri_from_sync(sync_uri=SQL_ALCHEMY_CONN)
 
     DAGS_FOLDER = os.path.expanduser(conf.get("core", "DAGS_FOLDER"))
 
@@ -319,6 +325,41 @@ def _is_sqlite_db_path_relative(sqla_conn_str: str) -> bool:
     return True
 
 
+def _get_connect_args(mode: Literal["sync", "async"]) -> Any:
+    key = {
+        "sync": "sql_alchemy_connect_args",
+        "async": "sql_alchemy_connect_args_async",
+    }[mode]
+    if conf.has_option("database", key):
+        return conf.getimport("database", key)
+    return {}
+
+
+def _configure_async_session() -> None:
+    """
+    Configure async SQLAlchemy session.
+
+    This exists so tests can reconfigure the session. How SQLAlchemy configures
+    this does not work well with Pytest, and you can end up with issues when the
+    session runs in a different event loop from the test itself.
+    """
+    global AsyncSession
+    global async_engine
+
+    async_engine = create_async_engine(
+        SQL_ALCHEMY_CONN_ASYNC,
+        connect_args=_get_connect_args("async"),
+        future=True,
+    )
+    AsyncSession = sessionmaker(
+        bind=async_engine,
+        autocommit=False,
+        autoflush=False,
+        class_=SAAsyncSession,
+        expire_on_commit=False,
+    )
+
+
 def configure_orm(disable_connection_pool=False, pool_class=None):
     """Configure ORM using SQLAlchemy."""
     from airflow.sdk.execution_time.secrets_masker import mask_secret
@@ -331,11 +372,9 @@ def configure_orm(disable_connection_pool=False, pool_class=None):
             "Please use absolute path such as `sqlite:////tmp/airflow.db`."
         )
 
+    global NonScopedSession
     global Session
     global engine
-    global async_engine
-    global AsyncSession
-    global NonScopedSession
 
     if os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true":
         # Skip DB initialization in unit tests, if DB tests are skipped
@@ -345,54 +384,46 @@ def configure_orm(disable_connection_pool=False, pool_class=None):
     log.debug("Setting up DB connection pool (PID %s)", os.getpid())
     engine_args = prepare_engine_args(disable_connection_pool, pool_class)
 
-    if conf.has_option("database", "sql_alchemy_connect_args"):
-        connect_args = conf.getimport("database", "sql_alchemy_connect_args")
-    else:
-        connect_args = {}
-
+    connect_args = _get_connect_args("sync")
     if SQL_ALCHEMY_CONN.startswith("sqlite"):
         # FastAPI runs sync endpoints in a separate thread. SQLite does not allow
         # to use objects created in another threads by default. Allowing that in test
         # to so the `test` thread and the tested endpoints can use common objects.
         connect_args["check_same_thread"] = False
 
-    engine = create_engine(SQL_ALCHEMY_CONN, connect_args=connect_args, **engine_args, future=True)
-    async_engine = create_async_engine(SQL_ALCHEMY_CONN_ASYNC, future=True)
-    AsyncSession = sessionmaker(
-        bind=async_engine,
-        autocommit=False,
-        autoflush=False,
-        class_=SAAsyncSession,
-        expire_on_commit=False,
+    engine = create_engine(
+        SQL_ALCHEMY_CONN,
+        connect_args=connect_args,
+        **engine_args,
+        future=True,
     )
+    _configure_async_session()
     mask_secret(engine.url.password)
-
     setup_event_handlers(engine)
 
     if conf.has_option("database", "sql_alchemy_session_maker"):
         _session_maker = conf.getimport("database", "sql_alchemy_session_maker")
     else:
-
-        def _session_maker(_engine):
-            return sessionmaker(
-                autocommit=False,
-                autoflush=False,
-                bind=_engine,
-                expire_on_commit=False,
-            )
-
+        _session_maker = functools.partial(
+            sessionmaker,
+            autocommit=False,
+            autoflush=False,
+            expire_on_commit=False,
+        )
     NonScopedSession = _session_maker(engine)
     Session = scoped_session(NonScopedSession)
 
-    # https://docs.sqlalchemy.org/en/20/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork
-    def clean_in_fork():
-        _globals = globals()
-        if engine := _globals.get("engine"):
-            engine.dispose(close=False)
-        if async_engine := _globals.get("async_engine"):
-            async_engine.sync_engine.dispose(close=False)
+    if register_at_fork := getattr(os, "register_at_fork", None):
+        # https://docs.sqlalchemy.org/en/20/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork
+        def clean_in_fork():
+            _globals = globals()
+            if engine := _globals.get("engine"):
+                engine.dispose(close=False)
+            if async_engine := _globals.get("async_engine"):
+                async_engine.sync_engine.dispose(close=False)
 
-    os.register_at_fork(after_in_child=clean_in_fork)
+        # Won't work on Windows
+        register_at_fork(after_in_child=clean_in_fork)
 
 
 DEFAULT_ENGINE_ARGS = {
@@ -412,7 +443,7 @@ def prepare_engine_args(disable_connection_pool=False, pool_class=None):
             default_args = default.copy()
             break
 
-    engine_args: dict = conf.getjson("database", "sql_alchemy_engine_args", fallback=default_args)  # type: ignore
+    engine_args: dict = conf.getjson("database", "sql_alchemy_engine_args", fallback=default_args)
 
     if pool_class:
         # Don't use separate settings for size etc, only those from sql_alchemy_engine_args
@@ -468,8 +499,7 @@ def prepare_engine_args(disable_connection_pool=False, pool_class=None):
         # running multiple schedulers, as repeated queries on the same session may read from stale snapshots.
         # 'READ COMMITTED' is the default value for PostgreSQL.
         # More information here:
-        # https://dev.mysql.com/doc/refman/8.0/en/innodb-transaction-isolation-levels.html"
-
+        # https://dev.mysql.com/doc/refman/8.0/en/innodb-transaction-isolation-levels.html
         if SQL_ALCHEMY_CONN.startswith("mysql"):
             engine_args["isolation_level"] = "READ COMMITTED"
 
@@ -560,12 +590,6 @@ def prepare_syspath_for_config_and_plugins():
         sys.path.append(PLUGINS_FOLDER)
 
 
-def prepare_syspath_for_dags_folder():
-    """Update sys.path to include the DAGs folder."""
-    if DAGS_FOLDER not in sys.path:
-        sys.path.append(DAGS_FOLDER)
-
-
 def import_local_settings():
     """Import airflow_local_settings.py files to allow overriding any configs in settings.py file."""
     try:
@@ -612,7 +636,6 @@ def initialize():
     # in airflow_local_settings to take precedence
     load_policy_plugins(POLICY_PLUGIN_MANAGER)
     import_local_settings()
-    prepare_syspath_for_dags_folder()
 
     global LOGGING_CLASS_PATH
     LOGGING_CLASS_PATH = configure_logging()
@@ -620,7 +643,9 @@ def initialize():
 
     # The webservers import this file from models.py with the default settings.
     if not os.environ.get("PYTHON_OPERATORS_VIRTUAL_ENV_MODE", None):
-        configure_orm()
+        is_worker = os.environ.get("_AIRFLOW__REEXECUTED_PROCESS") == "1"
+        if not is_worker:
+            configure_orm()
     configure_action_logging()
 
     # mask the sensitive_config_values
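The new `sql_alchemy_conn_async` option lets a deployment point the async engine at a different DSN than the sync one; when it is absent, the URI is derived from the sync DSN. `_get_async_conn_uri_from_sync` itself is defined outside this hunk, so the following is only a rough sketch of what such a derivation typically looks like — the dialect-to-driver mapping here (asyncpg/aiosqlite/aiomysql) is an assumption, not the verified helper:

def _get_async_conn_uri_from_sync(sync_uri: str) -> str:
    # Assumed mapping of sync dialects to async drivers; the real helper
    # lives elsewhere in settings.py and may differ in detail.
    replacements = {
        "postgresql": "postgresql+asyncpg",
        "sqlite": "sqlite+aiosqlite",
        "mysql": "mysql+aiomysql",
    }
    scheme, _, rest = sync_uri.partition("://")
    dialect = scheme.split("+", 1)[0]  # drop any explicit sync driver suffix
    return f"{replacements.get(dialect, dialect)}://{rest}"


# e.g. "postgresql://user:pass@db/airflow" -> "postgresql+asyncpg://user:pass@db/airflow"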
""" - return cls(**data) # type: ignore[call-arg] + return cls(**data) def serialize(self) -> dict[str, Any]: """ diff --git a/airflow-core/src/airflow/ti_deps/dep_context.py b/airflow-core/src/airflow/ti_deps/dep_context.py index 421455bc193be..056b633f36122 100644 --- a/airflow-core/src/airflow/ti_deps/dep_context.py +++ b/airflow-core/src/airflow/ti_deps/dep_context.py @@ -18,7 +18,7 @@ from __future__ import annotations import contextlib -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast import attr @@ -29,7 +29,9 @@ from sqlalchemy.orm.session import Session from airflow.models.dagrun import DagRun + from airflow.models.mappedoperator import MappedOperator from airflow.models.taskinstance import TaskInstance + from airflow.serialization.serialized_objects import SerializedBaseOperator @attr.define @@ -95,10 +97,12 @@ def ensure_finished_tis(self, dag_run: DagRun, session: Session) -> list[TaskIns if self.finished_tis is None: finished_tis = dag_run.get_task_instances(state=State.finished, session=session) for ti in finished_tis: - if not getattr(ti, "task", None) is not None and dag_run.dag: - with contextlib.suppress(TaskNotFound): - ti.task = dag_run.dag.get_task(ti.task_id) - + if getattr(ti, "task", None) is not None or (dag := dag_run.dag) is None: + continue + with contextlib.suppress(TaskNotFound): + # TODO (GH-52141): get_task in scheduler should contain scheduler + # types instead, but currently it inherits SDK's DAG. + ti.task = cast("MappedOperator | SerializedBaseOperator", dag.get_task(ti.task_id)) self.finished_tis = finished_tis else: finished_tis = self.finished_tis diff --git a/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py b/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py index 97531ef4257e6..2f3c8015af29c 100644 --- a/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py +++ b/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py @@ -22,13 +22,13 @@ from sqlalchemy import select -from airflow.models.taskinstance import TaskInstance from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.utils.state import State, TaskInstanceState if TYPE_CHECKING: from sqlalchemy.orm import Session + from airflow.models.taskinstance import TaskInstance from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.deps.base_ti_dep import TIDepStatus @@ -51,6 +51,7 @@ def _get_dep_statuses( session: Session, dep_context: DepContext, ) -> Iterator[TIDepStatus]: + from airflow.models.taskinstance import TaskInstance from airflow.sdk.definitions.mappedoperator import MappedOperator if isinstance(ti.task, MappedOperator): diff --git a/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py b/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py index 90954f29f2f50..1013fecdfb811 100644 --- a/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py +++ b/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py @@ -17,8 +17,8 @@ # under the License. 
diff --git a/airflow-core/src/airflow/ti_deps/dep_context.py b/airflow-core/src/airflow/ti_deps/dep_context.py
index 421455bc193be..056b633f36122 100644
--- a/airflow-core/src/airflow/ti_deps/dep_context.py
+++ b/airflow-core/src/airflow/ti_deps/dep_context.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import contextlib
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, cast
 
 import attr
 
@@ -29,7 +29,9 @@
     from sqlalchemy.orm.session import Session
 
     from airflow.models.dagrun import DagRun
+    from airflow.models.mappedoperator import MappedOperator
     from airflow.models.taskinstance import TaskInstance
+    from airflow.serialization.serialized_objects import SerializedBaseOperator
 
 
 @attr.define
@@ -95,10 +97,12 @@ def ensure_finished_tis(self, dag_run: DagRun, session: Session) -> list[TaskIns
         if self.finished_tis is None:
             finished_tis = dag_run.get_task_instances(state=State.finished, session=session)
             for ti in finished_tis:
-                if not getattr(ti, "task", None) is not None and dag_run.dag:
-                    with contextlib.suppress(TaskNotFound):
-                        ti.task = dag_run.dag.get_task(ti.task_id)
-
+                if getattr(ti, "task", None) is not None or (dag := dag_run.dag) is None:
+                    continue
+                with contextlib.suppress(TaskNotFound):
+                    # TODO (GH-52141): get_task in scheduler should contain scheduler
+                    # types instead, but currently it inherits SDK's DAG.
+                    ti.task = cast("MappedOperator | SerializedBaseOperator", dag.get_task(ti.task_id))
             self.finished_tis = finished_tis
         else:
             finished_tis = self.finished_tis
diff --git a/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py b/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py
index 97531ef4257e6..2f3c8015af29c 100644
--- a/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py
+++ b/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py
@@ -22,13 +22,13 @@
 
 from sqlalchemy import select
 
-from airflow.models.taskinstance import TaskInstance
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
 from airflow.utils.state import State, TaskInstanceState
 
 if TYPE_CHECKING:
     from sqlalchemy.orm import Session
 
+    from airflow.models.taskinstance import TaskInstance
     from airflow.ti_deps.dep_context import DepContext
     from airflow.ti_deps.deps.base_ti_dep import TIDepStatus
 
@@ -51,6 +51,7 @@ def _get_dep_statuses(
         session: Session,
         dep_context: DepContext,
     ) -> Iterator[TIDepStatus]:
+        from airflow.models.taskinstance import TaskInstance
         from airflow.sdk.definitions.mappedoperator import MappedOperator
 
         if isinstance(ti.task, MappedOperator):
diff --git a/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py b/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py
index 90954f29f2f50..1013fecdfb811 100644
--- a/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py
+++ b/airflow-core/src/airflow/ti_deps/deps/not_in_retry_period_dep.py
@@ -17,8 +17,8 @@
 # under the License.
 from __future__ import annotations
 
+from airflow._shared.timezones import timezone
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
-from airflow.utils import timezone
 from airflow.utils.session import provide_session
 from airflow.utils.state import TaskInstanceState
diff --git a/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py b/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py
index 9ce5c1134240a..1f4363c586ee1 100644
--- a/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py
+++ b/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py
@@ -33,6 +33,7 @@
     from sqlalchemy.orm import Session
 
     from airflow.sdk.types import Operator
+    from airflow.serialization.serialized_objects import SerializedBaseOperator
 
 _SUCCESSFUL_STATES = (TaskInstanceState.SKIPPED, TaskInstanceState.SUCCESS)
 
@@ -104,7 +105,12 @@ def _count_unsuccessful_tis(dagrun: DagRun, task_id: str, *, session: Session) -
         )
 
     @staticmethod
-    def _has_unsuccessful_dependants(dagrun: DagRun, task: Operator, *, session: Session) -> bool:
+    def _has_unsuccessful_dependants(
+        dagrun: DagRun,
+        task: Operator | SerializedBaseOperator,
+        *,
+        session: Session,
+    ) -> bool:
         """
         Check if any of the task's dependants are unsuccessful in a given run.
diff --git a/airflow-core/src/airflow/ti_deps/deps/ready_to_reschedule.py b/airflow-core/src/airflow/ti_deps/deps/ready_to_reschedule.py
index abe0d38c2b707..501b1574205e1 100644
--- a/airflow-core/src/airflow/ti_deps/deps/ready_to_reschedule.py
+++ b/airflow-core/src/airflow/ti_deps/deps/ready_to_reschedule.py
@@ -17,10 +17,10 @@
 # under the License.
 from __future__ import annotations
 
+from airflow._shared.timezones import timezone
 from airflow.executors.executor_loader import ExecutorLoader
 from airflow.models.taskreschedule import TaskReschedule
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
-from airflow.utils import timezone
 from airflow.utils.session import provide_session
 from airflow.utils.state import TaskInstanceState
diff --git a/airflow-core/src/airflow/ti_deps/deps/runnable_exec_date_dep.py b/airflow-core/src/airflow/ti_deps/deps/runnable_exec_date_dep.py
index 0f996e5e9f409..396f8180d1be1 100644
--- a/airflow-core/src/airflow/ti_deps/deps/runnable_exec_date_dep.py
+++ b/airflow-core/src/airflow/ti_deps/deps/runnable_exec_date_dep.py
@@ -17,8 +17,8 @@
 # under the License.
 from __future__ import annotations
 
+from airflow._shared.timezones import timezone
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
-from airflow.utils import timezone
 from airflow.utils.session import provide_session
diff --git a/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py
index 1475756cbcbca..dd62af8324a15 100644
--- a/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py
+++ b/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py
@@ -21,14 +21,14 @@
 import functools
 from collections import Counter
 from collections.abc import Iterator, KeysView
-from typing import TYPE_CHECKING, NamedTuple
+from typing import TYPE_CHECKING, NamedTuple, cast
 
 from sqlalchemy import and_, func, or_, select
 
 from airflow.models.taskinstance import PAST_DEPENDS_MET
+from airflow.sdk.definitions.taskgroup import MappedTaskGroup
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
 from airflow.utils.state import TaskInstanceState
-from airflow.utils.task_group import MappedTaskGroup
 from airflow.utils.trigger_rule import TriggerRule as TR
 
 if TYPE_CHECKING:
@@ -36,7 +36,9 @@
     from sqlalchemy.sql.expression import ColumnOperators
 
     from airflow import DAG
+    from airflow.models.mappedoperator import MappedOperator
     from airflow.models.taskinstance import TaskInstance
+    from airflow.serialization.serialized_objects import SerializedBaseOperator
     from airflow.ti_deps.dep_context import DepContext
     from airflow.ti_deps.deps.base_ti_dep import TIDepStatus
 
@@ -139,12 +141,12 @@ def _get_expanded_ti_count() -> int:
             This extra closure allows us to query the database only when needed,
             and at most once.
             """
-            from airflow.models.baseoperator import BaseOperator
+            from airflow.models.mappedoperator import get_mapped_ti_count
 
             if TYPE_CHECKING:
                 assert ti.task
 
-            return BaseOperator.get_mapped_ti_count(ti.task, ti.run_id, session=session)
+            return get_mapped_ti_count(ti.task, ti.run_id, session=session)
 
         def _iter_expansion_dependencies(task_group: MappedTaskGroup) -> Iterator[str]:
             from airflow.sdk.definitions.mappedoperator import MappedOperator
@@ -184,7 +186,9 @@ def _get_relevant_upstream_map_indexes(upstream_id: str) -> int | range | None:
             except (NotFullyPopulated, NotMapped):
                 return None
             return ti.get_relevant_upstream_map_indexes(
-                upstream=ti.task.dag.task_dict[upstream_id],
+                # TODO (GH-52141): task_dict in scheduler should contain
+                # scheduler types instead, but currently it inherits SDK's DAG.
+                upstream=cast("MappedOperator | SerializedBaseOperator", ti.task.dag.task_dict[upstream_id]),
                 ti_count=expanded_ti_count,
                 session=session,
             )
@@ -430,6 +434,17 @@ def _evaluate_direct_relatives() -> Iterator[TIDepStatus]:
             elif trigger_rule == TR.ALL_SKIPPED:
                 if success or failed or upstream_failed:
                     new_state = TaskInstanceState.SKIPPED
+            elif trigger_rule == TR.ALL_DONE_MIN_ONE_SUCCESS:
+                # For this trigger rule, skipped tasks are not considered "done"
+                non_skipped_done = success + failed + upstream_failed + removed
+                non_skipped_upstream = upstream - skipped
+
+                if skipped > 0:
+                    # There are skipped tasks, so not all tasks are "done" for this rule
+                    new_state = TaskInstanceState.SKIPPED
+                elif non_skipped_done >= non_skipped_upstream and success == 0:
+                    # All non-skipped tasks are done but no successes
+                    new_state = TaskInstanceState.UPSTREAM_FAILED
             elif trigger_rule == TR.ALL_DONE_SETUP_SUCCESS:
                 if upstream_done and upstream_setup and skipped_setup >= upstream_setup:
                     # when there is an upstream setup and they have all skipped, then skip
@@ -573,6 +588,41 @@ def _evaluate_direct_relatives() -> Iterator[TIDepStatus]:
                             f"upstream_task_ids={task.upstream_task_ids}"
                         )
                     )
+            elif trigger_rule == TR.ALL_DONE_MIN_ONE_SUCCESS:
+                # For this trigger rule, skipped tasks are not considered "done"
+                non_skipped_done = success + failed + upstream_failed + removed
+                non_skipped_upstream = upstream - skipped
+                if ti.map_index > -1:
+                    non_skipped_upstream -= removed
+                    non_skipped_done -= removed
+
+                if skipped > 0:
+                    yield self._failing_status(
+                        reason=(
+                            f"Task's trigger rule '{trigger_rule}' requires all non-skipped upstream tasks to have "
+                            f"completed, but found {skipped} skipped task(s). "
+                            f"upstream_states={upstream_states}, "
+                            f"upstream_task_ids={task.upstream_task_ids}"
+                        )
+                    )
+                elif non_skipped_done < non_skipped_upstream:
+                    yield self._failing_status(
+                        reason=(
+                            f"Task's trigger rule '{trigger_rule}' requires all non-skipped upstream tasks to have "
+                            f"completed, but found {non_skipped_upstream - non_skipped_done} task(s) that were not done. "
+                            f"upstream_states={upstream_states}, "
+                            f"upstream_task_ids={task.upstream_task_ids}"
+                        )
+                    )
+                elif success == 0:
+                    yield self._failing_status(
+                        reason=(
+                            f"Task's trigger rule '{trigger_rule}' requires all non-skipped upstream tasks to have "
+                            f"completed and at least one upstream task has succeeded, but found "
+                            f"{success} successful task(s). upstream_states={upstream_states}, "
+                            f"upstream_task_ids={task.upstream_task_ids}"
+                        )
+                    )
             else:
                 yield self._failing_status(reason=f"No strategy to evaluate trigger rule '{trigger_rule}'.")
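The two ALL_DONE_MIN_ONE_SUCCESS blocks above implement the same counting rule twice — once to pick a new state, once to produce failing-status reasons. A standalone paraphrase of that rule for a non-mapped task (map_index == -1), purely for illustration and not the scheduler code itself:

def evaluate_all_done_min_one_success(
    upstream: int, success: int, failed: int, upstream_failed: int, removed: int, skipped: int
) -> str:
    # skipped tasks do not count as "done" for this rule
    non_skipped_done = success + failed + upstream_failed + removed
    non_skipped_upstream = upstream - skipped
    if skipped > 0:
        return "SKIPPED"  # any skipped upstream short-circuits the rule
    if non_skipped_done < non_skipped_upstream:
        return "WAITING"  # some upstream tasks are still running
    if success == 0:
        return "UPSTREAM_FAILED"  # everything finished, but nothing succeeded
    return "RUNNABLE"


# three upstreams: one success, two failures -> still runnable
assert evaluate_all_done_min_one_success(3, 1, 2, 0, 0, 0) == "RUNNABLE"
# three upstreams: all failed -> upstream failed
assert evaluate_all_done_min_one_success(3, 0, 3, 0, 0, 0) == "UPSTREAM_FAILED"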
diff --git a/airflow-core/src/airflow/timetables/_cron.py b/airflow-core/src/airflow/timetables/_cron.py
index a0d7e5091c9d9..e62f96de77029 100644
--- a/airflow-core/src/airflow/timetables/_cron.py
+++ b/airflow-core/src/airflow/timetables/_cron.py
@@ -22,9 +22,9 @@
 from cron_descriptor import CasingTypeEnum, ExpressionDescriptor, FormatException, MissingFieldException
 from croniter import CroniterBadCronError, CroniterBadDateError, croniter
 
+from airflow._shared.timezones.timezone import convert_to_utc, make_aware, make_naive, parse_timezone
 from airflow.exceptions import AirflowTimetableInvalid
 from airflow.utils.dates import cron_presets
-from airflow.utils.timezone import convert_to_utc, make_aware, make_naive, parse_timezone
 
 if TYPE_CHECKING:
     from pendulum import DateTime
diff --git a/airflow-core/src/airflow/timetables/_delta.py b/airflow-core/src/airflow/timetables/_delta.py
index 7203cd406310f..acdf6aa704ec3 100644
--- a/airflow-core/src/airflow/timetables/_delta.py
+++ b/airflow-core/src/airflow/timetables/_delta.py
@@ -20,8 +20,8 @@
 import datetime
 from typing import TYPE_CHECKING
 
+from airflow._shared.timezones.timezone import convert_to_utc
 from airflow.exceptions import AirflowTimetableInvalid
-from airflow.utils.timezone import convert_to_utc
 
 if TYPE_CHECKING:
     from dateutil.relativedelta import relativedelta
diff --git a/airflow-core/src/airflow/timetables/events.py b/airflow-core/src/airflow/timetables/events.py
index 42b5d13e2ec78..d8e70626d409a 100644
--- a/airflow-core/src/airflow/timetables/events.py
+++ b/airflow-core/src/airflow/timetables/events.py
@@ -22,8 +22,8 @@
 
 import pendulum
 
+from airflow._shared.timezones import timezone
 from airflow.timetables.base import DagRunInfo, DataInterval, Timetable
-from airflow.utils import timezone
 
 if TYPE_CHECKING:
     from pendulum import DateTime
@@ -62,7 +62,8 @@ def __init__(
         if description is None:
             if self.event_dates:
                 self.description = (
-                    f"{len(self.event_dates)} events between {self.event_dates[0]} and {self.event_dates[-1]}"
+                    f"{len(self.event_dates)} events between "
+                    f"{self.event_dates[0].isoformat(sep='T')} and {self.event_dates[-1].isoformat(sep='T')}"
                 )
             else:
                 self.description = "No events"
@@ -123,12 +124,17 @@ def serialize(self):
         return {
             "event_dates": [x.isoformat(sep="T") for x in self.event_dates],
             "restrict_to_events": self.restrict_to_events,
+            "description": self.description,
+            "_summary": self._summary,
         }
 
     @classmethod
     def deserialize(cls, data) -> Timetable:
-        return cls(
-            [pendulum.DateTime.fromisoformat(x) for x in data["event_dates"]],
-            data["restrict_to_events"],
+        time_table = cls(
+            event_dates=[pendulum.DateTime.fromisoformat(x) for x in data["event_dates"]],
+            restrict_to_events=data["restrict_to_events"],
             presorted=True,
+            description=data["description"],
         )
+        time_table._summary = data["_summary"]
+        return time_table
diff --git a/airflow-core/src/airflow/timetables/interval.py b/airflow-core/src/airflow/timetables/interval.py
index f0de30e76de6e..e248dd67da206 100644
--- a/airflow-core/src/airflow/timetables/interval.py
+++ b/airflow-core/src/airflow/timetables/interval.py
@@ -17,20 +17,20 @@
 from __future__ import annotations
 
 import datetime
-from typing import TYPE_CHECKING, Any, Union
+from typing import TYPE_CHECKING, Any
 
 from dateutil.relativedelta import relativedelta
 from pendulum import DateTime
 
+from airflow._shared.timezones.timezone import coerce_datetime, utcnow
 from airflow.timetables._cron import CronMixin
 from airflow.timetables._delta import DeltaMixin
 from airflow.timetables.base import DagRunInfo, DataInterval, Timetable
-from airflow.utils.timezone import coerce_datetime, utcnow
 
 if TYPE_CHECKING:
     from airflow.timetables.base import TimeRestriction
 
-Delta = Union[datetime.timedelta, relativedelta]
+Delta = datetime.timedelta | relativedelta
 
 
 class _DataIntervalTimetable(Timetable):
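With `description` and `_summary` now part of the serialized payload, an `EventsTimetable` carries its display fields through DAG serialization instead of recomputing them. A quick round-trip sketch, assuming the constructor keeps an explicitly passed description as-is (event dates are arbitrary examples):

import pendulum

from airflow.timetables.events import EventsTimetable

tt = EventsTimetable(
    event_dates=[pendulum.datetime(2025, 1, 1), pendulum.datetime(2025, 6, 1)],
    description="release days",
)
data = tt.serialize()
# data now carries "description" and "_summary" alongside the event dates
restored = EventsTimetable.deserialize(data)
assert restored.description == "release days"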
diff --git a/airflow-core/src/airflow/timetables/simple.py b/airflow-core/src/airflow/timetables/simple.py
index 49dcda81a4f6f..b5b6f2468f369 100644
--- a/airflow-core/src/airflow/timetables/simple.py
+++ b/airflow-core/src/airflow/timetables/simple.py
@@ -19,8 +19,8 @@
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 
+from airflow._shared.timezones import timezone
 from airflow.timetables.base import DagRunInfo, DataInterval, Timetable
-from airflow.utils import timezone
 
 if TYPE_CHECKING:
     from pendulum import DateTime
@@ -101,12 +101,11 @@ def next_dagrun_info(
     ) -> DagRunInfo | None:
         if last_automated_data_interval is not None:
             return None  # Already run, no more scheduling.
-        if restriction.earliest is None:  # No start date, won't run.
-            return None
+        # If the user does not specify an explicit start_date, the dag is ready.
+        run_after = restriction.earliest or timezone.coerce_datetime(timezone.utcnow())
         # "@once" always schedule to the start_date determined by the DAG and
         # tasks, regardless of catchup or not. This has been the case since 1.10
         # and we're inheriting it.
-        run_after = restriction.earliest
         if restriction.latest is not None and run_after > restriction.latest:
             return None
         return DagRunInfo.exact(run_after)
@@ -135,14 +134,21 @@ def next_dagrun_info(
     ) -> DagRunInfo | None:
         if restriction.earliest is None:  # No start date, won't run.
             return None
+
+        current_time = timezone.coerce_datetime(timezone.utcnow())
+
         if last_automated_data_interval is not None:
             # has already run once
-            start = last_automated_data_interval.end
-            end = timezone.coerce_datetime(timezone.utcnow())
+            if last_automated_data_interval.end > current_time:  # start date is future
+                start = restriction.earliest
+                elapsed = last_automated_data_interval.end - last_automated_data_interval.start
+
+                end = start + elapsed.as_timedelta()
+            else:
+                start = last_automated_data_interval.end
+                end = current_time
         else:
             # first run
             start = restriction.earliest
-            end = max(
-                restriction.earliest, timezone.coerce_datetime(timezone.utcnow())
-            )  # won't run any earlier than start_date
+            end = max(restriction.earliest, current_time)
 
         if restriction.latest is not None and end > restriction.latest:
             return None
diff --git a/airflow-core/src/airflow/timetables/trigger.py b/airflow-core/src/airflow/timetables/trigger.py
index 87e52a3f2640d..ed2f65cf24511 100644
--- a/airflow-core/src/airflow/timetables/trigger.py
+++ b/airflow-core/src/airflow/timetables/trigger.py
@@ -23,10 +23,10 @@
 import time
 from typing import TYPE_CHECKING, Any
 
+from airflow._shared.timezones.timezone import coerce_datetime, utcnow
 from airflow.timetables._cron import CronMixin
 from airflow.timetables._delta import DeltaMixin
 from airflow.timetables.base import DagRunInfo, DataInterval, Timetable
-from airflow.utils.timezone import coerce_datetime, utcnow
 
 if TYPE_CHECKING:
     from dateutil.relativedelta import relativedelta
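The `simple.py` change above means an `@once` DAG no longer needs an explicit `start_date`: when `restriction.earliest` is `None`, the single run is scheduled for the moment the scheduler first considers the DAG. A minimal sketch of the resulting behavior, assuming the Airflow 3 SDK import paths shown here:

import datetime

from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.sdk import DAG

# Under the hunk above, this DAG gets exactly one run scheduled "now"
# instead of never being scheduled at all.
with DAG(dag_id="once_without_start_date", schedule="@once"):
    EmptyOperator(task_id="noop")

# An explicit start_date still pins the single run to that time.
with DAG(
    dag_id="once_with_start_date",
    schedule="@once",
    start_date=datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc),
):
    EmptyOperator(task_id="noop")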
diff --git a/airflow-core/src/airflow/traces/otel_tracer.py b/airflow-core/src/airflow/traces/otel_tracer.py
index d5e71e3f47e05..22fb1d2935103 100644
--- a/airflow-core/src/airflow/traces/otel_tracer.py
+++ b/airflow-core/src/airflow/traces/otel_tracer.py
@@ -19,6 +19,7 @@
 
 import logging
 import random
+from contextlib import AbstractContextManager
 from typing import TYPE_CHECKING
 
 from opentelemetry import trace
@@ -32,12 +33,12 @@
 from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
 from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
 
+from airflow._shared.timezones import timezone
 from airflow.configuration import conf
 from airflow.traces.utils import (
     parse_traceparent,
     parse_tracestate,
 )
-from airflow.utils import timezone
 from airflow.utils.dates import datetime_to_nano
 from airflow.utils.net import get_hostname
 
@@ -247,7 +248,7 @@ def _new_span(
         links=None,
         start_time=None,
         start_as_current: bool = True,
-    ):
+    ) -> AbstractContextManager[trace.span.Span] | trace.span.Span:
         if component is None:
             component = self.otel_service
 
@@ -260,24 +261,24 @@ def _new_span(
             links = []
 
         if start_as_current:
-            span = tracer.start_as_current_span(
-                name=span_name,
-                context=parent_context,
-                links=links,
-                start_time=datetime_to_nano(start_time),
-            )
-        else:
-            span = tracer.start_span(
+            return tracer.start_as_current_span(
                 name=span_name,
                 context=parent_context,
                 links=links,
                 start_time=datetime_to_nano(start_time),
             )
-            current_span_ctx = trace.set_span_in_context(NonRecordingSpan(span.get_span_context()))
-            # We have to manually make the span context as the active context.
-            # If the span needs to be injected into the carrier, then this is needed to make sure
-            # that the injected context will point to the span context that was just created.
-            attach(current_span_ctx)
+
+        span = tracer.start_span(  # type: ignore[assignment]
+            name=span_name,
+            context=parent_context,
+            links=links,
+            start_time=datetime_to_nano(start_time),
+        )
+        current_span_ctx = trace.set_span_in_context(NonRecordingSpan(span.get_span_context()))  # type: ignore[attr-defined]
+        # We have to manually make the span context as the active context.
+        # If the span needs to be injected into the carrier, then this is needed to make sure
+        # that the injected context will point to the span context that was just created.
+        attach(current_span_ctx)
         return span
 
     def inject(self) -> dict:
diff --git a/airflow-core/src/airflow/traces/tracer.py b/airflow-core/src/airflow/traces/tracer.py
index 45ed924deda85..327516a063817 100644
--- a/airflow-core/src/airflow/traces/tracer.py
+++ b/airflow-core/src/airflow/traces/tracer.py
@@ -19,8 +19,9 @@
 
 import logging
 import socket
+from collections.abc import Callable
 from functools import wraps
-from typing import TYPE_CHECKING, Any, Callable, Protocol
+from typing import TYPE_CHECKING, Any, Protocol
 
 from airflow.configuration import conf
 
@@ -44,7 +45,7 @@ def gen_links_from_kv_list(list):
     return gen_links_from_kv_list(list)
 
 
-def add_span(func):
+def add_debug_span(func):
     """Decorate a function with span."""
     func_name = func.__name__
     qual_name = func.__qualname__
@@ -53,7 +54,7 @@ def add_span(func):
 
     @wraps(func)
     def wrapper(*args, **kwargs):
-        with Trace.start_span(span_name=func_name, component=component):
+        with DebugTrace.start_span(span_name=func_name, component=component):
             return func(*args, **kwargs)
 
     return wrapper
@@ -254,6 +255,13 @@ class _TraceMeta(type):
     factory: Callable[[], Tracer] | None = None
     instance: Tracer | EmptyTrace | None = None
 
+    def __new__(cls, name, bases, attrs):
+        # Read the debug flag from the class body.
+        if "check_debug_traces_flag" not in attrs:
+            raise TypeError(f"Class '{name}' must define 'check_debug_traces_flag'.")
+
+        return super().__new__(cls, name, bases, attrs)
+
     def __getattr__(cls, name: str):
         if not cls.factory:
             # Lazy initialization of the factory
@@ -276,13 +284,24 @@ def __call__(cls, *args, **kwargs):
             cls._initialize_instance()
         return cls.instance
 
-    @classmethod
     def configure_factory(cls):
         """Configure the trace factory based on settings."""
-        if conf.has_option("traces", "otel_on") and conf.getboolean("traces", "otel_on"):
+        otel_on = conf.getboolean("traces", "otel_on")
+
+        if cls.check_debug_traces_flag:
+            debug_traces_on = conf.getboolean("traces", "otel_debug_traces_on")
+        else:
+            # Set to true so that it will be ignored during the evaluation for the factory instance.
+            # If this is true, then (otel_on and debug_traces_on) will always evaluate to
+            # whatever value 'otel_on' has and therefore it will be ignored.
+            debug_traces_on = True
+
+        if otel_on and debug_traces_on:
             from airflow.traces import otel_tracer
 
-            cls.factory = otel_tracer.get_otel_tracer
+            cls.factory = staticmethod(
+                lambda use_simple_processor=False: otel_tracer.get_otel_tracer(cls, use_simple_processor)
+            )
         else:
             # EmptyTrace is a class and not inherently callable.
             # Using a lambda ensures it can be invoked as a callable factory.
            # This guarantees a default implementation
@@ -290,7 +309,6 @@
             # and avoids passing `cls` as an implicit argument.
             cls.factory = staticmethod(lambda: EmptyTrace())
 
-    @classmethod
     def get_constant_tags(cls) -> str | None:
         """Get constant tags to add to all traces."""
         return conf.get("traces", "tags", fallback=None)
@@ -298,7 +316,15 @@
 
 if TYPE_CHECKING:
     Trace: EmptyTrace
+    DebugTrace: EmptyTrace
 else:
 
     class Trace(metaclass=_TraceMeta):
         """Empty class for Trace - we use metaclass to inject the right one."""
+
+        check_debug_traces_flag = False
+
+    class DebugTrace(metaclass=_TraceMeta):
+        """Empty class for DebugTrace - used when the debug traces flag is enabled."""
+
+        check_debug_traces_flag = True
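The `add_span` to `add_debug_span` rename above means high-volume instrumentation only emits real spans when both `otel_on` and the new `otel_debug_traces_on` options are enabled; otherwise `DebugTrace` resolves to the no-op `EmptyTrace`. A usage sketch — the decorated function is illustrative, the API calls are the ones shown in the hunk:

from airflow.traces.tracer import DebugTrace, add_debug_span


@add_debug_span
def _emit_heartbeat_metrics():
    # Wrapped in a span only when [traces] otel_on and
    # otel_debug_traces_on are both true.
    ...


def do_work():
    with DebugTrace.start_span(span_name="do_work", component="scheduler"):
        _emit_heartbeat_metrics()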
diff --git a/airflow-core/src/airflow/triggers/base.py b/airflow-core/src/airflow/triggers/base.py
index 2dfe6880786f6..490423da5fda2 100644
--- a/airflow-core/src/airflow/triggers/base.py
+++ b/airflow-core/src/airflow/triggers/base.py
@@ -21,7 +21,7 @@
 from collections.abc import AsyncIterator
 from dataclasses import dataclass
 from datetime import timedelta
-from typing import Annotated, Any, Union
+from typing import Annotated, Any
 
 import structlog
 from pydantic import (
@@ -229,11 +229,9 @@ def trigger_event_discriminator(v):
 
 
 DiscrimatedTriggerEvent = Annotated[
-    Union[
-        Annotated[TriggerEvent, Tag("_event_")],
-        Annotated[TaskSuccessEvent, Tag(TaskInstanceState.SUCCESS)],
-        Annotated[TaskFailedEvent, Tag(TaskInstanceState.FAILED)],
-        Annotated[TaskSkippedEvent, Tag(TaskInstanceState.SKIPPED)],
-    ],
+    Annotated[TriggerEvent, Tag("_event_")]
+    | Annotated[TaskSuccessEvent, Tag(TaskInstanceState.SUCCESS)]
+    | Annotated[TaskFailedEvent, Tag(TaskInstanceState.FAILED)]
+    | Annotated[TaskSkippedEvent, Tag(TaskInstanceState.SKIPPED)],
    Discriminator(trigger_event_discriminator),
 ]
diff --git a/airflow-core/src/airflow/triggers/deadline.py b/airflow-core/src/airflow/triggers/deadline.py
new file mode 100644
index 0000000000000..4229695854d10
--- /dev/null
+++ b/airflow-core/src/airflow/triggers/deadline.py
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import logging
+from collections.abc import AsyncIterator
+from typing import Any
+
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.utils.module_loading import import_string, qualname
+
+log = logging.getLogger(__name__)
+
+PAYLOAD_STATUS_KEY = "state"
+PAYLOAD_BODY_KEY = "body"
+
+
+class DeadlineCallbackTrigger(BaseTrigger):
+    """Trigger that executes a deadline callback function asynchronously."""
+
+    def __init__(self, callback_path: str, callback_kwargs: dict[str, Any] | None = None):
+        super().__init__()
+        self.callback_path = callback_path
+        self.callback_kwargs = callback_kwargs or {}
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        return (
+            qualname(self),
+            {attr: getattr(self, attr) for attr in ("callback_path", "callback_kwargs")},
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        from airflow.models.deadline import DeadlineCallbackState  # to avoid cyclic imports
+
+        try:
+            callback = import_string(self.callback_path)
+            result = await callback(**self.callback_kwargs)
+            log.info("Deadline callback completed with return value: %s", result)
+            yield TriggerEvent({PAYLOAD_STATUS_KEY: DeadlineCallbackState.SUCCESS, PAYLOAD_BODY_KEY: result})
+        except Exception as e:
+            if isinstance(e, ImportError):
+                message = "Could not import deadline callback on the triggerer"
+            elif isinstance(e, TypeError) and "await" in str(e):
+                message = "Deadline callback not awaitable"
+            else:
+                message = "An error occurred while executing deadline callback"
+            log.exception("%s: %s", message, e)
+            yield TriggerEvent(
+                {PAYLOAD_STATUS_KEY: DeadlineCallbackState.FAILED, PAYLOAD_BODY_KEY: f"{message}: {e}"}
+            )
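`DeadlineCallbackTrigger` imports a callable by path on the triggerer, awaits it, and reports the outcome as a `TriggerEvent`. A sketch of a compatible callback and trigger construction — the module path and kwargs are illustrative, and the callback must be importable on the triggerer:

import asyncio

from airflow.triggers.deadline import DeadlineCallbackTrigger


async def notify_deadline_missed(dag_id: str, threshold_seconds: int) -> str:
    # must be awaitable; a plain function makes the trigger yield a FAILED event
    await asyncio.sleep(0)
    return f"notified for {dag_id} (threshold={threshold_seconds}s)"


trigger = DeadlineCallbackTrigger(
    callback_path="my_company.callbacks.notify_deadline_missed",  # illustrative path
    callback_kwargs={"dag_id": "example_dag", "threshold_seconds": 600},
)
# serialize() round-trips the classpath and kwargs so the triggerer can rebuild it
classpath, kwargs = trigger.serialize()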
diff --git a/airflow-core/src/airflow/typing_compat.py b/airflow-core/src/airflow/typing_compat.py
index 91a37ae020fec..8a00ac06bd7f1 100644
--- a/airflow-core/src/airflow/typing_compat.py
+++ b/airflow-core/src/airflow/typing_compat.py
@@ -29,20 +29,8 @@
 
 import sys
 
-# Literal from typing module has various issues in different Python versions, see:
-# - https://typing-extensions.readthedocs.io/en/latest/#Literal
-# - bpo-45679: https://github.com/python/cpython/pull/29334
-# - bpo-42345: https://github.com/python/cpython/pull/23294
-# - bpo-42345: https://github.com/python/cpython/pull/23383
-if sys.version_info >= (3, 10, 1) or (3, 9, 8) <= sys.version_info < (3, 10):
-    from typing import Literal
-else:
-    from typing_extensions import Literal  # type: ignore[assignment]
-
-if sys.version_info >= (3, 10):
-    from typing import ParamSpec, TypeAlias, TypeGuard
-else:
-    from typing_extensions import ParamSpec, TypeAlias, TypeGuard
+# Keeping this for backwards-compat with old providers
+from typing import Literal, ParamSpec, TypeAlias, TypeGuard
 
 if sys.version_info >= (3, 11):
     from typing import Self
diff --git a/airflow-core/src/airflow/ui/.prettierignore b/airflow-core/src/airflow/ui/.prettierignore
index 49a8631b874a0..b90ad27ecef3d 100644
--- a/airflow-core/src/airflow/ui/.prettierignore
+++ b/airflow-core/src/airflow/ui/.prettierignore
@@ -5,3 +5,5 @@ dist/
 *.yaml
 coverage/*
 .pnpm-store
+public/i18n/locales/*
+openapi-gen/
diff --git a/airflow-core/src/airflow/ui/CONTRIBUTING.md b/airflow-core/src/airflow/ui/CONTRIBUTING.md
index 43f5ac043be42..a7a2ae9831e1d 100644
--- a/airflow-core/src/airflow/ui/CONTRIBUTING.md
+++ b/airflow-core/src/airflow/ui/CONTRIBUTING.md
@@ -26,7 +26,7 @@ With Breeze:
 
 Manually:
 
-- Have the `dev-mode` environment variable enabled
+- Have the `DEV_MODE` environment variable set to `true` when starting airflow api-server
 - Run `pnpm install && pnpm dev`
 - Note: Make sure to access the UI via the Airflow localhost port (8080 or 28080) and not the vite port (5173)
diff --git a/airflow-core/src/airflow/ui/eslint.config.js b/airflow-core/src/airflow/ui/eslint.config.js
index 31467415319b3..c7d2363724a95 100644
--- a/airflow-core/src/airflow/ui/eslint.config.js
+++ b/airflow-core/src/airflow/ui/eslint.config.js
@@ -21,6 +21,9 @@
  * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint";
  */
 import { coreRules } from "./rules/core.js";
+import { i18nRules } from "./rules/i18n.js";
+import { i18nextRules } from "./rules/i18next.js";
+import { jsoncRules } from "./rules/jsonc.js";
 import { perfectionistRules } from "./rules/perfectionist.js";
 import { prettierRules } from "./rules/prettier.js";
 import { reactRules } from "./rules/react.js";
@@ -34,7 +37,7 @@ import { unicornRules } from "./rules/unicorn.js";
  */
 export default /** @type {const} @satisfies {ReadonlyArray<FlatConfig.Config>} */ ([
   // Global ignore of dist directory
-  { ignores: ["**/dist/", "**coverage/"] },
+  { ignores: ["**/dist/", "**coverage/", "**/openapi-gen/"] },
   // Base rules
   coreRules,
   typescriptRules,
@@ -44,4 +47,7 @@ export default /** @type {const} @satisfies {ReadonlyArray<FlatConfig.Config>} *
   reactRules,
   stylisticRules,
   unicornRules,
+  i18nextRules,
+  i18nRules,
+  jsoncRules,
 ]);
diff --git a/airflow-core/src/airflow/ui/index.html b/airflow-core/src/airflow/ui/index.html
index a50bf3350bc94..e3f4f943f2530 100644
--- a/airflow-core/src/airflow/ui/index.html
+++ b/airflow-core/src/airflow/ui/index.html
@@ -1,5 +1,5 @@
 <!doctype html>
-<html lang="en">
+<html>
   <head>
diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts
index a3fcabb4c27f5..65a87575ad64e 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts
@@ -1,1902 +1,811 @@
-// generated with @7nohe/openapi-react-query-codegen@1.6.2
-import { UseQueryResult } from "@tanstack/react-query";
+// generated with @7nohe/openapi-react-query-codegen@1.6.2
 
-import {
-  AssetService,
-  AuthLinksService,
-  BackfillService,
-  ConfigService,
-  ConnectionService,
-  DagParsingService,
-  DagReportService,
-  DagRunService,
-  DagService,
-  DagSourceService,
-  DagStatsService,
-  DagVersionService,
-  DagWarningService,
-  DagsService,
-  DashboardService,
-  DependenciesService,
-  EventLogService,
-  ExtraLinksService,
-  GridService,
-  ImportErrorService,
-  JobService,
-  LoginService,
-  MonitorService,
-  PluginService,
-  PoolService,
-  ProviderService,
-  StructureService,
-  TaskInstanceService,
-  TaskService,
-  VariableService,
-  VersionService,
-  XcomService,
-} from "../requests/services.gen";
+import { UseQueryResult } from "@tanstack/react-query";
+import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen";
 import { DagRunState, DagWarningType } from "../requests/types.gen";
-
 export type AssetServiceGetAssetsDefaultResponse = Awaited<ReturnType<typeof AssetService.getAssets>>;
-export type AssetServiceGetAssetsQueryResult<
-  TData = AssetServiceGetAssetsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetAssetsQueryResult<TData = AssetServiceGetAssetsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetAssetsKey = "AssetServiceGetAssets";
-export const UseAssetServiceGetAssetsKeyFn = (
-  {
-    dagIds,
-    limit,
-    namePattern,
-    offset,
-    onlyActive,
-    orderBy,
-    uriPattern,
-  }: {
-    dagIds?: string[];
-    limit?: number;
-    namePattern?: string;
-    offset?: number;
-    onlyActive?: boolean;
-    orderBy?: string;
-    uriPattern?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [
-  useAssetServiceGetAssetsKey,
-  ...(queryKey ?? [{ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }]),
-];
+export const UseAssetServiceGetAssetsKeyFn = ({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: {
+  dagIds?: string[];
+  limit?: number;
+  namePattern?: string;
+  offset?: number;
+  onlyActive?: boolean;
+  orderBy?: string[];
+  uriPattern?: string;
+} = {}, queryKey?: Array<unknown>) => [useAssetServiceGetAssetsKey, ...(queryKey ?? [{ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }])];
-export type AssetServiceGetAssetAliasesDefaultResponse = Awaited<
-  ReturnType<typeof AssetService.getAssetAliases>
->;
-export type AssetServiceGetAssetAliasesQueryResult<
-  TData = AssetServiceGetAssetAliasesDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetAssetAliasesDefaultResponse = Awaited<ReturnType<typeof AssetService.getAssetAliases>>;
+export type AssetServiceGetAssetAliasesQueryResult<TData = AssetServiceGetAssetAliasesDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetAssetAliasesKey = "AssetServiceGetAssetAliases";
-export const UseAssetServiceGetAssetAliasesKeyFn = (
-  {
-    limit,
-    namePattern,
-    offset,
-    orderBy,
-  }: {
-    limit?: number;
-    namePattern?: string;
-    offset?: number;
-    orderBy?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [useAssetServiceGetAssetAliasesKey, ...(queryKey ?? [{ limit, namePattern, offset, orderBy }])];
+export const UseAssetServiceGetAssetAliasesKeyFn = ({ limit, namePattern, offset, orderBy }: {
+  limit?: number;
+  namePattern?: string;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: Array<unknown>) => [useAssetServiceGetAssetAliasesKey, ...(queryKey ?? [{ limit, namePattern, offset, orderBy }])];
 export type AssetServiceGetAssetAliasDefaultResponse = Awaited<ReturnType<typeof AssetService.getAssetAlias>>;
-export type AssetServiceGetAssetAliasQueryResult<
-  TData = AssetServiceGetAssetAliasDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetAssetAliasQueryResult<TData = AssetServiceGetAssetAliasDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetAssetAliasKey = "AssetServiceGetAssetAlias";
-export const UseAssetServiceGetAssetAliasKeyFn = (
-  {
-    assetAliasId,
-  }: {
-    assetAliasId: number;
-  },
-  queryKey?: Array<unknown>,
-) => [useAssetServiceGetAssetAliasKey, ...(queryKey ?? [{ assetAliasId }])];
+export const UseAssetServiceGetAssetAliasKeyFn = ({ assetAliasId }: {
+  assetAliasId: number;
+}, queryKey?: Array<unknown>) => [useAssetServiceGetAssetAliasKey, ...(queryKey ?? [{ assetAliasId }])];
-export type AssetServiceGetAssetEventsDefaultResponse = Awaited<
-  ReturnType<typeof AssetService.getAssetEvents>
->;
-export type AssetServiceGetAssetEventsQueryResult<
-  TData = AssetServiceGetAssetEventsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetAssetEventsDefaultResponse = Awaited<ReturnType<typeof AssetService.getAssetEvents>>;
+export type AssetServiceGetAssetEventsQueryResult<TData = AssetServiceGetAssetEventsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetAssetEventsKey = "AssetServiceGetAssetEvents";
-export const UseAssetServiceGetAssetEventsKeyFn = (
-  {
-    assetId,
-    limit,
-    offset,
-    orderBy,
-    sourceDagId,
-    sourceMapIndex,
-    sourceRunId,
-    sourceTaskId,
-    timestampGte,
-    timestampLte,
-  }: {
-    assetId?: number;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    sourceDagId?: string;
-    sourceMapIndex?: number;
-    sourceRunId?: string;
-    sourceTaskId?: string;
-    timestampGte?: string;
-    timestampLte?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [
-  useAssetServiceGetAssetEventsKey,
-  ...(queryKey ?? [
-    {
-      assetId,
-      limit,
-      offset,
-      orderBy,
-      sourceDagId,
-      sourceMapIndex,
-      sourceRunId,
-      sourceTaskId,
-      timestampGte,
-      timestampLte,
-    },
-  ]),
-];
+export const UseAssetServiceGetAssetEventsKeyFn = ({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: {
+  assetId?: number;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  sourceDagId?: string;
+  sourceMapIndex?: number;
+  sourceRunId?: string;
+  sourceTaskId?: string;
+  timestampGte?: string;
+  timestampLte?: string;
+} = {}, queryKey?: Array<unknown>) => [useAssetServiceGetAssetEventsKey, ...(queryKey ?? [{ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }])];
-export type AssetServiceGetAssetQueuedEventsDefaultResponse = Awaited<
-  ReturnType<typeof AssetService.getAssetQueuedEvents>
->;
-export type AssetServiceGetAssetQueuedEventsQueryResult<
-  TData = AssetServiceGetAssetQueuedEventsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetAssetQueuedEventsDefaultResponse = Awaited<ReturnType<typeof AssetService.getAssetQueuedEvents>>;
+export type AssetServiceGetAssetQueuedEventsQueryResult<TData = AssetServiceGetAssetQueuedEventsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetAssetQueuedEventsKey = "AssetServiceGetAssetQueuedEvents";
-export const UseAssetServiceGetAssetQueuedEventsKeyFn = (
-  {
-    assetId,
-    before,
-  }: {
-    assetId: number;
-    before?: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useAssetServiceGetAssetQueuedEventsKey, ...(queryKey ?? [{ assetId, before }])];
+export const UseAssetServiceGetAssetQueuedEventsKeyFn = ({ assetId, before }: {
+  assetId: number;
+  before?: string;
+}, queryKey?: Array<unknown>) => [useAssetServiceGetAssetQueuedEventsKey, ...(queryKey ?? [{ assetId, before }])];
 export type AssetServiceGetAssetDefaultResponse = Awaited<ReturnType<typeof AssetService.getAsset>>;
-export type AssetServiceGetAssetQueryResult<
-  TData = AssetServiceGetAssetDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetAssetQueryResult<TData = AssetServiceGetAssetDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetAssetKey = "AssetServiceGetAsset";
-export const UseAssetServiceGetAssetKeyFn = (
-  {
-    assetId,
-  }: {
-    assetId: number;
-  },
-  queryKey?: Array<unknown>,
-) => [useAssetServiceGetAssetKey, ...(queryKey ?? [{ assetId }])];
+export const UseAssetServiceGetAssetKeyFn = ({ assetId }: {
+  assetId: number;
+}, queryKey?: Array<unknown>) => [useAssetServiceGetAssetKey, ...(queryKey ?? [{ assetId }])];
-export type AssetServiceGetDagAssetQueuedEventsDefaultResponse = Awaited<
-  ReturnType<typeof AssetService.getDagAssetQueuedEvents>
->;
-export type AssetServiceGetDagAssetQueuedEventsQueryResult<
-  TData = AssetServiceGetDagAssetQueuedEventsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetDagAssetQueuedEventsDefaultResponse = Awaited<ReturnType<typeof AssetService.getDagAssetQueuedEvents>>;
+export type AssetServiceGetDagAssetQueuedEventsQueryResult<TData = AssetServiceGetDagAssetQueuedEventsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetDagAssetQueuedEventsKey = "AssetServiceGetDagAssetQueuedEvents";
-export const UseAssetServiceGetDagAssetQueuedEventsKeyFn = (
-  {
-    before,
-    dagId,
-  }: {
-    before?: string;
-    dagId: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useAssetServiceGetDagAssetQueuedEventsKey, ...(queryKey ?? [{ before, dagId }])];
+export const UseAssetServiceGetDagAssetQueuedEventsKeyFn = ({ before, dagId }: {
+  before?: string;
+  dagId: string;
+}, queryKey?: Array<unknown>) => [useAssetServiceGetDagAssetQueuedEventsKey, ...(queryKey ?? [{ before, dagId }])];
-export type AssetServiceGetDagAssetQueuedEventDefaultResponse = Awaited<
-  ReturnType<typeof AssetService.getDagAssetQueuedEvent>
->;
-export type AssetServiceGetDagAssetQueuedEventQueryResult<
-  TData = AssetServiceGetDagAssetQueuedEventDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceGetDagAssetQueuedEventDefaultResponse = Awaited<ReturnType<typeof AssetService.getDagAssetQueuedEvent>>;
+export type AssetServiceGetDagAssetQueuedEventQueryResult<TData = AssetServiceGetDagAssetQueuedEventDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceGetDagAssetQueuedEventKey = "AssetServiceGetDagAssetQueuedEvent";
-export const UseAssetServiceGetDagAssetQueuedEventKeyFn = (
-  {
-    assetId,
-    before,
-    dagId,
-  }: {
-    assetId: number;
-    before?: string;
-    dagId: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useAssetServiceGetDagAssetQueuedEventKey, ...(queryKey ?? [{ assetId, before, dagId }])];
+export const UseAssetServiceGetDagAssetQueuedEventKeyFn = ({ assetId, before, dagId }: {
+  assetId: number;
+  before?: string;
+  dagId: string;
+}, queryKey?: Array<unknown>) => [useAssetServiceGetDagAssetQueuedEventKey, ...(queryKey ?? [{ assetId, before, dagId }])];
 export type AssetServiceNextRunAssetsDefaultResponse = Awaited<ReturnType<typeof AssetService.nextRunAssets>>;
-export type AssetServiceNextRunAssetsQueryResult<
-  TData = AssetServiceNextRunAssetsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type AssetServiceNextRunAssetsQueryResult<TData = AssetServiceNextRunAssetsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useAssetServiceNextRunAssetsKey = "AssetServiceNextRunAssets";
-export const UseAssetServiceNextRunAssetsKeyFn = (
-  {
-    dagId,
-  }: {
-    dagId: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useAssetServiceNextRunAssetsKey, ...(queryKey ?? [{ dagId }])];
+export const UseAssetServiceNextRunAssetsKeyFn = ({ dagId }: {
+  dagId: string;
+}, queryKey?: Array<unknown>) => [useAssetServiceNextRunAssetsKey, ...(queryKey ?? [{ dagId }])];
-export type BackfillServiceListBackfillsDefaultResponse = Awaited<
-  ReturnType<typeof BackfillService.listBackfills>
->;
-export type BackfillServiceListBackfillsQueryResult<
-  TData = BackfillServiceListBackfillsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type BackfillServiceListBackfillsDefaultResponse = Awaited<ReturnType<typeof BackfillService.listBackfills>>;
+export type BackfillServiceListBackfillsQueryResult<TData = BackfillServiceListBackfillsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useBackfillServiceListBackfillsKey = "BackfillServiceListBackfills";
-export const UseBackfillServiceListBackfillsKeyFn = (
-  {
-    dagId,
-    limit,
-    offset,
-    orderBy,
-  }: {
-    dagId: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useBackfillServiceListBackfillsKey, ...(queryKey ?? [{ dagId, limit, offset, orderBy }])];
+export const UseBackfillServiceListBackfillsKeyFn = ({ dagId, limit, offset, orderBy }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+}, queryKey?: Array<unknown>) => [useBackfillServiceListBackfillsKey, ...(queryKey ?? [{ dagId, limit, offset, orderBy }])];
-export type BackfillServiceGetBackfillDefaultResponse = Awaited<
-  ReturnType<typeof BackfillService.getBackfill>
->;
-export type BackfillServiceGetBackfillQueryResult<
-  TData = BackfillServiceGetBackfillDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type BackfillServiceGetBackfillDefaultResponse = Awaited<ReturnType<typeof BackfillService.getBackfill>>;
+export type BackfillServiceGetBackfillQueryResult<TData = BackfillServiceGetBackfillDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useBackfillServiceGetBackfillKey = "BackfillServiceGetBackfill";
-export const UseBackfillServiceGetBackfillKeyFn = (
-  {
-    backfillId,
-  }: {
-    backfillId: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useBackfillServiceGetBackfillKey, ...(queryKey ?? [{ backfillId }])];
+export const UseBackfillServiceGetBackfillKeyFn = ({ backfillId }: {
+  backfillId: number;
+}, queryKey?: Array<unknown>) => [useBackfillServiceGetBackfillKey, ...(queryKey ?? [{ backfillId }])];
-export type BackfillServiceListBackfills1DefaultResponse = Awaited<
-  ReturnType<typeof BackfillService.listBackfills1>
->;
-export type BackfillServiceListBackfills1QueryResult<
-  TData = BackfillServiceListBackfills1DefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
-export const useBackfillServiceListBackfills1Key = "BackfillServiceListBackfills1";
-export const UseBackfillServiceListBackfills1KeyFn = (
-  {
-    active,
-    dagId,
-    limit,
-    offset,
-    orderBy,
-  }: {
-    active?: boolean;
-    dagId?: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [useBackfillServiceListBackfills1Key, ...(queryKey ?? [{ active, dagId, limit, offset, orderBy }])];
+export type BackfillServiceListBackfillsUiDefaultResponse = Awaited<ReturnType<typeof BackfillService.listBackfillsUi>>;
+export type BackfillServiceListBackfillsUiQueryResult<TData = BackfillServiceListBackfillsUiDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useBackfillServiceListBackfillsUiKey = "BackfillServiceListBackfillsUi";
+export const UseBackfillServiceListBackfillsUiKeyFn = ({ active, dagId, limit, offset, orderBy }: {
+  active?: boolean;
+  dagId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: Array<unknown>) => [useBackfillServiceListBackfillsUiKey, ...(queryKey ?? [{ active, dagId, limit, offset, orderBy }])];
-export type ConnectionServiceGetConnectionDefaultResponse = Awaited<
-  ReturnType<typeof ConnectionService.getConnection>
->;
-export type ConnectionServiceGetConnectionQueryResult<
-  TData = ConnectionServiceGetConnectionDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type ConnectionServiceGetConnectionDefaultResponse = Awaited<ReturnType<typeof ConnectionService.getConnection>>;
+export type ConnectionServiceGetConnectionQueryResult<TData = ConnectionServiceGetConnectionDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
 export const useConnectionServiceGetConnectionKey = "ConnectionServiceGetConnection";
-export const UseConnectionServiceGetConnectionKeyFn = (
-  {
-    connectionId,
-  }: {
-    connectionId: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useConnectionServiceGetConnectionKey, ...(queryKey ?? [{ connectionId }])];
-export type ConnectionServiceGetConnectionsDefaultResponse = Awaited<
-  ReturnType<typeof ConnectionService.getConnections>
->;
-export type ConnectionServiceGetConnectionsQueryResult<
-  TData = ConnectionServiceGetConnectionsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseConnectionServiceGetConnectionKeyFn = ({ connectionId }: {
+  connectionId: string;
+}, queryKey?: Array<unknown>) => [useConnectionServiceGetConnectionKey, ...(queryKey ??
[{ connectionId }])]; +export type ConnectionServiceGetConnectionsDefaultResponse = Awaited>; +export type ConnectionServiceGetConnectionsQueryResult = UseQueryResult; export const useConnectionServiceGetConnectionsKey = "ConnectionServiceGetConnections"; -export const UseConnectionServiceGetConnectionsKeyFn = ( - { - connectionIdPattern, - limit, - offset, - orderBy, - }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: Array, -) => [ - useConnectionServiceGetConnectionsKey, - ...(queryKey ?? [{ connectionIdPattern, limit, offset, orderBy }]), -]; -export type ConnectionServiceHookMetaDataDefaultResponse = Awaited< - ReturnType ->; -export type ConnectionServiceHookMetaDataQueryResult< - TData = ConnectionServiceHookMetaDataDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseConnectionServiceGetConnectionsKeyFn = ({ connectionIdPattern, limit, offset, orderBy }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}, queryKey?: Array) => [useConnectionServiceGetConnectionsKey, ...(queryKey ?? [{ connectionIdPattern, limit, offset, orderBy }])]; +export type ConnectionServiceHookMetaDataDefaultResponse = Awaited>; +export type ConnectionServiceHookMetaDataQueryResult = UseQueryResult; export const useConnectionServiceHookMetaDataKey = "ConnectionServiceHookMetaData"; -export const UseConnectionServiceHookMetaDataKeyFn = (queryKey?: Array) => [ - useConnectionServiceHookMetaDataKey, - ...(queryKey ?? []), -]; +export const UseConnectionServiceHookMetaDataKeyFn = (queryKey?: Array) => [useConnectionServiceHookMetaDataKey, ...(queryKey ?? [])]; export type DagRunServiceGetDagRunDefaultResponse = Awaited>; -export type DagRunServiceGetDagRunQueryResult< - TData = DagRunServiceGetDagRunDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type DagRunServiceGetDagRunQueryResult = UseQueryResult; export const useDagRunServiceGetDagRunKey = "DagRunServiceGetDagRun"; -export const UseDagRunServiceGetDagRunKeyFn = ( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: Array, -) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? [{ dagId, dagRunId }])]; -export type DagRunServiceGetUpstreamAssetEventsDefaultResponse = Awaited< - ReturnType ->; -export type DagRunServiceGetUpstreamAssetEventsQueryResult< - TData = DagRunServiceGetUpstreamAssetEventsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseDagRunServiceGetDagRunKeyFn = ({ dagId, dagRunId }: { + dagId: string; + dagRunId: string; +}, queryKey?: Array) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? [{ dagId, dagRunId }])]; +export type DagRunServiceGetUpstreamAssetEventsDefaultResponse = Awaited>; +export type DagRunServiceGetUpstreamAssetEventsQueryResult = UseQueryResult; export const useDagRunServiceGetUpstreamAssetEventsKey = "DagRunServiceGetUpstreamAssetEvents"; -export const UseDagRunServiceGetUpstreamAssetEventsKeyFn = ( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: Array, -) => [useDagRunServiceGetUpstreamAssetEventsKey, ...(queryKey ?? [{ dagId, dagRunId }])]; +export const UseDagRunServiceGetUpstreamAssetEventsKeyFn = ({ dagId, dagRunId }: { + dagId: string; + dagRunId: string; +}, queryKey?: Array) => [useDagRunServiceGetUpstreamAssetEventsKey, ...(queryKey ?? 
[{ dagId, dagRunId }])]; export type DagRunServiceGetDagRunsDefaultResponse = Awaited>; -export type DagRunServiceGetDagRunsQueryResult< - TData = DagRunServiceGetDagRunsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type DagRunServiceGetDagRunsQueryResult = UseQueryResult; export const useDagRunServiceGetDagRunsKey = "DagRunServiceGetDagRuns"; -export const UseDagRunServiceGetDagRunsKeyFn = ( - { - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; - }, - queryKey?: Array, -) => [ - useDagRunServiceGetDagRunsKey, - ...(queryKey ?? [ - { - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }, - ]), -]; -export type DagSourceServiceGetDagSourceDefaultResponse = Awaited< - ReturnType ->; -export type DagSourceServiceGetDagSourceQueryResult< - TData = DagSourceServiceGetDagSourceDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseDagRunServiceGetDagRunsKeyFn = ({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + runAfterGte?: string; + runAfterLte?: string; + runIdPattern?: string; + runType?: string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + triggeringUserNamePattern?: string; + updatedAtGte?: string; + updatedAtLte?: string; +}, queryKey?: Array) => [useDagRunServiceGetDagRunsKey, ...(queryKey ?? [{ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }])]; +export type DagRunServiceWaitDagRunUntilFinishedDefaultResponse = Awaited>; +export type DagRunServiceWaitDagRunUntilFinishedQueryResult = UseQueryResult; +export const useDagRunServiceWaitDagRunUntilFinishedKey = "DagRunServiceWaitDagRunUntilFinished"; +export const UseDagRunServiceWaitDagRunUntilFinishedKeyFn = ({ dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}, queryKey?: Array) => [useDagRunServiceWaitDagRunUntilFinishedKey, ...(queryKey ?? 
[{ dagId, dagRunId, interval, result }])]; +export type ExperimentalServiceWaitDagRunUntilFinishedDefaultResponse = Awaited>; +export type ExperimentalServiceWaitDagRunUntilFinishedQueryResult = UseQueryResult; +export const useExperimentalServiceWaitDagRunUntilFinishedKey = "ExperimentalServiceWaitDagRunUntilFinished"; +export const UseExperimentalServiceWaitDagRunUntilFinishedKeyFn = ({ dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}, queryKey?: Array) => [useExperimentalServiceWaitDagRunUntilFinishedKey, ...(queryKey ?? [{ dagId, dagRunId, interval, result }])]; +export type DagSourceServiceGetDagSourceDefaultResponse = Awaited>; +export type DagSourceServiceGetDagSourceQueryResult = UseQueryResult; export const useDagSourceServiceGetDagSourceKey = "DagSourceServiceGetDagSource"; -export const UseDagSourceServiceGetDagSourceKeyFn = ( - { - accept, - dagId, - versionNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; - }, - queryKey?: Array, -) => [useDagSourceServiceGetDagSourceKey, ...(queryKey ?? [{ accept, dagId, versionNumber }])]; -export type DagStatsServiceGetDagStatsDefaultResponse = Awaited< - ReturnType ->; -export type DagStatsServiceGetDagStatsQueryResult< - TData = DagStatsServiceGetDagStatsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseDagSourceServiceGetDagSourceKeyFn = ({ accept, dagId, versionNumber }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; +}, queryKey?: Array) => [useDagSourceServiceGetDagSourceKey, ...(queryKey ?? [{ accept, dagId, versionNumber }])]; +export type DagStatsServiceGetDagStatsDefaultResponse = Awaited>; +export type DagStatsServiceGetDagStatsQueryResult = UseQueryResult; export const useDagStatsServiceGetDagStatsKey = "DagStatsServiceGetDagStats"; -export const UseDagStatsServiceGetDagStatsKeyFn = ( - { - dagIds, - }: { - dagIds?: string[]; - } = {}, - queryKey?: Array, -) => [useDagStatsServiceGetDagStatsKey, ...(queryKey ?? [{ dagIds }])]; -export type DagReportServiceGetDagReportsDefaultResponse = Awaited< - ReturnType ->; -export type DagReportServiceGetDagReportsQueryResult< - TData = DagReportServiceGetDagReportsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseDagStatsServiceGetDagStatsKeyFn = ({ dagIds }: { + dagIds?: string[]; +} = {}, queryKey?: Array) => [useDagStatsServiceGetDagStatsKey, ...(queryKey ?? [{ dagIds }])]; +export type DagReportServiceGetDagReportsDefaultResponse = Awaited>; +export type DagReportServiceGetDagReportsQueryResult = UseQueryResult; export const useDagReportServiceGetDagReportsKey = "DagReportServiceGetDagReports"; -export const UseDagReportServiceGetDagReportsKeyFn = ( - { - subdir, - }: { - subdir: string; - }, - queryKey?: Array, -) => [useDagReportServiceGetDagReportsKey, ...(queryKey ?? [{ subdir }])]; +export const UseDagReportServiceGetDagReportsKeyFn = ({ subdir }: { + subdir: string; +}, queryKey?: Array) => [useDagReportServiceGetDagReportsKey, ...(queryKey ?? 
[{ subdir }])]; export type ConfigServiceGetConfigDefaultResponse = Awaited>; -export type ConfigServiceGetConfigQueryResult< - TData = ConfigServiceGetConfigDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type ConfigServiceGetConfigQueryResult = UseQueryResult; export const useConfigServiceGetConfigKey = "ConfigServiceGetConfig"; -export const UseConfigServiceGetConfigKeyFn = ( - { - accept, - section, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; - } = {}, - queryKey?: Array, -) => [useConfigServiceGetConfigKey, ...(queryKey ?? [{ accept, section }])]; -export type ConfigServiceGetConfigValueDefaultResponse = Awaited< - ReturnType ->; -export type ConfigServiceGetConfigValueQueryResult< - TData = ConfigServiceGetConfigValueDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseConfigServiceGetConfigKeyFn = ({ accept, section }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; +} = {}, queryKey?: Array) => [useConfigServiceGetConfigKey, ...(queryKey ?? [{ accept, section }])]; +export type ConfigServiceGetConfigValueDefaultResponse = Awaited>; +export type ConfigServiceGetConfigValueQueryResult = UseQueryResult; export const useConfigServiceGetConfigValueKey = "ConfigServiceGetConfigValue"; -export const UseConfigServiceGetConfigValueKeyFn = ( - { - accept, - option, - section, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; - }, - queryKey?: Array, -) => [useConfigServiceGetConfigValueKey, ...(queryKey ?? [{ accept, option, section }])]; +export const UseConfigServiceGetConfigValueKeyFn = ({ accept, option, section }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; +}, queryKey?: Array) => [useConfigServiceGetConfigValueKey, ...(queryKey ?? [{ accept, option, section }])]; export type ConfigServiceGetConfigsDefaultResponse = Awaited>; -export type ConfigServiceGetConfigsQueryResult< - TData = ConfigServiceGetConfigsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type ConfigServiceGetConfigsQueryResult = UseQueryResult; export const useConfigServiceGetConfigsKey = "ConfigServiceGetConfigs"; -export const UseConfigServiceGetConfigsKeyFn = (queryKey?: Array) => [ - useConfigServiceGetConfigsKey, - ...(queryKey ?? []), -]; -export type DagWarningServiceListDagWarningsDefaultResponse = Awaited< - ReturnType ->; -export type DagWarningServiceListDagWarningsQueryResult< - TData = DagWarningServiceListDagWarningsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseConfigServiceGetConfigsKeyFn = (queryKey?: Array) => [useConfigServiceGetConfigsKey, ...(queryKey ?? [])]; +export type DagWarningServiceListDagWarningsDefaultResponse = Awaited>; +export type DagWarningServiceListDagWarningsQueryResult = UseQueryResult; export const useDagWarningServiceListDagWarningsKey = "DagWarningServiceListDagWarnings"; -export const UseDagWarningServiceListDagWarningsKeyFn = ( - { - dagId, - limit, - offset, - orderBy, - warningType, - }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; - } = {}, - queryKey?: Array, -) => [ - useDagWarningServiceListDagWarningsKey, - ...(queryKey ?? 
[{ dagId, limit, offset, orderBy, warningType }]), -]; +export const UseDagWarningServiceListDagWarningsKeyFn = ({ dagId, limit, offset, orderBy, warningType }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + warningType?: DagWarningType; +} = {}, queryKey?: Array) => [useDagWarningServiceListDagWarningsKey, ...(queryKey ?? [{ dagId, limit, offset, orderBy, warningType }])]; export type DagServiceGetDagsDefaultResponse = Awaited>; -export type DagServiceGetDagsQueryResult< - TData = DagServiceGetDagsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type DagServiceGetDagsQueryResult = UseQueryResult; export const useDagServiceGetDagsKey = "DagServiceGetDags"; -export const UseDagServiceGetDagsKeyFn = ( - { - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; - } = {}, - queryKey?: Array, -) => [ - useDagServiceGetDagsKey, - ...(queryKey ?? [ - { - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }, - ]), -]; +export const UseDagServiceGetDagsKeyFn = ({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { + bundleName?: string; + bundleVersion?: string; + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + isFavorite?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string[]; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; +} = {}, queryKey?: Array) => [useDagServiceGetDagsKey, ...(queryKey ?? [{ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }])]; export type DagServiceGetDagDefaultResponse = Awaited>; -export type DagServiceGetDagQueryResult< - TData = DagServiceGetDagDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type DagServiceGetDagQueryResult = UseQueryResult; export const useDagServiceGetDagKey = "DagServiceGetDag"; -export const UseDagServiceGetDagKeyFn = ( - { - dagId, - }: { - dagId: string; - }, - queryKey?: Array, -) => [useDagServiceGetDagKey, ...(queryKey ?? [{ dagId }])]; +export const UseDagServiceGetDagKeyFn = ({ dagId }: { + dagId: string; +}, queryKey?: Array) => [useDagServiceGetDagKey, ...(queryKey ?? 
[{ dagId }])]; export type DagServiceGetDagDetailsDefaultResponse = Awaited>; -export type DagServiceGetDagDetailsQueryResult< - TData = DagServiceGetDagDetailsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type DagServiceGetDagDetailsQueryResult = UseQueryResult; export const useDagServiceGetDagDetailsKey = "DagServiceGetDagDetails"; -export const UseDagServiceGetDagDetailsKeyFn = ( - { - dagId, - }: { - dagId: string; - }, - queryKey?: Array, -) => [useDagServiceGetDagDetailsKey, ...(queryKey ?? [{ dagId }])]; +export const UseDagServiceGetDagDetailsKeyFn = ({ dagId }: { + dagId: string; +}, queryKey?: Array) => [useDagServiceGetDagDetailsKey, ...(queryKey ?? [{ dagId }])]; export type DagServiceGetDagTagsDefaultResponse = Awaited>; -export type DagServiceGetDagTagsQueryResult< - TData = DagServiceGetDagTagsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type DagServiceGetDagTagsQueryResult = UseQueryResult; export const useDagServiceGetDagTagsKey = "DagServiceGetDagTags"; -export const UseDagServiceGetDagTagsKeyFn = ( - { - limit, - offset, - orderBy, - tagNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; - } = {}, - queryKey?: Array, -) => [useDagServiceGetDagTagsKey, ...(queryKey ?? [{ limit, offset, orderBy, tagNamePattern }])]; -export type EventLogServiceGetEventLogDefaultResponse = Awaited< - ReturnType ->; -export type EventLogServiceGetEventLogQueryResult< - TData = EventLogServiceGetEventLogDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseDagServiceGetDagTagsKeyFn = ({ limit, offset, orderBy, tagNamePattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + tagNamePattern?: string; +} = {}, queryKey?: Array) => [useDagServiceGetDagTagsKey, ...(queryKey ?? [{ limit, offset, orderBy, tagNamePattern }])]; +export type DagServiceGetDagsUiDefaultResponse = Awaited>; +export type DagServiceGetDagsUiQueryResult = UseQueryResult; +export const useDagServiceGetDagsUiKey = "DagServiceGetDagsUi"; +export const UseDagServiceGetDagsUiKeyFn = ({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { + bundleName?: string; + bundleVersion?: string; + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + isFavorite?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string[]; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; +} = {}, queryKey?: Array) => [useDagServiceGetDagsUiKey, ...(queryKey ?? [{ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }])]; +export type DagServiceGetLatestRunInfoDefaultResponse = Awaited>; +export type DagServiceGetLatestRunInfoQueryResult = UseQueryResult; +export const useDagServiceGetLatestRunInfoKey = "DagServiceGetLatestRunInfo"; +export const UseDagServiceGetLatestRunInfoKeyFn = ({ dagId }: { + dagId: string; +}, queryKey?: Array) => [useDagServiceGetLatestRunInfoKey, ...(queryKey ?? 
[{ dagId }])]; +export type EventLogServiceGetEventLogDefaultResponse = Awaited>; +export type EventLogServiceGetEventLogQueryResult = UseQueryResult; export const useEventLogServiceGetEventLogKey = "EventLogServiceGetEventLog"; -export const UseEventLogServiceGetEventLogKeyFn = ( - { - eventLogId, - }: { - eventLogId: number; - }, - queryKey?: Array, -) => [useEventLogServiceGetEventLogKey, ...(queryKey ?? [{ eventLogId }])]; -export type EventLogServiceGetEventLogsDefaultResponse = Awaited< - ReturnType ->; -export type EventLogServiceGetEventLogsQueryResult< - TData = EventLogServiceGetEventLogsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseEventLogServiceGetEventLogKeyFn = ({ eventLogId }: { + eventLogId: number; +}, queryKey?: Array) => [useEventLogServiceGetEventLogKey, ...(queryKey ?? [{ eventLogId }])]; +export type EventLogServiceGetEventLogsDefaultResponse = Awaited>; +export type EventLogServiceGetEventLogsQueryResult = UseQueryResult; export const useEventLogServiceGetEventLogsKey = "EventLogServiceGetEventLogs"; -export const UseEventLogServiceGetEventLogsKeyFn = ( - { - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; - } = {}, - queryKey?: Array, -) => [ - useEventLogServiceGetEventLogsKey, - ...(queryKey ?? [ - { - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }, - ]), -]; -export type ExtraLinksServiceGetExtraLinksDefaultResponse = Awaited< - ReturnType ->; -export type ExtraLinksServiceGetExtraLinksQueryResult< - TData = ExtraLinksServiceGetExtraLinksDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseEventLogServiceGetEventLogsKeyFn = ({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string[]; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; +} = {}, queryKey?: Array) => [useEventLogServiceGetEventLogsKey, ...(queryKey ?? [{ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }])]; +export type ExtraLinksServiceGetExtraLinksDefaultResponse = Awaited>; +export type ExtraLinksServiceGetExtraLinksQueryResult = UseQueryResult; export const useExtraLinksServiceGetExtraLinksKey = "ExtraLinksServiceGetExtraLinks"; -export const UseExtraLinksServiceGetExtraLinksKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: Array, -) => [useExtraLinksServiceGetExtraLinksKey, ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetExtraLinksDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetExtraLinksQueryResult< - TData = TaskInstanceServiceGetExtraLinksDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseExtraLinksServiceGetExtraLinksKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}, queryKey?: Array) => [useExtraLinksServiceGetExtraLinksKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetExtraLinksDefaultResponse = Awaited>; +export type TaskInstanceServiceGetExtraLinksQueryResult = UseQueryResult; export const useTaskInstanceServiceGetExtraLinksKey = "TaskInstanceServiceGetExtraLinks"; -export const UseTaskInstanceServiceGetExtraLinksKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: Array, -) => [useTaskInstanceServiceGetExtraLinksKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetTaskInstanceDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetTaskInstanceQueryResult< - TData = TaskInstanceServiceGetTaskInstanceDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseTaskInstanceServiceGetExtraLinksKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}, queryKey?: Array) => [useTaskInstanceServiceGetExtraLinksKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetTaskInstanceDefaultResponse = Awaited>; +export type TaskInstanceServiceGetTaskInstanceQueryResult = UseQueryResult; export const useTaskInstanceServiceGetTaskInstanceKey = "TaskInstanceServiceGetTaskInstance"; -export const UseTaskInstanceServiceGetTaskInstanceKeyFn = ( - { - dagId, - dagRunId, - taskId, - }: { - dagId: string; - dagRunId: string; - taskId: string; - }, - queryKey?: Array, -) => [useTaskInstanceServiceGetTaskInstanceKey, ...(queryKey ?? [{ dagId, dagRunId, taskId }])]; -export type TaskInstanceServiceGetMappedTaskInstancesDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetMappedTaskInstancesQueryResult< - TData = TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseTaskInstanceServiceGetTaskInstanceKeyFn = ({ dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceKey, ...(queryKey ?? 
[{ dagId, dagRunId, taskId }])]; +export type TaskInstanceServiceGetMappedTaskInstancesDefaultResponse = Awaited>; +export type TaskInstanceServiceGetMappedTaskInstancesQueryResult = UseQueryResult; export const useTaskInstanceServiceGetMappedTaskInstancesKey = "TaskInstanceServiceGetMappedTaskInstances"; -export const UseTaskInstanceServiceGetMappedTaskInstancesKeyFn = ( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetMappedTaskInstancesKey, - ...(queryKey ?? [ - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }, - ]), -]; -export type TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetTaskInstanceDependenciesQueryResult< - TData = TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useTaskInstanceServiceGetTaskInstanceDependenciesKey = - "TaskInstanceServiceGetTaskInstanceDependencies"; -export const UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetTaskInstanceDependenciesKey, - ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), -]; -export type TaskInstanceServiceGetTaskInstanceDependencies1DefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetTaskInstanceDependencies1QueryResult< - TData = TaskInstanceServiceGetTaskInstanceDependencies1DefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useTaskInstanceServiceGetTaskInstanceDependencies1Key = - "TaskInstanceServiceGetTaskInstanceDependencies1"; -export const UseTaskInstanceServiceGetTaskInstanceDependencies1KeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetTaskInstanceDependencies1Key, - ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }]), -]; -export type TaskInstanceServiceGetTaskInstanceTriesDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetTaskInstanceTriesQueryResult< - TData = TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseTaskInstanceServiceGetMappedTaskInstancesKeyFn = ({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; +}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstancesKey, ...(queryKey ?? [{ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }])]; +export type TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse = Awaited>; +export type TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexQueryResult = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKey = "TaskInstanceServiceGetTaskInstanceDependenciesByMapIndex"; +export const UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse = Awaited>; +export type TaskInstanceServiceGetTaskInstanceDependenciesQueryResult = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceDependenciesKey = "TaskInstanceServiceGetTaskInstanceDependencies"; +export const UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceDependenciesKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetTaskInstanceTriesDefaultResponse = Awaited>; +export type TaskInstanceServiceGetTaskInstanceTriesQueryResult = UseQueryResult; export const useTaskInstanceServiceGetTaskInstanceTriesKey = "TaskInstanceServiceGetTaskInstanceTries"; -export const UseTaskInstanceServiceGetTaskInstanceTriesKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetTaskInstanceTriesKey, - ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }]), -]; -export type TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetMappedTaskInstanceTriesQueryResult< - TData = TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useTaskInstanceServiceGetMappedTaskInstanceTriesKey = - "TaskInstanceServiceGetMappedTaskInstanceTries"; -export const UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetMappedTaskInstanceTriesKey, - ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), -]; -export type TaskInstanceServiceGetMappedTaskInstanceDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetMappedTaskInstanceQueryResult< - TData = TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseTaskInstanceServiceGetTaskInstanceTriesKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceTriesKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse = Awaited>; +export type TaskInstanceServiceGetMappedTaskInstanceTriesQueryResult = UseQueryResult; +export const useTaskInstanceServiceGetMappedTaskInstanceTriesKey = "TaskInstanceServiceGetMappedTaskInstanceTries"; +export const UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstanceTriesKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetMappedTaskInstanceDefaultResponse = Awaited>; +export type TaskInstanceServiceGetMappedTaskInstanceQueryResult = UseQueryResult; export const useTaskInstanceServiceGetMappedTaskInstanceKey = "TaskInstanceServiceGetMappedTaskInstance"; -export const UseTaskInstanceServiceGetMappedTaskInstanceKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetMappedTaskInstanceKey, - ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), -]; -export type TaskInstanceServiceGetTaskInstancesDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetTaskInstancesQueryResult< - TData = TaskInstanceServiceGetTaskInstancesDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseTaskInstanceServiceGetMappedTaskInstanceKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstanceKey, ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetTaskInstancesDefaultResponse = Awaited>; +export type TaskInstanceServiceGetTaskInstancesQueryResult = UseQueryResult; export const useTaskInstanceServiceGetTaskInstancesKey = "TaskInstanceServiceGetTaskInstances"; -export const UseTaskInstanceServiceGetTaskInstancesKeyFn = ( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetTaskInstancesKey, - ...(queryKey ?? [ - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }, - ]), -]; -export type TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetTaskInstanceTryDetailsQueryResult< - TData = TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useTaskInstanceServiceGetTaskInstanceTryDetailsKey = - "TaskInstanceServiceGetTaskInstanceTryDetails"; -export const UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetTaskInstanceTryDetailsKey, - ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }]), -]; -export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse = Awaited< - ReturnType ->; -export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsQueryResult< - TData = TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey = - "TaskInstanceServiceGetMappedTaskInstanceTryDetails"; -export const UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn = ( - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey, - ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }]), -]; +export const UseTaskInstanceServiceGetTaskInstancesKeyFn = ({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; +}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstancesKey, ...(queryKey ?? [{ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }])]; +export type TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse = Awaited>; +export type TaskInstanceServiceGetTaskInstanceTryDetailsQueryResult = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceTryDetailsKey = "TaskInstanceServiceGetTaskInstanceTryDetails"; +export const UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn = ({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; +}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceTryDetailsKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }])]; +export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse = Awaited>; +export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsQueryResult = UseQueryResult; +export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey = "TaskInstanceServiceGetMappedTaskInstanceTryDetails"; +export const UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn = ({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; +}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }])]; export type TaskInstanceServiceGetLogDefaultResponse = Awaited>; -export type TaskInstanceServiceGetLogQueryResult< - TData = TaskInstanceServiceGetLogDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type TaskInstanceServiceGetLogQueryResult = UseQueryResult; export const useTaskInstanceServiceGetLogKey = "TaskInstanceServiceGetLog"; -export const UseTaskInstanceServiceGetLogKeyFn = ( - { - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; - }, - queryKey?: Array, -) => [ - useTaskInstanceServiceGetLogKey, - ...(queryKey ?? 
[{ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }]), -]; -export type ImportErrorServiceGetImportErrorDefaultResponse = Awaited< - ReturnType ->; -export type ImportErrorServiceGetImportErrorQueryResult< - TData = ImportErrorServiceGetImportErrorDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseTaskInstanceServiceGetLogKeyFn = ({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; +}, queryKey?: Array) => [useTaskInstanceServiceGetLogKey, ...(queryKey ?? [{ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }])]; +export type TaskInstanceServiceGetExternalLogUrlDefaultResponse = Awaited>; +export type TaskInstanceServiceGetExternalLogUrlQueryResult = UseQueryResult; +export const useTaskInstanceServiceGetExternalLogUrlKey = "TaskInstanceServiceGetExternalLogUrl"; +export const UseTaskInstanceServiceGetExternalLogUrlKeyFn = ({ dagId, dagRunId, mapIndex, taskId, tryNumber }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + tryNumber: number; +}, queryKey?: Array) => [useTaskInstanceServiceGetExternalLogUrlKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId, tryNumber }])]; +export type ImportErrorServiceGetImportErrorDefaultResponse = Awaited>; +export type ImportErrorServiceGetImportErrorQueryResult = UseQueryResult; export const useImportErrorServiceGetImportErrorKey = "ImportErrorServiceGetImportError"; -export const UseImportErrorServiceGetImportErrorKeyFn = ( - { - importErrorId, - }: { - importErrorId: number; - }, - queryKey?: Array, -) => [useImportErrorServiceGetImportErrorKey, ...(queryKey ?? [{ importErrorId }])]; -export type ImportErrorServiceGetImportErrorsDefaultResponse = Awaited< - ReturnType ->; -export type ImportErrorServiceGetImportErrorsQueryResult< - TData = ImportErrorServiceGetImportErrorsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UseImportErrorServiceGetImportErrorKeyFn = ({ importErrorId }: { + importErrorId: number; +}, queryKey?: Array) => [useImportErrorServiceGetImportErrorKey, ...(queryKey ?? [{ importErrorId }])]; +export type ImportErrorServiceGetImportErrorsDefaultResponse = Awaited>; +export type ImportErrorServiceGetImportErrorsQueryResult = UseQueryResult; export const useImportErrorServiceGetImportErrorsKey = "ImportErrorServiceGetImportErrors"; -export const UseImportErrorServiceGetImportErrorsKeyFn = ( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: Array, -) => [useImportErrorServiceGetImportErrorsKey, ...(queryKey ?? [{ limit, offset, orderBy }])]; +export const UseImportErrorServiceGetImportErrorsKeyFn = ({ limit, offset, orderBy }: { + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}, queryKey?: Array) => [useImportErrorServiceGetImportErrorsKey, ...(queryKey ?? 
[{ limit, offset, orderBy }])]; export type JobServiceGetJobsDefaultResponse = Awaited>; -export type JobServiceGetJobsQueryResult< - TData = JobServiceGetJobsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type JobServiceGetJobsQueryResult = UseQueryResult; export const useJobServiceGetJobsKey = "JobServiceGetJobs"; -export const UseJobServiceGetJobsKeyFn = ( - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; - } = {}, - queryKey?: Array, -) => [ - useJobServiceGetJobsKey, - ...(queryKey ?? [ - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }, - ]), -]; +export const UseJobServiceGetJobsKeyFn = ({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + startDateGte?: string; + startDateLte?: string; +} = {}, queryKey?: Array) => [useJobServiceGetJobsKey, ...(queryKey ?? [{ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }])]; export type PluginServiceGetPluginsDefaultResponse = Awaited>; -export type PluginServiceGetPluginsQueryResult< - TData = PluginServiceGetPluginsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type PluginServiceGetPluginsQueryResult = UseQueryResult; export const usePluginServiceGetPluginsKey = "PluginServiceGetPlugins"; -export const UsePluginServiceGetPluginsKeyFn = ( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: Array, -) => [usePluginServiceGetPluginsKey, ...(queryKey ?? [{ limit, offset }])]; +export const UsePluginServiceGetPluginsKeyFn = ({ limit, offset }: { + limit?: number; + offset?: number; +} = {}, queryKey?: Array) => [usePluginServiceGetPluginsKey, ...(queryKey ?? [{ limit, offset }])]; +export type PluginServiceImportErrorsDefaultResponse = Awaited>; +export type PluginServiceImportErrorsQueryResult = UseQueryResult; +export const usePluginServiceImportErrorsKey = "PluginServiceImportErrors"; +export const UsePluginServiceImportErrorsKeyFn = (queryKey?: Array) => [usePluginServiceImportErrorsKey, ...(queryKey ?? [])]; export type PoolServiceGetPoolDefaultResponse = Awaited>; -export type PoolServiceGetPoolQueryResult< - TData = PoolServiceGetPoolDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type PoolServiceGetPoolQueryResult = UseQueryResult; export const usePoolServiceGetPoolKey = "PoolServiceGetPool"; -export const UsePoolServiceGetPoolKeyFn = ( - { - poolName, - }: { - poolName: string; - }, - queryKey?: Array, -) => [usePoolServiceGetPoolKey, ...(queryKey ?? [{ poolName }])]; +export const UsePoolServiceGetPoolKeyFn = ({ poolName }: { + poolName: string; +}, queryKey?: Array) => [usePoolServiceGetPoolKey, ...(queryKey ?? 
[{ poolName }])]; export type PoolServiceGetPoolsDefaultResponse = Awaited>; -export type PoolServiceGetPoolsQueryResult< - TData = PoolServiceGetPoolsDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type PoolServiceGetPoolsQueryResult = UseQueryResult; export const usePoolServiceGetPoolsKey = "PoolServiceGetPools"; -export const UsePoolServiceGetPoolsKeyFn = ( - { - limit, - offset, - orderBy, - poolNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; - } = {}, - queryKey?: Array, -) => [usePoolServiceGetPoolsKey, ...(queryKey ?? [{ limit, offset, orderBy, poolNamePattern }])]; -export type ProviderServiceGetProvidersDefaultResponse = Awaited< - ReturnType ->; -export type ProviderServiceGetProvidersQueryResult< - TData = ProviderServiceGetProvidersDefaultResponse, - TError = unknown, -> = UseQueryResult; +export const UsePoolServiceGetPoolsKeyFn = ({ limit, offset, orderBy, poolNamePattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + poolNamePattern?: string; +} = {}, queryKey?: Array) => [usePoolServiceGetPoolsKey, ...(queryKey ?? [{ limit, offset, orderBy, poolNamePattern }])]; +export type ProviderServiceGetProvidersDefaultResponse = Awaited>; +export type ProviderServiceGetProvidersQueryResult = UseQueryResult; export const useProviderServiceGetProvidersKey = "ProviderServiceGetProviders"; -export const UseProviderServiceGetProvidersKeyFn = ( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: Array, -) => [useProviderServiceGetProvidersKey, ...(queryKey ?? [{ limit, offset }])]; +export const UseProviderServiceGetProvidersKeyFn = ({ limit, offset }: { + limit?: number; + offset?: number; +} = {}, queryKey?: Array) => [useProviderServiceGetProvidersKey, ...(queryKey ?? [{ limit, offset }])]; export type XcomServiceGetXcomEntryDefaultResponse = Awaited>; -export type XcomServiceGetXcomEntryQueryResult< - TData = XcomServiceGetXcomEntryDefaultResponse, - TError = unknown, -> = UseQueryResult; +export type XcomServiceGetXcomEntryQueryResult = UseQueryResult; export const useXcomServiceGetXcomEntryKey = "XcomServiceGetXcomEntry"; -export const UseXcomServiceGetXcomEntryKeyFn = ( - { - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; - }, - queryKey?: Array, -) => [ - useXcomServiceGetXcomEntryKey, - ...(queryKey ?? [{ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }]), -]; +export const UseXcomServiceGetXcomEntryKeyFn = ({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; +}, queryKey?: Array) => [useXcomServiceGetXcomEntryKey, ...(queryKey ?? 
export type XcomServiceGetXcomEntryDefaultResponse = Awaited<ReturnType<typeof XcomService.getXcomEntry>>;
-export type XcomServiceGetXcomEntryQueryResult<
-  TData = XcomServiceGetXcomEntryDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type XcomServiceGetXcomEntryQueryResult<TData = XcomServiceGetXcomEntryDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useXcomServiceGetXcomEntryKey = "XcomServiceGetXcomEntry";
-export const UseXcomServiceGetXcomEntryKeyFn = (
-  {
-    dagId,
-    dagRunId,
-    deserialize,
-    mapIndex,
-    stringify,
-    taskId,
-    xcomKey,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    deserialize?: boolean;
-    mapIndex?: number;
-    stringify?: boolean;
-    taskId: string;
-    xcomKey: string;
-  },
-  queryKey?: Array<unknown>,
-) => [
-  useXcomServiceGetXcomEntryKey,
-  ...(queryKey ?? [{ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }]),
-];
+export const UseXcomServiceGetXcomEntryKeyFn = ({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: {
+  dagId: string;
+  dagRunId: string;
+  deserialize?: boolean;
+  mapIndex?: number;
+  stringify?: boolean;
+  taskId: string;
+  xcomKey: string;
+}, queryKey?: Array<unknown>) => [useXcomServiceGetXcomEntryKey, ...(queryKey ?? [{ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }])];
export type XcomServiceGetXcomEntriesDefaultResponse = Awaited<ReturnType<typeof XcomService.getXcomEntries>>;
-export type XcomServiceGetXcomEntriesQueryResult<
-  TData = XcomServiceGetXcomEntriesDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type XcomServiceGetXcomEntriesQueryResult<TData = XcomServiceGetXcomEntriesDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useXcomServiceGetXcomEntriesKey = "XcomServiceGetXcomEntries";
-export const UseXcomServiceGetXcomEntriesKeyFn = (
-  {
-    dagId,
-    dagRunId,
-    limit,
-    mapIndex,
-    offset,
-    taskId,
-    xcomKey,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    limit?: number;
-    mapIndex?: number;
-    offset?: number;
-    taskId: string;
-    xcomKey?: string;
-  },
-  queryKey?: Array<unknown>,
-) => [
-  useXcomServiceGetXcomEntriesKey,
-  ...(queryKey ?? [{ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }]),
-];
+export const UseXcomServiceGetXcomEntriesKeyFn = ({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: {
+  dagId: string;
+  dagRunId: string;
+  limit?: number;
+  mapIndex?: number;
+  offset?: number;
+  taskId: string;
+  xcomKey?: string;
+}, queryKey?: Array<unknown>) => [useXcomServiceGetXcomEntriesKey, ...(queryKey ?? [{ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }])];
export type TaskServiceGetTasksDefaultResponse = Awaited<ReturnType<typeof TaskService.getTasks>>;
-export type TaskServiceGetTasksQueryResult<
-  TData = TaskServiceGetTasksDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type TaskServiceGetTasksQueryResult<TData = TaskServiceGetTasksDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useTaskServiceGetTasksKey = "TaskServiceGetTasks";
-export const UseTaskServiceGetTasksKeyFn = (
-  {
-    dagId,
-    orderBy,
-  }: {
-    dagId: string;
-    orderBy?: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useTaskServiceGetTasksKey, ...(queryKey ?? [{ dagId, orderBy }])];
+export const UseTaskServiceGetTasksKeyFn = ({ dagId, orderBy }: {
+  dagId: string;
+  orderBy?: string;
+}, queryKey?: Array<unknown>) => [useTaskServiceGetTasksKey, ...(queryKey ?? [{ dagId, orderBy }])];
export type TaskServiceGetTaskDefaultResponse = Awaited<ReturnType<typeof TaskService.getTask>>;
-export type TaskServiceGetTaskQueryResult<
-  TData = TaskServiceGetTaskDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type TaskServiceGetTaskQueryResult<TData = TaskServiceGetTaskDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useTaskServiceGetTaskKey = "TaskServiceGetTask";
-export const UseTaskServiceGetTaskKeyFn = (
-  {
-    dagId,
-    taskId,
-  }: {
-    dagId: string;
-    taskId: unknown;
-  },
-  queryKey?: Array<unknown>,
-) => [useTaskServiceGetTaskKey, ...(queryKey ?? [{ dagId, taskId }])];
-export type VariableServiceGetVariableDefaultResponse = Awaited<
-  ReturnType<typeof VariableService.getVariable>
->;
-export type VariableServiceGetVariableQueryResult<
-  TData = VariableServiceGetVariableDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseTaskServiceGetTaskKeyFn = ({ dagId, taskId }: {
+  dagId: string;
+  taskId: unknown;
+}, queryKey?: Array<unknown>) => [useTaskServiceGetTaskKey, ...(queryKey ?? [{ dagId, taskId }])];
+export type VariableServiceGetVariableDefaultResponse = Awaited<ReturnType<typeof VariableService.getVariable>>;
+export type VariableServiceGetVariableQueryResult<TData = VariableServiceGetVariableDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useVariableServiceGetVariableKey = "VariableServiceGetVariable";
-export const UseVariableServiceGetVariableKeyFn = (
-  {
-    variableKey,
-  }: {
-    variableKey: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])];
-export type VariableServiceGetVariablesDefaultResponse = Awaited<
-  ReturnType<typeof VariableService.getVariables>
->;
-export type VariableServiceGetVariablesQueryResult<
-  TData = VariableServiceGetVariablesDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseVariableServiceGetVariableKeyFn = ({ variableKey }: {
+  variableKey: string;
+}, queryKey?: Array<unknown>) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])];
+export type VariableServiceGetVariablesDefaultResponse = Awaited<ReturnType<typeof VariableService.getVariables>>;
+export type VariableServiceGetVariablesQueryResult<TData = VariableServiceGetVariablesDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useVariableServiceGetVariablesKey = "VariableServiceGetVariables";
-export const UseVariableServiceGetVariablesKeyFn = (
-  {
-    limit,
-    offset,
-    orderBy,
-    variableKeyPattern,
-  }: {
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    variableKeyPattern?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [useVariableServiceGetVariablesKey, ...(queryKey ?? [{ limit, offset, orderBy, variableKeyPattern }])];
-export type DagVersionServiceGetDagVersionDefaultResponse = Awaited<
-  ReturnType<typeof DagVersionService.getDagVersion>
->;
-export type DagVersionServiceGetDagVersionQueryResult<
-  TData = DagVersionServiceGetDagVersionDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseVariableServiceGetVariablesKeyFn = ({ limit, offset, orderBy, variableKeyPattern }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  variableKeyPattern?: string;
+} = {}, queryKey?: Array<unknown>) => [useVariableServiceGetVariablesKey, ...(queryKey ?? [{ limit, offset, orderBy, variableKeyPattern }])];
+export type DagVersionServiceGetDagVersionDefaultResponse = Awaited<ReturnType<typeof DagVersionService.getDagVersion>>;
+export type DagVersionServiceGetDagVersionQueryResult<TData = DagVersionServiceGetDagVersionDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useDagVersionServiceGetDagVersionKey = "DagVersionServiceGetDagVersion";
-export const UseDagVersionServiceGetDagVersionKeyFn = (
-  {
-    dagId,
-    versionNumber,
-  }: {
-    dagId: string;
-    versionNumber: number;
-  },
-  queryKey?: Array<unknown>,
-) => [useDagVersionServiceGetDagVersionKey, ...(queryKey ?? [{ dagId, versionNumber }])];
-export type DagVersionServiceGetDagVersionsDefaultResponse = Awaited<
-  ReturnType<typeof DagVersionService.getDagVersions>
->;
-export type DagVersionServiceGetDagVersionsQueryResult<
-  TData = DagVersionServiceGetDagVersionsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseDagVersionServiceGetDagVersionKeyFn = ({ dagId, versionNumber }: {
+  dagId: string;
+  versionNumber: number;
+}, queryKey?: Array<unknown>) => [useDagVersionServiceGetDagVersionKey, ...(queryKey ?? [{ dagId, versionNumber }])];
+export type DagVersionServiceGetDagVersionsDefaultResponse = Awaited<ReturnType<typeof DagVersionService.getDagVersions>>;
+export type DagVersionServiceGetDagVersionsQueryResult<TData = DagVersionServiceGetDagVersionsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useDagVersionServiceGetDagVersionsKey = "DagVersionServiceGetDagVersions";
-export const UseDagVersionServiceGetDagVersionsKeyFn = (
-  {
-    bundleName,
-    bundleVersion,
-    dagId,
-    limit,
-    offset,
-    orderBy,
-    versionNumber,
-  }: {
-    bundleName?: string;
-    bundleVersion?: string;
-    dagId: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    versionNumber?: number;
-  },
-  queryKey?: Array<unknown>,
-) => [
-  useDagVersionServiceGetDagVersionsKey,
-  ...(queryKey ?? [{ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }]),
-];
+export const UseDagVersionServiceGetDagVersionsKeyFn = ({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: {
+  bundleName?: string;
+  bundleVersion?: string;
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  versionNumber?: number;
+}, queryKey?: Array<unknown>) => [useDagVersionServiceGetDagVersionsKey, ...(queryKey ?? [{ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }])];
+export type HumanInTheLoopServiceGetHitlDetailDefaultResponse = Awaited<ReturnType<typeof HumanInTheLoopService.getHitlDetail>>;
+export type HumanInTheLoopServiceGetHitlDetailQueryResult<TData = HumanInTheLoopServiceGetHitlDetailDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useHumanInTheLoopServiceGetHitlDetailKey = "HumanInTheLoopServiceGetHitlDetail";
+export const UseHumanInTheLoopServiceGetHitlDetailKeyFn = ({ dagId, dagRunId, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  taskId: string;
+}, queryKey?: Array<unknown>) => [useHumanInTheLoopServiceGetHitlDetailKey, ...(queryKey ?? [{ dagId, dagRunId, taskId }])];
+export type HumanInTheLoopServiceGetMappedTiHitlDetailDefaultResponse = Awaited<ReturnType<typeof HumanInTheLoopService.getMappedTiHitlDetail>>;
+export type HumanInTheLoopServiceGetMappedTiHitlDetailQueryResult<TData = HumanInTheLoopServiceGetMappedTiHitlDetailDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useHumanInTheLoopServiceGetMappedTiHitlDetailKey = "HumanInTheLoopServiceGetMappedTiHitlDetail";
+export const UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: Array<unknown>) => [useHumanInTheLoopServiceGetMappedTiHitlDetailKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])];
+export type HumanInTheLoopServiceGetHitlDetailsDefaultResponse = Awaited<ReturnType<typeof HumanInTheLoopService.getHitlDetails>>;
+export type HumanInTheLoopServiceGetHitlDetailsQueryResult<TData = HumanInTheLoopServiceGetHitlDetailsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useHumanInTheLoopServiceGetHitlDetailsKey = "HumanInTheLoopServiceGetHitlDetails";
+export const UseHumanInTheLoopServiceGetHitlDetailsKeyFn = ({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }: {
+  bodySearch?: string;
+  dagId?: string;
+  dagIdPattern?: string;
+  dagRunId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  responseReceived?: boolean;
+  state?: string[];
+  subjectSearch?: string;
+  taskId?: string;
+  taskIdPattern?: string;
+  userId?: string[];
+} = {}, queryKey?: Array<unknown>) => [useHumanInTheLoopServiceGetHitlDetailsKey, ...(queryKey ?? [{ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }])];
export type MonitorServiceGetHealthDefaultResponse = Awaited<ReturnType<typeof MonitorService.getHealth>>;
-export type MonitorServiceGetHealthQueryResult<
-  TData = MonitorServiceGetHealthDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type MonitorServiceGetHealthQueryResult<TData = MonitorServiceGetHealthDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useMonitorServiceGetHealthKey = "MonitorServiceGetHealth";
-export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array<unknown>) => [
-  useMonitorServiceGetHealthKey,
-  ...(queryKey ?? []),
-];
+export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array<unknown>) => [useMonitorServiceGetHealthKey, ...(queryKey ?? [])];
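The Human-in-the-Loop entries above are new API surface in this regeneration. A hedged sketch of prefetching the pending-response list; `HumanInTheLoopService.getHitlDetails` is inferred from the key-name convention used throughout this file, and the parameter values are illustrative:

import type { QueryClient } from "@tanstack/react-query";
import { HumanInTheLoopService } from "../requests/services.gen";
import { UseHumanInTheLoopServiceGetHitlDetailsKeyFn } from "./common";

// Warm the cache with HITL tasks still waiting for a human response.
export const prefetchPendingHitlDetails = (queryClient: QueryClient) => {
  const params = { responseReceived: false, limit: 20, offset: 0 };
  return queryClient.prefetchQuery({
    queryKey: UseHumanInTheLoopServiceGetHitlDetailsKeyFn(params),
    queryFn: () => HumanInTheLoopService.getHitlDetails(params),
  });
};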
export type VersionServiceGetVersionDefaultResponse = Awaited<ReturnType<typeof VersionService.getVersion>>;
-export type VersionServiceGetVersionQueryResult<
-  TData = VersionServiceGetVersionDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type VersionServiceGetVersionQueryResult<TData = VersionServiceGetVersionDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useVersionServiceGetVersionKey = "VersionServiceGetVersion";
-export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array<unknown>) => [
-  useVersionServiceGetVersionKey,
-  ...(queryKey ?? []),
-];
+export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array<unknown>) => [useVersionServiceGetVersionKey, ...(queryKey ?? [])];
export type LoginServiceLoginDefaultResponse = Awaited<ReturnType<typeof LoginService.login>>;
-export type LoginServiceLoginQueryResult<
-  TData = LoginServiceLoginDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type LoginServiceLoginQueryResult<TData = LoginServiceLoginDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useLoginServiceLoginKey = "LoginServiceLogin";
-export const UseLoginServiceLoginKeyFn = (
-  {
-    next,
-  }: {
-    next?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [useLoginServiceLoginKey, ...(queryKey ?? [{ next }])];
+export const UseLoginServiceLoginKeyFn = ({ next }: {
+  next?: string;
+} = {}, queryKey?: Array<unknown>) => [useLoginServiceLoginKey, ...(queryKey ?? [{ next }])];
export type LoginServiceLogoutDefaultResponse = Awaited<ReturnType<typeof LoginService.logout>>;
-export type LoginServiceLogoutQueryResult<
-  TData = LoginServiceLogoutDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export type LoginServiceLogoutQueryResult<TData = LoginServiceLogoutDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useLoginServiceLogoutKey = "LoginServiceLogout";
-export const UseLoginServiceLogoutKeyFn = (
-  {
-    next,
-  }: {
-    next?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [useLoginServiceLogoutKey, ...(queryKey ?? [{ next }])];
-export type AuthLinksServiceGetAuthMenusDefaultResponse = Awaited<
-  ReturnType<typeof AuthLinksService.getAuthMenus>
->;
-export type AuthLinksServiceGetAuthMenusQueryResult<
-  TData = AuthLinksServiceGetAuthMenusDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseLoginServiceLogoutKeyFn = ({ next }: {
+  next?: string;
+} = {}, queryKey?: Array<unknown>) => [useLoginServiceLogoutKey, ...(queryKey ?? [{ next }])];
+export type LoginServiceRefreshDefaultResponse = Awaited<ReturnType<typeof LoginService.refresh>>;
+export type LoginServiceRefreshQueryResult<TData = LoginServiceRefreshDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useLoginServiceRefreshKey = "LoginServiceRefresh";
+export const UseLoginServiceRefreshKeyFn = ({ next }: {
+  next?: string;
+} = {}, queryKey?: Array<unknown>) => [useLoginServiceRefreshKey, ...(queryKey ?? [{ next }])];
+export type AuthLinksServiceGetAuthMenusDefaultResponse = Awaited<ReturnType<typeof AuthLinksService.getAuthMenus>>;
+export type AuthLinksServiceGetAuthMenusQueryResult<TData = AuthLinksServiceGetAuthMenusDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useAuthLinksServiceGetAuthMenusKey = "AuthLinksServiceGetAuthMenus";
-export const UseAuthLinksServiceGetAuthMenusKeyFn = (queryKey?: Array<unknown>) => [
-  useAuthLinksServiceGetAuthMenusKey,
-  ...(queryKey ?? []),
-];
-export type DagsServiceRecentDagRunsDefaultResponse = Awaited<ReturnType<typeof DagsService.recentDagRuns>>;
-export type DagsServiceRecentDagRunsQueryResult<
-  TData = DagsServiceRecentDagRunsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
-export const useDagsServiceRecentDagRunsKey = "DagsServiceRecentDagRuns";
-export const UseDagsServiceRecentDagRunsKeyFn = (
-  {
-    dagDisplayNamePattern,
-    dagIdPattern,
-    dagIds,
-    dagRunsLimit,
-    excludeStale,
-    lastDagRunState,
-    limit,
-    offset,
-    owners,
-    paused,
-    tags,
-    tagsMatchMode,
-  }: {
-    dagDisplayNamePattern?: string;
-    dagIdPattern?: string;
-    dagIds?: string[];
-    dagRunsLimit?: number;
-    excludeStale?: boolean;
-    lastDagRunState?: DagRunState;
-    limit?: number;
-    offset?: number;
-    owners?: string[];
-    paused?: boolean;
-    tags?: string[];
-    tagsMatchMode?: "any" | "all";
-  } = {},
-  queryKey?: Array<unknown>,
-) => [
-  useDagsServiceRecentDagRunsKey,
-  ...(queryKey ?? [
-    {
-      dagDisplayNamePattern,
-      dagIdPattern,
-      dagIds,
-      dagRunsLimit,
-      excludeStale,
-      lastDagRunState,
-      limit,
-      offset,
-      owners,
-      paused,
-      tags,
-      tagsMatchMode,
-    },
-  ]),
-];
-export type DependenciesServiceGetDependenciesDefaultResponse = Awaited<
-  ReturnType<typeof DependenciesService.getDependencies>
->;
-export type DependenciesServiceGetDependenciesQueryResult<
-  TData = DependenciesServiceGetDependenciesDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseAuthLinksServiceGetAuthMenusKeyFn = (queryKey?: Array<unknown>) => [useAuthLinksServiceGetAuthMenusKey, ...(queryKey ?? [])];
+export type DependenciesServiceGetDependenciesDefaultResponse = Awaited<ReturnType<typeof DependenciesService.getDependencies>>;
+export type DependenciesServiceGetDependenciesQueryResult<TData = DependenciesServiceGetDependenciesDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useDependenciesServiceGetDependenciesKey = "DependenciesServiceGetDependencies";
-export const UseDependenciesServiceGetDependenciesKeyFn = (
-  {
-    nodeId,
-  }: {
-    nodeId?: string;
-  } = {},
-  queryKey?: Array<unknown>,
-) => [useDependenciesServiceGetDependenciesKey, ...(queryKey ?? [{ nodeId }])];
-export type DashboardServiceHistoricalMetricsDefaultResponse = Awaited<
-  ReturnType<typeof DashboardService.historicalMetrics>
->;
-export type DashboardServiceHistoricalMetricsQueryResult<
-  TData = DashboardServiceHistoricalMetricsDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseDependenciesServiceGetDependenciesKeyFn = ({ nodeId }: {
+  nodeId?: string;
+} = {}, queryKey?: Array<unknown>) => [useDependenciesServiceGetDependenciesKey, ...(queryKey ?? [{ nodeId }])];
+export type DashboardServiceHistoricalMetricsDefaultResponse = Awaited<ReturnType<typeof DashboardService.historicalMetrics>>;
+export type DashboardServiceHistoricalMetricsQueryResult<TData = DashboardServiceHistoricalMetricsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useDashboardServiceHistoricalMetricsKey = "DashboardServiceHistoricalMetrics";
-export const UseDashboardServiceHistoricalMetricsKeyFn = (
-  {
-    endDate,
-    startDate,
-  }: {
-    endDate?: string;
-    startDate: string;
-  },
-  queryKey?: Array<unknown>,
-) => [useDashboardServiceHistoricalMetricsKey, ...(queryKey ?? [{ endDate, startDate }])];
-export type StructureServiceStructureDataDefaultResponse = Awaited<
-  ReturnType<typeof StructureService.structureData>
->;
-export type StructureServiceStructureDataQueryResult<
-  TData = StructureServiceStructureDataDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
+export const UseDashboardServiceHistoricalMetricsKeyFn = ({ endDate, startDate }: {
+  endDate?: string;
+  startDate: string;
+}, queryKey?: Array<unknown>) => [useDashboardServiceHistoricalMetricsKey, ...(queryKey ?? [{ endDate, startDate }])];
+export type DashboardServiceDagStatsDefaultResponse = Awaited<ReturnType<typeof DashboardService.dagStats>>;
+export type DashboardServiceDagStatsQueryResult<TData = DashboardServiceDagStatsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useDashboardServiceDagStatsKey = "DashboardServiceDagStats";
+export const UseDashboardServiceDagStatsKeyFn = (queryKey?: Array<unknown>) => [useDashboardServiceDagStatsKey, ...(queryKey ?? [])];
+export type StructureServiceStructureDataDefaultResponse = Awaited<ReturnType<typeof StructureService.structureData>>;
+export type StructureServiceStructureDataQueryResult<TData = StructureServiceStructureDataDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
export const useStructureServiceStructureDataKey = "StructureServiceStructureData";
-export const UseStructureServiceStructureDataKeyFn = (
-  {
-    dagId,
-    externalDependencies,
-    includeDownstream,
-    includeUpstream,
-    root,
-    versionNumber,
-  }: {
-    dagId: string;
-    externalDependencies?: boolean;
-    includeDownstream?: boolean;
-    includeUpstream?: boolean;
-    root?: string;
-    versionNumber?: number;
-  },
-  queryKey?: Array<unknown>,
-) => [
-  useStructureServiceStructureDataKey,
-  ...(queryKey ?? [{ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }]),
-];
-export type GridServiceGridDataDefaultResponse = Awaited<ReturnType<typeof GridService.gridData>>;
-export type GridServiceGridDataQueryResult<
-  TData = GridServiceGridDataDefaultResponse,
-  TError = unknown,
-> = UseQueryResult<TData, TError>;
-export const useGridServiceGridDataKey = "GridServiceGridData";
-export const UseGridServiceGridDataKeyFn = (
-  {
-    dagId,
-    includeDownstream,
-    includeUpstream,
-    limit,
-    logicalDateGte,
-    logicalDateLte,
-    offset,
-    orderBy,
-    root,
-    runAfterGte,
-    runAfterLte,
-    runType,
-    state,
-  }: {
-    dagId: string;
-    includeDownstream?: boolean;
-    includeUpstream?: boolean;
-    limit?: number;
-    logicalDateGte?: string;
-    logicalDateLte?: string;
-    offset?: number;
-    orderBy?: string;
-    root?: string;
-    runAfterGte?: string;
-    runAfterLte?: string;
-    runType?: string[];
-    state?: string[];
-  },
-  queryKey?: Array<unknown>,
-) => [
-  useGridServiceGridDataKey,
-  ...(queryKey ?? [
-    {
-      dagId,
-      includeDownstream,
-      includeUpstream,
-      limit,
-      logicalDateGte,
-      logicalDateLte,
-      offset,
-      orderBy,
-      root,
-      runAfterGte,
-      runAfterLte,
-      runType,
-      state,
-    },
-  ]),
-];
-export type AssetServiceCreateAssetEventMutationResult = Awaited<
-  ReturnType<typeof AssetService.createAssetEvent>
->;
-export type AssetServiceMaterializeAssetMutationResult = Awaited<
-  ReturnType<typeof AssetService.materializeAsset>
->;
-export type BackfillServiceCreateBackfillMutationResult = Awaited<
-  ReturnType<typeof BackfillService.createBackfill>
->;
-export type BackfillServiceCreateBackfillDryRunMutationResult = Awaited<
-  ReturnType<typeof BackfillService.createBackfillDryRun>
->;
-export type ConnectionServicePostConnectionMutationResult = Awaited<
-  ReturnType<typeof ConnectionService.postConnection>
->;
-export type ConnectionServiceTestConnectionMutationResult = Awaited<
-  ReturnType<typeof ConnectionService.testConnection>
->;
-export type ConnectionServiceCreateDefaultConnectionsMutationResult = Awaited<
-  ReturnType<typeof ConnectionService.createDefaultConnections>
->;
+export const UseStructureServiceStructureDataKeyFn = ({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: {
+  dagId: string;
+  externalDependencies?: boolean;
+  includeDownstream?: boolean;
+  includeUpstream?: boolean;
+  root?: string;
+  versionNumber?: number;
+}, queryKey?: Array<unknown>) => [useStructureServiceStructureDataKey, ...(queryKey ?? [{ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }])];
+export type GridServiceGetDagStructureDefaultResponse = Awaited<ReturnType<typeof GridService.getDagStructure>>;
+export type GridServiceGetDagStructureQueryResult<TData = GridServiceGetDagStructureDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useGridServiceGetDagStructureKey = "GridServiceGetDagStructure";
+export const UseGridServiceGetDagStructureKeyFn = ({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+}, queryKey?: Array<unknown>) => [useGridServiceGetDagStructureKey, ...(queryKey ?? [{ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }])];
+export type GridServiceGetGridRunsDefaultResponse = Awaited<ReturnType<typeof GridService.getGridRuns>>;
+export type GridServiceGetGridRunsQueryResult<TData = GridServiceGetGridRunsDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useGridServiceGetGridRunsKey = "GridServiceGetGridRuns";
+export const UseGridServiceGetGridRunsKeyFn = ({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+}, queryKey?: Array<unknown>) => [useGridServiceGetGridRunsKey, ...(queryKey ?? [{ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }])];
+export type GridServiceGetGridTiSummariesDefaultResponse = Awaited<ReturnType<typeof GridService.getGridTiSummaries>>;
+export type GridServiceGetGridTiSummariesQueryResult<TData = GridServiceGetGridTiSummariesDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useGridServiceGetGridTiSummariesKey = "GridServiceGetGridTiSummaries";
+export const UseGridServiceGetGridTiSummariesKeyFn = ({ dagId, runId }: {
+  dagId: string;
+  runId: string;
+}, queryKey?: Array<unknown>) => [useGridServiceGetGridTiSummariesKey, ...(queryKey ?? [{ dagId, runId }])];
+export type CalendarServiceGetCalendarDefaultResponse = Awaited<ReturnType<typeof CalendarService.getCalendar>>;
+export type CalendarServiceGetCalendarQueryResult<TData = CalendarServiceGetCalendarDefaultResponse, TError = unknown> = UseQueryResult<TData, TError>;
+export const useCalendarServiceGetCalendarKey = "CalendarServiceGetCalendar";
+export const UseCalendarServiceGetCalendarKeyFn = ({ dagId, granularity, logicalDateGte, logicalDateLte }: {
+  dagId: string;
+  granularity?: "hourly" | "daily";
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+}, queryKey?: Array<unknown>) => [useCalendarServiceGetCalendarKey, ...(queryKey ?? [{ dagId, granularity, logicalDateGte, logicalDateLte }])];
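Hedged sketch for the new grid/calendar keys above: `granularity` is a closed union in the generated signature, so an invalid value fails at compile time. `CalendarService.getCalendar` is inferred from the key-name convention used in this file, and the options-object shape follows @tanstack/react-query v5:

import { queryOptions } from "@tanstack/react-query";
import { CalendarService } from "../requests/services.gen";
import { UseCalendarServiceGetCalendarKeyFn } from "./common";

// Reusable options for a daily calendar view of one DAG.
export const dagCalendarOptions = (dagId: string) =>
  queryOptions({
    queryKey: UseCalendarServiceGetCalendarKeyFn({ dagId, granularity: "daily" }),
    queryFn: () => CalendarService.getCalendar({ dagId, granularity: "daily" }),
  });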
+export type AssetServiceCreateAssetEventMutationResult = Awaited<ReturnType<typeof AssetService.createAssetEvent>>;
+export type AssetServiceMaterializeAssetMutationResult = Awaited<ReturnType<typeof AssetService.materializeAsset>>;
+export type BackfillServiceCreateBackfillMutationResult = Awaited<ReturnType<typeof BackfillService.createBackfill>>;
+export type BackfillServiceCreateBackfillDryRunMutationResult = Awaited<ReturnType<typeof BackfillService.createBackfillDryRun>>;
+export type ConnectionServicePostConnectionMutationResult = Awaited<ReturnType<typeof ConnectionService.postConnection>>;
+export type ConnectionServiceTestConnectionMutationResult = Awaited<ReturnType<typeof ConnectionService.testConnection>>;
+export type ConnectionServiceCreateDefaultConnectionsMutationResult = Awaited<ReturnType<typeof ConnectionService.createDefaultConnections>>;
export type DagRunServiceClearDagRunMutationResult = Awaited<ReturnType<typeof DagRunService.clearDagRun>>;
-export type DagRunServiceTriggerDagRunMutationResult = Awaited<
-  ReturnType<typeof DagRunService.triggerDagRun>
->;
-export type DagRunServiceGetListDagRunsBatchMutationResult = Awaited<
-  ReturnType<typeof DagRunService.getListDagRunsBatch>
->;
-export type TaskInstanceServiceGetTaskInstancesBatchMutationResult = Awaited<
-  ReturnType<typeof TaskInstanceService.getTaskInstancesBatch>
->;
-export type TaskInstanceServicePostClearTaskInstancesMutationResult = Awaited<
-  ReturnType<typeof TaskInstanceService.postClearTaskInstances>
->;
+export type DagRunServiceTriggerDagRunMutationResult = Awaited<ReturnType<typeof DagRunService.triggerDagRun>>;
+export type DagRunServiceGetListDagRunsBatchMutationResult = Awaited<ReturnType<typeof DagRunService.getListDagRunsBatch>>;
+export type DagServiceFavoriteDagMutationResult = Awaited<ReturnType<typeof DagService.favoriteDag>>;
+export type DagServiceUnfavoriteDagMutationResult = Awaited<ReturnType<typeof DagService.unfavoriteDag>>;
+export type TaskInstanceServiceGetTaskInstancesBatchMutationResult = Awaited<ReturnType<typeof TaskInstanceService.getTaskInstancesBatch>>;
+export type TaskInstanceServicePostClearTaskInstancesMutationResult = Awaited<ReturnType<typeof TaskInstanceService.postClearTaskInstances>>;
export type PoolServicePostPoolMutationResult = Awaited<ReturnType<typeof PoolService.postPool>>;
-export type XcomServiceCreateXcomEntryMutationResult = Awaited<
-  ReturnType<typeof XcomService.createXcomEntry>
->;
-export type VariableServicePostVariableMutationResult = Awaited<
-  ReturnType<typeof VariableService.postVariable>
->;
-export type BackfillServicePauseBackfillMutationResult = Awaited<
-  ReturnType<typeof BackfillService.pauseBackfill>
->;
-export type BackfillServiceUnpauseBackfillMutationResult = Awaited<
-  ReturnType<typeof BackfillService.unpauseBackfill>
->;
-export type BackfillServiceCancelBackfillMutationResult = Awaited<
-  ReturnType<typeof BackfillService.cancelBackfill>
->;
-export type DagParsingServiceReparseDagFileMutationResult = Awaited<
-  ReturnType<typeof DagParsingService.reparseDagFile>
->;
-export type ConnectionServicePatchConnectionMutationResult = Awaited<
-  ReturnType<typeof ConnectionService.patchConnection>
->;
-export type ConnectionServiceBulkConnectionsMutationResult = Awaited<
-  ReturnType<typeof ConnectionService.bulkConnections>
->;
+export type XcomServiceCreateXcomEntryMutationResult = Awaited<ReturnType<typeof XcomService.createXcomEntry>>;
+export type VariableServicePostVariableMutationResult = Awaited<ReturnType<typeof VariableService.postVariable>>;
+export type BackfillServicePauseBackfillMutationResult = Awaited<ReturnType<typeof BackfillService.pauseBackfill>>;
+export type BackfillServiceUnpauseBackfillMutationResult = Awaited<ReturnType<typeof BackfillService.unpauseBackfill>>;
+export type BackfillServiceCancelBackfillMutationResult = Awaited<ReturnType<typeof BackfillService.cancelBackfill>>;
+export type DagParsingServiceReparseDagFileMutationResult = Awaited<ReturnType<typeof DagParsingService.reparseDagFile>>;
+export type ConnectionServicePatchConnectionMutationResult = Awaited<ReturnType<typeof ConnectionService.patchConnection>>;
+export type ConnectionServiceBulkConnectionsMutationResult = Awaited<ReturnType<typeof ConnectionService.bulkConnections>>;
export type DagRunServicePatchDagRunMutationResult = Awaited<ReturnType<typeof DagRunService.patchDagRun>>;
export type DagServicePatchDagsMutationResult = Awaited<ReturnType<typeof DagService.patchDags>>;
export type DagServicePatchDagMutationResult = Awaited<ReturnType<typeof DagService.patchDag>>;
-export type TaskInstanceServicePatchTaskInstanceMutationResult = Awaited<
-  ReturnType<typeof TaskInstanceService.patchTaskInstance>
->;
-export type TaskInstanceServicePatchTaskInstance1MutationResult = Awaited<
-  ReturnType<typeof TaskInstanceService.patchTaskInstance1>
->;
-export type TaskInstanceServicePatchTaskInstanceDryRunMutationResult = Awaited<
-  ReturnType<typeof TaskInstanceService.patchTaskInstanceDryRun>
->;
-export type TaskInstanceServicePatchTaskInstanceDryRun1MutationResult = Awaited<
-  ReturnType<typeof TaskInstanceService.patchTaskInstanceDryRun1>
->;
+export type TaskInstanceServicePatchTaskInstanceMutationResult = Awaited<ReturnType<typeof TaskInstanceService.patchTaskInstance>>;
+export type TaskInstanceServicePatchTaskInstanceByMapIndexMutationResult = Awaited<ReturnType<typeof TaskInstanceService.patchTaskInstanceByMapIndex>>;
+export type TaskInstanceServiceBulkTaskInstancesMutationResult = Awaited<ReturnType<typeof TaskInstanceService.bulkTaskInstances>>;
+export type TaskInstanceServicePatchTaskInstanceDryRunByMapIndexMutationResult = Awaited<ReturnType<typeof TaskInstanceService.patchTaskInstanceDryRunByMapIndex>>;
+export type TaskInstanceServicePatchTaskInstanceDryRunMutationResult = Awaited<ReturnType<typeof TaskInstanceService.patchTaskInstanceDryRun>>;
export type PoolServicePatchPoolMutationResult = Awaited<ReturnType<typeof PoolService.patchPool>>;
export type PoolServiceBulkPoolsMutationResult = Awaited<ReturnType<typeof PoolService.bulkPools>>;
-export type XcomServiceUpdateXcomEntryMutationResult = Awaited<
-  ReturnType<typeof XcomService.updateXcomEntry>
->;
-export type VariableServicePatchVariableMutationResult = Awaited<
-  ReturnType<typeof VariableService.patchVariable>
->;
-export type VariableServiceBulkVariablesMutationResult = Awaited<
-  ReturnType<typeof VariableService.bulkVariables>
->;
-export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited<
-  ReturnType<typeof AssetService.deleteAssetQueuedEvents>
->;
-export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited<
-  ReturnType<typeof AssetService.deleteDagAssetQueuedEvents>
->;
-export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited<
-  ReturnType<typeof AssetService.deleteDagAssetQueuedEvent>
->;
-export type ConnectionServiceDeleteConnectionMutationResult = Awaited<
-  ReturnType<typeof ConnectionService.deleteConnection>
->;
+export type XcomServiceUpdateXcomEntryMutationResult = Awaited<ReturnType<typeof XcomService.updateXcomEntry>>;
+export type VariableServicePatchVariableMutationResult = Awaited<ReturnType<typeof VariableService.patchVariable>>;
+export type VariableServiceBulkVariablesMutationResult = Awaited<ReturnType<typeof VariableService.bulkVariables>>;
+export type HumanInTheLoopServiceUpdateHitlDetailMutationResult = Awaited<ReturnType<typeof HumanInTheLoopService.updateHitlDetail>>;
+export type HumanInTheLoopServiceUpdateMappedTiHitlDetailMutationResult = Awaited<ReturnType<typeof HumanInTheLoopService.updateMappedTiHitlDetail>>;
+export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited<ReturnType<typeof AssetService.deleteAssetQueuedEvents>>;
+export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited<ReturnType<typeof AssetService.deleteDagAssetQueuedEvents>>;
+export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited<ReturnType<typeof AssetService.deleteDagAssetQueuedEvent>>;
+export type ConnectionServiceDeleteConnectionMutationResult = Awaited<ReturnType<typeof ConnectionService.deleteConnection>>;
export type DagRunServiceDeleteDagRunMutationResult = Awaited<ReturnType<typeof DagRunService.deleteDagRun>>;
export type DagServiceDeleteDagMutationResult = Awaited<ReturnType<typeof DagService.deleteDag>>;
+export type TaskInstanceServiceDeleteTaskInstanceMutationResult = Awaited<ReturnType<typeof TaskInstanceService.deleteTaskInstance>>;
export type PoolServiceDeletePoolMutationResult = Awaited<ReturnType<typeof PoolService.deletePool>>;
-export type VariableServiceDeleteVariableMutationResult = Awaited<
-  ReturnType<typeof VariableService.deleteVariable>
->;
+export type VariableServiceDeleteVariableMutationResult = Awaited<ReturnType<typeof VariableService.deleteVariable>>;
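Hedged note on the `*MutationResult` aliases above: each resolves to the awaited return type of the corresponding service method, so mutation callbacks stay typed without repeating `Awaited<ReturnType<...>>` at call sites. A minimal sketch using one of the new aliases from this diff (the callback itself is illustrative):

import type { DagServiceFavoriteDagMutationResult } from "./common";

// The parameter type tracks whatever DagService.favoriteDag resolves to.
export const onDagFavorited = (result: DagServiceFavoriteDagMutationResult): void => {
  console.debug("favorite-dag mutation settled", result);
};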
diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts
index cd06c2bff30ea..7a18c205ce06b 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts
@@ -1,2518 +1,1448 @@
-// generated with @7nohe/openapi-react-query-codegen@1.6.2
-import { type QueryClient } from "@tanstack/react-query";
+// generated with @7nohe/openapi-react-query-codegen@1.6.2
-import {
-  AssetService,
-  AuthLinksService,
-  BackfillService,
-  ConfigService,
-  ConnectionService,
-  DagReportService,
-  DagRunService,
-  DagService,
-  DagSourceService,
-  DagStatsService,
-  DagVersionService,
-  DagWarningService,
-  DagsService,
-  DashboardService,
-  DependenciesService,
-  EventLogService,
-  ExtraLinksService,
-  GridService,
-  ImportErrorService,
-  JobService,
-  LoginService,
-  MonitorService,
-  PluginService,
-  PoolService,
-  ProviderService,
-  StructureService,
-  TaskInstanceService,
-  TaskService,
-  VariableService,
-  VersionService,
-  XcomService,
-} from "../requests/services.gen";
+import { type QueryClient } from "@tanstack/react-query";
+import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen";
import { DagRunState, DagWarningType } from "../requests/types.gen";
import * as Common from "./common";
-/**
- * Get Assets
- * Get assets.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.namePattern
- * @param data.uriPattern
- * @param data.dagIds
- * @param data.onlyActive
- * @param data.orderBy
- * @returns AssetCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetAssetsData = (
-  queryClient: QueryClient,
-  {
-    dagIds,
-    limit,
-    namePattern,
-    offset,
-    onlyActive,
-    orderBy,
-    uriPattern,
-  }: {
-    dagIds?: string[];
-    limit?: number;
-    namePattern?: string;
-    offset?: number;
-    onlyActive?: boolean;
-    orderBy?: string;
-    uriPattern?: string;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetAssetsKeyFn({
-      dagIds,
-      limit,
-      namePattern,
-      offset,
-      onlyActive,
-      orderBy,
-      uriPattern,
-    }),
-    queryFn: () =>
-      AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }),
-  });
-/**
- * Get Asset Aliases
- * Get asset aliases.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.namePattern
- * @param data.orderBy
- * @returns AssetAliasCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetAssetAliasesData = (
-  queryClient: QueryClient,
-  {
-    limit,
-    namePattern,
-    offset,
-    orderBy,
-  }: {
-    limit?: number;
-    namePattern?: string;
-    offset?: number;
-    orderBy?: string;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }),
-    queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }),
-  });
-/**
- * Get Asset Alias
- * Get an asset alias.
- * @param data The data for the request.
- * @param data.assetAliasId
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetAssetAliasData = (
-  queryClient: QueryClient,
-  {
-    assetAliasId,
-  }: {
-    assetAliasId: number;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }),
-    queryFn: () => AssetService.getAssetAlias({ assetAliasId }),
-  });
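Context, hedged: the `ensureUse*Data` helpers in this file wrap `queryClient.ensureQueryData`, which returns cached data when fresh and fetches otherwise, making them suitable for warming the cache outside React (e.g. a router loader). Assuming the helper keeps its name in the regenerated output, a minimal sketch; the router wiring is illustrative:

import { QueryClient } from "@tanstack/react-query";
import { ensureUseAssetServiceGetAssetsData } from "./ensureQueryData";

// Route loader: resolve the assets list before the page renders.
export const assetsLoader = (queryClient: QueryClient) => () =>
  ensureUseAssetServiceGetAssetsData(queryClient, { limit: 100, offset: 0 });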
-/**
- * Get Asset Events
- * Get asset events.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.assetId
- * @param data.sourceDagId
- * @param data.sourceTaskId
- * @param data.sourceRunId
- * @param data.sourceMapIndex
- * @param data.timestampGte
- * @param data.timestampLte
- * @returns AssetEventCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetAssetEventsData = (
-  queryClient: QueryClient,
-  {
-    assetId,
-    limit,
-    offset,
-    orderBy,
-    sourceDagId,
-    sourceMapIndex,
-    sourceRunId,
-    sourceTaskId,
-    timestampGte,
-    timestampLte,
-  }: {
-    assetId?: number;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    sourceDagId?: string;
-    sourceMapIndex?: number;
-    sourceRunId?: string;
-    sourceTaskId?: string;
-    timestampGte?: string;
-    timestampLte?: string;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({
-      assetId,
-      limit,
-      offset,
-      orderBy,
-      sourceDagId,
-      sourceMapIndex,
-      sourceRunId,
-      sourceTaskId,
-      timestampGte,
-      timestampLte,
-    }),
-    queryFn: () =>
-      AssetService.getAssetEvents({
-        assetId,
-        limit,
-        offset,
-        orderBy,
-        sourceDagId,
-        sourceMapIndex,
-        sourceRunId,
-        sourceTaskId,
-        timestampGte,
-        timestampLte,
-      }),
-  });
-/**
- * Get Asset Queued Events
- * Get queued asset events for an asset.
- * @param data The data for the request.
- * @param data.assetId
- * @param data.before
- * @returns QueuedEventCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetAssetQueuedEventsData = (
-  queryClient: QueryClient,
-  {
-    assetId,
-    before,
-  }: {
-    assetId: number;
-    before?: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }),
-    queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }),
-  });
-/**
- * Get Asset
- * Get an asset.
- * @param data The data for the request.
- * @param data.assetId
- * @returns AssetResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetAssetData = (
-  queryClient: QueryClient,
-  {
-    assetId,
-  }: {
-    assetId: number;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }),
-    queryFn: () => AssetService.getAsset({ assetId }),
-  });
-/**
- * Get Dag Asset Queued Events
- * Get queued asset events for a DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.before
- * @returns QueuedEventCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetDagAssetQueuedEventsData = (
-  queryClient: QueryClient,
-  {
-    before,
-    dagId,
-  }: {
-    before?: string;
-    dagId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }),
-    queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }),
-  });
-/**
- * Get Dag Asset Queued Event
- * Get a queued asset event for a DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.assetId
- * @param data.before
- * @returns QueuedEventResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceGetDagAssetQueuedEventData = (
-  queryClient: QueryClient,
-  {
-    assetId,
-    before,
-    dagId,
-  }: {
-    assetId: number;
-    before?: string;
-    dagId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }),
-    queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }),
-  });
-/**
- * Next Run Assets
- * @param data The data for the request.
- * @param data.dagId
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const ensureUseAssetServiceNextRunAssetsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-  }: {
-    dagId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }),
-    queryFn: () => AssetService.nextRunAssets({ dagId }),
-  });
-/**
- * List Backfills
- * @param data The data for the request.
- * @param data.dagId
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @returns BackfillCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseBackfillServiceListBackfillsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    limit,
-    offset,
-    orderBy,
-  }: {
-    dagId: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }),
-    queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }),
-  });
-/**
- * Get Backfill
- * @param data The data for the request.
- * @param data.backfillId
- * @returns BackfillResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseBackfillServiceGetBackfillData = (
-  queryClient: QueryClient,
-  {
-    backfillId,
-  }: {
-    backfillId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }),
-    queryFn: () => BackfillService.getBackfill({ backfillId }),
-  });
-/**
- * List Backfills
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.dagId
- * @param data.active
- * @returns BackfillCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseBackfillServiceListBackfills1Data = (
-  queryClient: QueryClient,
-  {
-    active,
-    dagId,
-    limit,
-    offset,
-    orderBy,
-  }: {
-    active?: boolean;
-    dagId?: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }),
-    queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }),
-  });
-/**
- * Get Connection
- * Get a connection entry.
- * @param data The data for the request.
- * @param data.connectionId
- * @returns ConnectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseConnectionServiceGetConnectionData = (
-  queryClient: QueryClient,
-  {
-    connectionId,
-  }: {
-    connectionId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }),
-    queryFn: () => ConnectionService.getConnection({ connectionId }),
-  });
-/**
- * Get Connections
- * Get all connection entries.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.connectionIdPattern
- * @returns ConnectionCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseConnectionServiceGetConnectionsData = (
-  queryClient: QueryClient,
-  {
-    connectionIdPattern,
-    limit,
-    offset,
-    orderBy,
-  }: {
-    connectionIdPattern?: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }),
-    queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }),
-  });
-/**
- * Hook Meta Data
- * Retrieve information about available connection types (hook classes) and their parameters.
- * @returns ConnectionHookMetaData Successful Response
- * @throws ApiError
- */
-export const ensureUseConnectionServiceHookMetaDataData = (queryClient: QueryClient) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(),
-    queryFn: () => ConnectionService.hookMetaData(),
-  });
-/**
- * Get Dag Run
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @returns DAGRunResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagRunServiceGetDagRunData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }),
-    queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }),
-  });
-/**
- * Get Upstream Asset Events
- * If dag run is asset-triggered, return the asset events that triggered it.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @returns AssetEventCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagRunServiceGetUpstreamAssetEventsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }),
-    queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }),
-  });
-/**
- * Get Dag Runs
- * Get all DAG Runs.
- *
- * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.limit
- * @param data.offset
- * @param data.runAfterGte
- * @param data.runAfterLte
- * @param data.logicalDateGte
- * @param data.logicalDateLte
- * @param data.startDateGte
- * @param data.startDateLte
- * @param data.endDateGte
- * @param data.endDateLte
- * @param data.updatedAtGte
- * @param data.updatedAtLte
- * @param data.runType
- * @param data.state
- * @param data.orderBy
- * @returns DAGRunCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagRunServiceGetDagRunsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    endDateGte,
-    endDateLte,
-    limit,
-    logicalDateGte,
-    logicalDateLte,
-    offset,
-    orderBy,
-    runAfterGte,
-    runAfterLte,
-    runType,
-    startDateGte,
-    startDateLte,
-    state,
-    updatedAtGte,
-    updatedAtLte,
-  }: {
-    dagId: string;
-    endDateGte?: string;
-    endDateLte?: string;
-    limit?: number;
-    logicalDateGte?: string;
-    logicalDateLte?: string;
-    offset?: number;
-    orderBy?: string;
-    runAfterGte?: string;
-    runAfterLte?: string;
-    runType?: string[];
-    startDateGte?: string;
-    startDateLte?: string;
-    state?: string[];
-    updatedAtGte?: string;
-    updatedAtLte?: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({
-      dagId,
-      endDateGte,
-      endDateLte,
-      limit,
-      logicalDateGte,
-      logicalDateLte,
-      offset,
-      orderBy,
-      runAfterGte,
-      runAfterLte,
-      runType,
-      startDateGte,
-      startDateLte,
-      state,
-      updatedAtGte,
-      updatedAtLte,
-    }),
-    queryFn: () =>
-      DagRunService.getDagRuns({
-        dagId,
-        endDateGte,
-        endDateLte,
-        limit,
-        logicalDateGte,
-        logicalDateLte,
-        offset,
-        orderBy,
-        runAfterGte,
-        runAfterLte,
-        runType,
-        startDateGte,
-        startDateLte,
-        state,
-        updatedAtGte,
-        updatedAtLte,
-      }),
-  });
-/**
- * Get Dag Source
- * Get source code using file token.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.versionNumber
- * @param data.accept
- * @returns DAGSourceResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagSourceServiceGetDagSourceData = (
-  queryClient: QueryClient,
-  {
-    accept,
-    dagId,
-    versionNumber,
-  }: {
-    accept?: "application/json" | "text/plain" | "*/*";
-    dagId: string;
-    versionNumber?: number;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }),
-    queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }),
-  });
-/**
- * Get Dag Stats
- * Get Dag statistics.
- * @param data The data for the request.
- * @param data.dagIds
- * @returns DagStatsCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagStatsServiceGetDagStatsData = (
-  queryClient: QueryClient,
-  {
-    dagIds,
-  }: {
-    dagIds?: string[];
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }),
-    queryFn: () => DagStatsService.getDagStats({ dagIds }),
-  });
-/**
- * Get Dag Reports
- * Get DAG report.
- * @param data The data for the request.
- * @param data.subdir
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const ensureUseDagReportServiceGetDagReportsData = (
-  queryClient: QueryClient,
-  {
-    subdir,
-  }: {
-    subdir: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }),
-    queryFn: () => DagReportService.getDagReports({ subdir }),
-  });
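The Get Dag Runs docstring above notes that `~` is accepted as the `dag_id` to mean "all DAGs". A hedged sketch of calling the service directly with that wildcard; the call shape mirrors the removed helper, and the literal parameter values are illustrative:

import { DagRunService } from "../requests/services.gen";

// Fetch recent runs across every DAG using the documented `~` wildcard.
export const recentRunsAcrossAllDags = () =>
  DagRunService.getDagRuns({ dagId: "~", limit: 25 });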
-/**
- * Get Config
- * @param data The data for the request.
- * @param data.section
- * @param data.accept
- * @returns Config Successful Response
- * @throws ApiError
- */
-export const ensureUseConfigServiceGetConfigData = (
-  queryClient: QueryClient,
-  {
-    accept,
-    section,
-  }: {
-    accept?: "application/json" | "text/plain" | "*/*";
-    section?: string;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }),
-    queryFn: () => ConfigService.getConfig({ accept, section }),
-  });
-/**
- * Get Config Value
- * @param data The data for the request.
- * @param data.section
- * @param data.option
- * @param data.accept
- * @returns Config Successful Response
- * @throws ApiError
- */
-export const ensureUseConfigServiceGetConfigValueData = (
-  queryClient: QueryClient,
-  {
-    accept,
-    option,
-    section,
-  }: {
-    accept?: "application/json" | "text/plain" | "*/*";
-    option: string;
-    section: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }),
-    queryFn: () => ConfigService.getConfigValue({ accept, option, section }),
-  });
-/**
- * Get Configs
- * Get configs for UI.
- * @returns ConfigResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseConfigServiceGetConfigsData = (queryClient: QueryClient) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseConfigServiceGetConfigsKeyFn(),
-    queryFn: () => ConfigService.getConfigs(),
-  });
-/**
- * List Dag Warnings
- * Get a list of DAG warnings.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.warningType
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @returns DAGWarningCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagWarningServiceListDagWarningsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    limit,
-    offset,
-    orderBy,
-    warningType,
-  }: {
-    dagId?: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    warningType?: DagWarningType;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }),
-    queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }),
-  });
-/**
- * Get Dags
- * Get all DAGs.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.tags
- * @param data.tagsMatchMode
- * @param data.owners
- * @param data.dagIdPattern
- * @param data.dagDisplayNamePattern
- * @param data.excludeStale
- * @param data.paused
- * @param data.lastDagRunState
- * @param data.dagRunStartDateGte
- * @param data.dagRunStartDateLte
- * @param data.dagRunEndDateGte
- * @param data.dagRunEndDateLte
- * @param data.dagRunState
- * @param data.orderBy
- * @returns DAGCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagServiceGetDagsData = (
-  queryClient: QueryClient,
-  {
-    dagDisplayNamePattern,
-    dagIdPattern,
-    dagRunEndDateGte,
-    dagRunEndDateLte,
-    dagRunStartDateGte,
-    dagRunStartDateLte,
-    dagRunState,
-    excludeStale,
-    lastDagRunState,
-    limit,
-    offset,
-    orderBy,
-    owners,
-    paused,
-    tags,
-    tagsMatchMode,
-  }: {
-    dagDisplayNamePattern?: string;
-    dagIdPattern?: string;
-    dagRunEndDateGte?: string;
-    dagRunEndDateLte?: string;
-    dagRunStartDateGte?: string;
-    dagRunStartDateLte?: string;
-    dagRunState?: string[];
-    excludeStale?: boolean;
-    lastDagRunState?: DagRunState;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    owners?: string[];
-    paused?: boolean;
-    tags?: string[];
-    tagsMatchMode?: "any" | "all";
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagServiceGetDagsKeyFn({
-      dagDisplayNamePattern,
-      dagIdPattern,
-      dagRunEndDateGte,
-      dagRunEndDateLte,
-      dagRunStartDateGte,
-      dagRunStartDateLte,
-      dagRunState,
-      excludeStale,
-      lastDagRunState,
-      limit,
-      offset,
-      orderBy,
-      owners,
-      paused,
-      tags,
-      tagsMatchMode,
-    }),
-    queryFn: () =>
-      DagService.getDags({
-        dagDisplayNamePattern,
-        dagIdPattern,
-        dagRunEndDateGte,
-        dagRunEndDateLte,
-        dagRunStartDateGte,
-        dagRunStartDateLte,
-        dagRunState,
-        excludeStale,
-        lastDagRunState,
-        limit,
-        offset,
-        orderBy,
-        owners,
-        paused,
-        tags,
-        tagsMatchMode,
-      }),
-  });
-/**
- * Get Dag
- * Get basic information about a DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @returns DAGResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagServiceGetDagData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-  }: {
-    dagId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }),
-    queryFn: () => DagService.getDag({ dagId }),
-  });
-/**
- * Get Dag Details
- * Get details of DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @returns DAGDetailsResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagServiceGetDagDetailsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-  }: {
-    dagId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }),
-    queryFn: () => DagService.getDagDetails({ dagId }),
-  });
-/**
- * Get Dag Tags
- * Get all DAG tags.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.tagNamePattern
- * @returns DAGTagCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseDagServiceGetDagTagsData = (
-  queryClient: QueryClient,
-  {
-    limit,
-    offset,
-    orderBy,
-    tagNamePattern,
-  }: {
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    tagNamePattern?: string;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }),
-    queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }),
-  });
-/**
- * Get Event Log
- * @param data The data for the request.
- * @param data.eventLogId
- * @returns EventLogResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseEventLogServiceGetEventLogData = (
-  queryClient: QueryClient,
-  {
-    eventLogId,
-  }: {
-    eventLogId: number;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }),
-    queryFn: () => EventLogService.getEventLog({ eventLogId }),
-  });
-/**
- * Get Event Logs
- * Get all Event Logs.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.dagId
- * @param data.taskId
- * @param data.runId
- * @param data.mapIndex
- * @param data.tryNumber
- * @param data.owner
- * @param data.event
- * @param data.excludedEvents
- * @param data.includedEvents
- * @param data.before
- * @param data.after
- * @returns EventLogCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseEventLogServiceGetEventLogsData = (
-  queryClient: QueryClient,
-  {
-    after,
-    before,
-    dagId,
-    event,
-    excludedEvents,
-    includedEvents,
-    limit,
-    mapIndex,
-    offset,
-    orderBy,
-    owner,
-    runId,
-    taskId,
-    tryNumber,
-  }: {
-    after?: string;
-    before?: string;
-    dagId?: string;
-    event?: string;
-    excludedEvents?: string[];
-    includedEvents?: string[];
-    limit?: number;
-    mapIndex?: number;
-    offset?: number;
-    orderBy?: string;
-    owner?: string;
-    runId?: string;
-    taskId?: string;
-    tryNumber?: number;
-  } = {},
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({
-      after,
-      before,
-      dagId,
-      event,
-      excludedEvents,
-      includedEvents,
-      limit,
-      mapIndex,
-      offset,
-      orderBy,
-      owner,
-      runId,
-      taskId,
-      tryNumber,
-    }),
-    queryFn: () =>
-      EventLogService.getEventLogs({
-        after,
-        before,
-        dagId,
-        event,
-        excludedEvents,
-        includedEvents,
-        limit,
-        mapIndex,
-        offset,
-        orderBy,
-        owner,
-        runId,
-        taskId,
-        tryNumber,
-      }),
-  });
-/**
- * Get Extra Links
- * Get extra links for task instance.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @returns ExtraLinkCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseExtraLinksServiceGetExtraLinksData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex?: number;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }),
-    queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }),
-  });
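These collection endpoints share the same `limit`/`offset` paging convention. A hedged paging sketch against the event-log endpoint; it assumes the collection response exposes `total_entries` and `event_logs` as in Airflow's public API schema, which should be verified against types.gen:

import { EventLogService } from "../requests/services.gen";

// Drain all pages of event logs, stopping once `total_entries` is reached.
export async function fetchAllEventLogs(pageSize = 100) {
  const all: unknown[] = [];
  for (let offset = 0; ; offset += pageSize) {
    const page = await EventLogService.getEventLogs({ limit: pageSize, offset });
    const rows = page.event_logs ?? []; // assumed response field
    all.push(...rows);
    if (rows.length === 0 || all.length >= (page.total_entries ?? 0)) break;
  }
  return all;
}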
-/**
- * Get Extra Links
- * Get extra links for task instance.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @returns ExtraLinkCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetExtraLinksData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex?: number;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }),
-    queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }),
-  });
-/**
- * Get Task Instance
- * Get task instance.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @returns TaskInstanceResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetTaskInstanceData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }),
-    queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }),
-  });
-/**
- * Get Mapped Task Instances
- * Get list of mapped task instances.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.runAfterGte
- * @param data.runAfterLte
- * @param data.logicalDateGte
- * @param data.logicalDateLte
- * @param data.startDateGte
- * @param data.startDateLte
- * @param data.endDateGte
- * @param data.endDateLte
- * @param data.updatedAtGte
- * @param data.updatedAtLte
- * @param data.durationGte
- * @param data.durationLte
- * @param data.state
- * @param data.pool
- * @param data.queue
- * @param data.executor
- * @param data.versionNumber
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @returns TaskInstanceCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetMappedTaskInstancesData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    durationGte,
-    durationLte,
-    endDateGte,
-    endDateLte,
-    executor,
-    limit,
-    logicalDateGte,
-    logicalDateLte,
-    offset,
-    orderBy,
-    pool,
-    queue,
-    runAfterGte,
-    runAfterLte,
-    startDateGte,
-    startDateLte,
-    state,
-    taskId,
-    updatedAtGte,
-    updatedAtLte,
-    versionNumber,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    durationGte?: number;
-    durationLte?: number;
-    endDateGte?: string;
-    endDateLte?: string;
-    executor?: string[];
-    limit?: number;
-    logicalDateGte?: string;
-    logicalDateLte?: string;
-    offset?: number;
-    orderBy?: string;
-    pool?: string[];
-    queue?: string[];
-    runAfterGte?: string;
-    runAfterLte?: string;
-    startDateGte?: string;
-    startDateLte?: string;
-    state?: string[];
-    taskId: string;
-    updatedAtGte?: string;
-    updatedAtLte?: string;
-    versionNumber?: number[];
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({
-      dagId,
-      dagRunId,
-      durationGte,
-      durationLte,
-      endDateGte,
-      endDateLte,
-      executor,
-      limit,
-      logicalDateGte,
-      logicalDateLte,
-      offset,
-      orderBy,
-      pool,
-      queue,
-      runAfterGte,
-      runAfterLte,
-      startDateGte,
-      startDateLte,
-      state,
-      taskId,
-      updatedAtGte,
-      updatedAtLte,
-      versionNumber,
-    }),
-    queryFn: () =>
-      TaskInstanceService.getMappedTaskInstances({
-        dagId,
-        dagRunId,
-        durationGte,
-        durationLte,
-        endDateGte,
-        endDateLte,
-        executor,
-        limit,
-        logicalDateGte,
-        logicalDateLte,
-        offset,
-        orderBy,
-        pool,
-        queue,
-        runAfterGte,
-        runAfterLte,
-        startDateGte,
-        startDateLte,
-        state,
-        taskId,
-        updatedAtGte,
-        updatedAtLte,
-        versionNumber,
-      }),
-  });
-/**
- * Get Task Instance Dependencies
- * Get dependencies blocking task from getting scheduled.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @returns TaskDependencyCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetTaskInstanceDependenciesData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex: number;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({
-      dagId,
-      dagRunId,
-      mapIndex,
-      taskId,
-    }),
-    queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }),
-  });
-/**
- * Get Task Instance Dependencies
- * Get dependencies blocking task from getting scheduled.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @returns TaskDependencyCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetTaskInstanceDependencies1Data = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex?: number;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependencies1KeyFn({
-      dagId,
-      dagRunId,
-      mapIndex,
-      taskId,
-    }),
-    queryFn: () => TaskInstanceService.getTaskInstanceDependencies1({ dagId, dagRunId, mapIndex, taskId }),
-  });
-/**
- * Get Task Instance Tries
- * Get list of task instances history.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @returns TaskInstanceHistoryCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetTaskInstanceTriesData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex?: number;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }),
-    queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }),
-  });
-/**
- * Get Mapped Task Instance Tries
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @returns TaskInstanceHistoryCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTriesData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex: number;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({
-      dagId,
-      dagRunId,
-      mapIndex,
-      taskId,
-    }),
-    queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }),
-  });
-/**
- * Get Mapped Task Instance
- * Get task instance.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @returns TaskInstanceResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetMappedTaskInstanceData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex: number;
-    taskId: string;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }),
-    queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }),
-  });
-/**
- * Get Task Instances
- * Get list of task instances.
- *
- * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs
- * and DAG runs.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.runAfterGte
- * @param data.runAfterLte
- * @param data.logicalDateGte
- * @param data.logicalDateLte
- * @param data.startDateGte
- * @param data.startDateLte
- * @param data.endDateGte
- * @param data.endDateLte
- * @param data.updatedAtGte
- * @param data.updatedAtLte
- * @param data.durationGte
- * @param data.durationLte
- * @param data.taskDisplayNamePattern
- * @param data.state
- * @param data.pool
- * @param data.queue
- * @param data.executor
- * @param data.versionNumber
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @returns TaskInstanceCollectionResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetTaskInstancesData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    durationGte,
-    durationLte,
-    endDateGte,
-    endDateLte,
-    executor,
-    limit,
-    logicalDateGte,
-    logicalDateLte,
-    offset,
-    orderBy,
-    pool,
-    queue,
-    runAfterGte,
-    runAfterLte,
-    startDateGte,
-    startDateLte,
-    state,
-    taskDisplayNamePattern,
-    taskId,
-    updatedAtGte,
-    updatedAtLte,
-    versionNumber,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    durationGte?: number;
-    durationLte?: number;
-    endDateGte?: string;
-    endDateLte?: string;
-    executor?: string[];
-    limit?: number;
-    logicalDateGte?: string;
-    logicalDateLte?: string;
-    offset?: number;
-    orderBy?: string;
-    pool?: string[];
-    queue?: string[];
-    runAfterGte?: string;
-    runAfterLte?: string;
-    startDateGte?: string;
-    startDateLte?: string;
-    state?: string[];
-    taskDisplayNamePattern?: string;
-    taskId?: string;
-    updatedAtGte?: string;
-    updatedAtLte?: string;
-    versionNumber?: number[];
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({
-      dagId,
-      dagRunId,
-      durationGte,
-      durationLte,
-      endDateGte,
-      endDateLte,
-      executor,
-      limit,
-      logicalDateGte,
-      logicalDateLte,
-      offset,
-      orderBy,
-      pool,
-      queue,
-      runAfterGte,
-      runAfterLte,
-      startDateGte,
-      startDateLte,
-      state,
-      taskDisplayNamePattern,
-      taskId,
-      updatedAtGte,
-      updatedAtLte,
-      versionNumber,
-    }),
-    queryFn: () =>
-      TaskInstanceService.getTaskInstances({
-        dagId,
-        dagRunId,
-        durationGte,
-        durationLte,
-        endDateGte,
-        endDateLte,
-        executor,
-        limit,
-        logicalDateGte,
-        logicalDateLte,
-        offset,
-        orderBy,
-        pool,
-        queue,
-        runAfterGte,
-        runAfterLte,
-        startDateGte,
-        startDateLte,
-        state,
-        taskDisplayNamePattern,
-        taskId,
-        updatedAtGte,
-        updatedAtLte,
-        versionNumber,
-      }),
-  });
-/**
- * Get Task Instance Try Details
- * Get task instance details by try number.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.taskTryNumber
- * @param data.mapIndex
- * @returns TaskInstanceHistoryResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetTaskInstanceTryDetailsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-    taskTryNumber,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex?: number;
-    taskId: string;
-    taskTryNumber: number;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({
-      dagId,
-      dagRunId,
-      mapIndex,
-      taskId,
-      taskTryNumber,
-    }),
-    queryFn: () =>
-      TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }),
-  });
-/**
- * Get Mapped Task Instance Try Details
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.taskTryNumber
- * @param data.mapIndex
- * @returns TaskInstanceHistoryResponse Successful Response
- * @throws ApiError
- */
-export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTryDetailsData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    dagRunId,
-    mapIndex,
-    taskId,
-    taskTryNumber,
-  }: {
-    dagId: string;
-    dagRunId: string;
-    mapIndex: number;
-    taskId: string;
-    taskTryNumber: number;
-  },
-) =>
-  queryClient.ensureQueryData({
-    queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({
-      dagId,
-      dagRunId,
-      mapIndex,
-      taskId,
-      taskTryNumber,
-    }),
-    queryFn: () =>
-      TaskInstanceService.getMappedTaskInstanceTryDetails({
-        dagId,
-        dagRunId,
-        mapIndex,
-        taskId,
-        taskTryNumber,
-      }),
-  });
-/**
- * Get Log
- * Get logs for a specific task instance.
- * @param data The data for the request.
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.tryNumber - * @param data.fullContent - * @param data.mapIndex - * @param data.token - * @param data.accept - * @returns TaskInstancesLogResponse Successful Response - * @throws ApiError - */ -export const ensureUseTaskInstanceServiceGetLogData = ( - queryClient: QueryClient, - { - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }), - queryFn: () => - TaskInstanceService.getLog({ - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }), - }); -/** - * Get Import Error - * Get an import error. - * @param data The data for the request. - * @param data.importErrorId - * @returns ImportErrorResponse Successful Response - * @throws ApiError - */ -export const ensureUseImportErrorServiceGetImportErrorData = ( - queryClient: QueryClient, - { - importErrorId, - }: { - importErrorId: number; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), - queryFn: () => ImportErrorService.getImportError({ importErrorId }), - }); -/** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseImportErrorServiceGetImportErrorsData = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), - queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }), - }); -/** - * Get Jobs - * Get all jobs. - * @param data The data for the request. 
- * @param data.isAlive - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.jobState - * @param data.jobType - * @param data.hostname - * @param data.executorClass - * @returns JobCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseJobServiceGetJobsData = ( - queryClient: QueryClient, - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseJobServiceGetJobsKeyFn({ - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }), - queryFn: () => - JobService.getJobs({ - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }), - }); -/** - * Get Plugins - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUsePluginServiceGetPluginsData = ( - queryClient: QueryClient, - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), - queryFn: () => PluginService.getPlugins({ limit, offset }), - }); -/** - * Get Pool - * Get a pool. - * @param data The data for the request. - * @param data.poolName - * @returns PoolResponse Successful Response - * @throws ApiError - */ -export const ensureUsePoolServiceGetPoolData = ( - queryClient: QueryClient, - { - poolName, - }: { - poolName: string; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), - queryFn: () => PoolService.getPool({ poolName }), - }); -/** - * Get Pools - * Get all pools entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.poolNamePattern - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUsePoolServiceGetPoolsData = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - poolNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), - queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }), - }); -/** - * Get Providers - * Get providers. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseProviderServiceGetProvidersData = ( - queryClient: QueryClient, - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), - queryFn: () => ProviderService.getProviders({ limit, offset }), - }); -/** - * Get Xcom Entry - * Get an XCom entry. - * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @param data.dagRunId - * @param data.xcomKey - * @param data.mapIndex - * @param data.deserialize - * @param data.stringify - * @returns unknown Successful Response - * @throws ApiError - */ -export const ensureUseXcomServiceGetXcomEntryData = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }), - queryFn: () => - XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), - }); -/** - * Get Xcom Entries - * Get all XCom entries. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.xcomKey - * @param data.mapIndex - * @param data.limit - * @param data.offset - * @returns XComCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseXcomServiceGetXcomEntriesData = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - limit, - mapIndex, - offset, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ - dagId, - dagRunId, - limit, - mapIndex, - offset, - taskId, - xcomKey, - }), - queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), - }); -/** - * Get Tasks - * Get tasks for DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseTaskServiceGetTasksData = ( - queryClient: QueryClient, - { - dagId, - orderBy, - }: { - dagId: string; - orderBy?: string; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), - queryFn: () => TaskService.getTasks({ dagId, orderBy }), - }); -/** - * Get Task - * Get simplified representation of a task. - * @param data The data for the request. 
- * @param data.dagId - * @param data.taskId - * @returns TaskResponse Successful Response - * @throws ApiError - */ -export const ensureUseTaskServiceGetTaskData = ( - queryClient: QueryClient, - { - dagId, - taskId, - }: { - dagId: string; - taskId: unknown; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), - queryFn: () => TaskService.getTask({ dagId, taskId }), - }); -/** - * Get Variable - * Get a variable entry. - * @param data The data for the request. - * @param data.variableKey - * @returns VariableResponse Successful Response - * @throws ApiError - */ -export const ensureUseVariableServiceGetVariableData = ( - queryClient: QueryClient, - { - variableKey, - }: { - variableKey: string; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), - queryFn: () => VariableService.getVariable({ variableKey }), - }); -/** - * Get Variables - * Get all Variables entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.variableKeyPattern - * @returns VariableCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseVariableServiceGetVariablesData = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - variableKeyPattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), - queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }), - }); -/** - * Get Dag Version - * Get one Dag Version. - * @param data The data for the request. - * @param data.dagId - * @param data.versionNumber - * @returns DagVersionResponse Successful Response - * @throws ApiError - */ -export const ensureUseDagVersionServiceGetDagVersionData = ( - queryClient: QueryClient, - { - dagId, - versionNumber, - }: { - dagId: string; - versionNumber: number; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), - queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }), - }); -/** - * Get Dag Versions - * Get all DAG Versions. - * - * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.versionNumber - * @param data.bundleName - * @param data.bundleVersion - * @param data.orderBy - * @returns DAGVersionCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseDagVersionServiceGetDagVersionsData = ( - queryClient: QueryClient, - { - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }), - queryFn: () => - DagVersionService.getDagVersions({ - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }), - }); -/** - * Get Health - * @returns HealthInfoResponse Successful Response - * @throws ApiError - */ -export const ensureUseMonitorServiceGetHealthData = (queryClient: QueryClient) => - queryClient.ensureQueryData({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(), - queryFn: () => MonitorService.getHealth(), - }); -/** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ -export const ensureUseVersionServiceGetVersionData = (queryClient: QueryClient) => - queryClient.ensureQueryData({ - queryKey: Common.UseVersionServiceGetVersionKeyFn(), - queryFn: () => VersionService.getVersion(), - }); -/** - * Login - * Redirect to the login URL depending on the AuthManager configured. - * @param data The data for the request. - * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ -export const ensureUseLoginServiceLoginData = ( - queryClient: QueryClient, - { - next, - }: { - next?: string; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseLoginServiceLoginKeyFn({ next }), - queryFn: () => LoginService.login({ next }), - }); -/** - * Logout - * Logout the user. - * @param data The data for the request. - * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ -export const ensureUseLoginServiceLogoutData = ( - queryClient: QueryClient, - { - next, - }: { - next?: string; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), - queryFn: () => LoginService.logout({ next }), - }); -/** - * Get Auth Menus - * @returns MenuItemCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseAuthLinksServiceGetAuthMenusData = (queryClient: QueryClient) => - queryClient.ensureQueryData({ - queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), - queryFn: () => AuthLinksService.getAuthMenus(), - }); -/** - * Recent Dag Runs - * Get recent DAG runs. - * @param data The data for the request. 
- * @param data.dagRunsLimit - * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIds - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @returns DAGWithLatestDagRunsCollectionResponse Successful Response - * @throws ApiError - */ -export const ensureUseDagsServiceRecentDagRunsData = ( - queryClient: QueryClient, - { - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseDagsServiceRecentDagRunsKeyFn({ - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }), - queryFn: () => - DagsService.recentDagRuns({ - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }), - }); -/** - * Get Dependencies - * Dependencies graph. - * @param data The data for the request. - * @param data.nodeId - * @returns BaseGraphResponse Successful Response - * @throws ApiError - */ -export const ensureUseDependenciesServiceGetDependenciesData = ( - queryClient: QueryClient, - { - nodeId, - }: { - nodeId?: string; - } = {}, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), - queryFn: () => DependenciesService.getDependencies({ nodeId }), - }); -/** - * Historical Metrics - * Return cluster activity historical metrics. - * @param data The data for the request. - * @param data.startDate - * @param data.endDate - * @returns HistoricalMetricDataResponse Successful Response - * @throws ApiError - */ -export const ensureUseDashboardServiceHistoricalMetricsData = ( - queryClient: QueryClient, - { - endDate, - startDate, - }: { - endDate?: string; - startDate: string; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), - queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }), - }); -/** - * Structure Data - * Get Structure Data. - * @param data The data for the request. 
- * @param data.dagId - * @param data.includeUpstream - * @param data.includeDownstream - * @param data.root - * @param data.externalDependencies - * @param data.versionNumber - * @returns StructureDataResponse Successful Response - * @throws ApiError - */ -export const ensureUseStructureServiceStructureDataData = ( - queryClient: QueryClient, - { - dagId, - externalDependencies, - includeDownstream, - includeUpstream, - root, - versionNumber, - }: { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string; - versionNumber?: number; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseStructureServiceStructureDataKeyFn({ - dagId, - externalDependencies, - includeDownstream, - includeUpstream, - root, - versionNumber, - }), - queryFn: () => - StructureService.structureData({ - dagId, - externalDependencies, - includeDownstream, - includeUpstream, - root, - versionNumber, - }), - }); -/** - * Grid Data - * Return grid data. - * @param data The data for the request. - * @param data.dagId - * @param data.includeUpstream - * @param data.includeDownstream - * @param data.root - * @param data.offset - * @param data.runType - * @param data.state - * @param data.limit - * @param data.orderBy - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @returns GridResponse Successful Response - * @throws ApiError - */ -export const ensureUseGridServiceGridDataData = ( - queryClient: QueryClient, - { - dagId, - includeDownstream, - includeUpstream, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - root, - runAfterGte, - runAfterLte, - runType, - state, - }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; - }, -) => - queryClient.ensureQueryData({ - queryKey: Common.UseGridServiceGridDataKeyFn({ - dagId, - includeDownstream, - includeUpstream, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - root, - runAfterGte, - runAfterLte, - runType, - state, - }), - queryFn: () => - GridService.gridData({ - dagId, - includeDownstream, - includeUpstream, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - root, - runAfterGte, - runAfterLte, - runType, - state, - }), - }); +* Get Assets +* Get assets. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.uriPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
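+* A hand-written sketch (not generated output; the pattern and limit are illustrative),
+* assuming a `QueryClient` instance is in scope: prefetch assets whose name contains
+* "sales", using the SQL LIKE wildcard `%` documented above.
+* @example
+* await ensureUseAssetServiceGetAssetsData(queryClient, { namePattern: "%sales%", limit: 50 });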
+* @param data.dagIds +* @param data.onlyActive +* @param data.orderBy +* @returns AssetCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetAssetsData = (queryClient: QueryClient, { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: { + dagIds?: string[]; + limit?: number; + namePattern?: string; + offset?: number; + onlyActive?: boolean; + orderBy?: string[]; + uriPattern?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) }); +/** +* Get Asset Aliases +* Get asset aliases. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.orderBy +* @returns AssetAliasCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetAssetAliasesData = (queryClient: QueryClient, { limit, namePattern, offset, orderBy }: { + limit?: number; + namePattern?: string; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) }); +/** +* Get Asset Alias +* Get an asset alias. +* @param data The data for the request. +* @param data.assetAliasId +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetAssetAliasData = (queryClient: QueryClient, { assetAliasId }: { + assetAliasId: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) }); +/** +* Get Asset Events +* Get asset events. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.assetId +* @param data.sourceDagId +* @param data.sourceTaskId +* @param data.sourceRunId +* @param data.sourceMapIndex +* @param data.timestampGte +* @param data.timestampLte +* @returns AssetEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetAssetEventsData = (queryClient: QueryClient, { assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: { + assetId?: number; + limit?: number; + offset?: number; + orderBy?: string[]; + sourceDagId?: string; + sourceMapIndex?: number; + sourceRunId?: string; + sourceTaskId?: string; + timestampGte?: string; + timestampLte?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) }); +/** +* Get Asset Queued Events +* Get queued asset events for an asset. +* @param data The data for the request. 
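+* A usage sketch (the id and timestamp are invented): ensure the queued events for
+* asset 42 recorded before a given ISO-8601 instant are in the query cache.
+* @example
+* await ensureUseAssetServiceGetAssetQueuedEventsData(queryClient, { assetId: 42, before: "2025-06-01T00:00:00Z" });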
+* @param data.assetId +* @param data.before +* @returns QueuedEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetAssetQueuedEventsData = (queryClient: QueryClient, { assetId, before }: { + assetId: number; + before?: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) }); +/** +* Get Asset +* Get an asset. +* @param data The data for the request. +* @param data.assetId +* @returns AssetResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetAssetData = (queryClient: QueryClient, { assetId }: { + assetId: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }), queryFn: () => AssetService.getAsset({ assetId }) }); +/** +* Get Dag Asset Queued Events +* Get queued asset events for a DAG. +* @param data The data for the request. +* @param data.dagId +* @param data.before +* @returns QueuedEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetDagAssetQueuedEventsData = (queryClient: QueryClient, { before, dagId }: { + before?: string; + dagId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) }); +/** +* Get Dag Asset Queued Event +* Get a queued asset event for a DAG. +* @param data The data for the request. +* @param data.dagId +* @param data.assetId +* @param data.before +* @returns QueuedEventResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceGetDagAssetQueuedEventData = (queryClient: QueryClient, { assetId, before, dagId }: { + assetId: number; + before?: string; + dagId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) }); +/** +* Next Run Assets +* @param data The data for the request. +* @param data.dagId +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseAssetServiceNextRunAssetsData = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), queryFn: () => AssetService.nextRunAssets({ dagId }) }); +/** +* List Backfills +* @param data The data for the request. +* @param data.dagId +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns BackfillCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseBackfillServiceListBackfillsData = (queryClient: QueryClient, { dagId, limit, offset, orderBy }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string[]; +}) => queryClient.ensureQueryData({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) }); +/** +* Get Backfill +* @param data The data for the request. 
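+* A minimal sketch (the id is hypothetical): resolves from the cache when the
+* backfill is already loaded, otherwise fetches it once.
+* @example
+* await ensureUseBackfillServiceGetBackfillData(queryClient, { backfillId: 7 });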
+* @param data.backfillId +* @returns BackfillResponse Successful Response +* @throws ApiError +*/ +export const ensureUseBackfillServiceGetBackfillData = (queryClient: QueryClient, { backfillId }: { + backfillId: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), queryFn: () => BackfillService.getBackfill({ backfillId }) }); +/** +* List Backfills Ui +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.dagId +* @param data.active +* @returns BackfillCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseBackfillServiceListBackfillsUiData = (queryClient: QueryClient, { active, dagId, limit, offset, orderBy }: { + active?: boolean; + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseBackfillServiceListBackfillsUiKeyFn({ active, dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfillsUi({ active, dagId, limit, offset, orderBy }) }); +/** +* Get Connection +* Get a connection entry. +* @param data The data for the request. +* @param data.connectionId +* @returns ConnectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseConnectionServiceGetConnectionData = (queryClient: QueryClient, { connectionId }: { + connectionId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), queryFn: () => ConnectionService.getConnection({ connectionId }) }); +/** +* Get Connections +* Get all connection entries. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.connectionIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns ConnectionCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseConnectionServiceGetConnectionsData = (queryClient: QueryClient, { connectionIdPattern, limit, offset, orderBy }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) }); +/** +* Hook Meta Data +* Retrieve information about available connection types (hook classes) and their parameters. +* @returns ConnectionHookMetaData Successful Response +* @throws ApiError +*/ +export const ensureUseConnectionServiceHookMetaDataData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(), queryFn: () => ConnectionService.hookMetaData() }); +/** +* Get Dag Run +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @returns DAGRunResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagRunServiceGetDagRunData = (queryClient: QueryClient, { dagId, dagRunId }: { + dagId: string; + dagRunId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) }); +/** +* Get Upstream Asset Events +* If dag run is asset-triggered, return the asset events that triggered it. 
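+* A sketch with invented identifiers: look up which asset events triggered a run.
+* @example
+* await ensureUseDagRunServiceGetUpstreamAssetEventsData(queryClient, { dagId: "etl_daily", dagRunId: "asset_triggered__2025-06-01" });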
+* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @returns AssetEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagRunServiceGetUpstreamAssetEventsData = (queryClient: QueryClient, { dagId, dagRunId }: { + dagId: string; + dagRunId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) }); +/** +* Get Dag Runs +* Get all DAG Runs. +* +* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. +* @param data The data for the request. +* @param data.dagId +* @param data.limit +* @param data.offset +* @param data.runAfterGte +* @param data.runAfterLte +* @param data.logicalDateGte +* @param data.logicalDateLte +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.updatedAtGte +* @param data.updatedAtLte +* @param data.runType +* @param data.state +* @param data.orderBy +* @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.triggeringUserNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns DAGRunCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagRunServiceGetDagRunsData = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + runAfterGte?: string; + runAfterLte?: string; + runIdPattern?: string; + runType?: string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + triggeringUserNamePattern?: string; + updatedAtGte?: string; + updatedAtLte?: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. 
+* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseDagRunServiceWaitDagRunUntilFinishedData = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseExperimentalServiceWaitDagRunUntilFinishedData = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.ensureQueryData({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); +/** +* Get Dag Source +* Get source code using file token. +* @param data The data for the request. +* @param data.dagId +* @param data.versionNumber +* @param data.accept +* @returns DAGSourceResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagSourceServiceGetDagSourceData = (queryClient: QueryClient, { accept, dagId, versionNumber }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) }); +/** +* Get Dag Stats +* Get Dag statistics. +* @param data The data for the request. +* @param data.dagIds +* @returns DagStatsCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagStatsServiceGetDagStatsData = (queryClient: QueryClient, { dagIds }: { + dagIds?: string[]; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), queryFn: () => DagStatsService.getDagStats({ dagIds }) }); +/** +* Get Dag Reports +* Get DAG report. +* @param data The data for the request. +* @param data.subdir +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseDagReportServiceGetDagReportsData = (queryClient: QueryClient, { subdir }: { + subdir: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }), queryFn: () => DagReportService.getDagReports({ subdir }) }); +/** +* Get Config +* @param data The data for the request. 
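+* An illustrative call (the section name is an assumption, not an API guarantee):
+* fetch a single configuration section rendered as JSON.
+* @example
+* await ensureUseConfigServiceGetConfigData(queryClient, { section: "core", accept: "application/json" });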
+* @param data.section +* @param data.accept +* @returns Config Successful Response +* @throws ApiError +*/ +export const ensureUseConfigServiceGetConfigData = (queryClient: QueryClient, { accept, section }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }), queryFn: () => ConfigService.getConfig({ accept, section }) }); +/** +* Get Config Value +* @param data The data for the request. +* @param data.section +* @param data.option +* @param data.accept +* @returns Config Successful Response +* @throws ApiError +*/ +export const ensureUseConfigServiceGetConfigValueData = (queryClient: QueryClient, { accept, option, section }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) }); +/** +* Get Configs +* Get configs for UI. +* @returns ConfigResponse Successful Response +* @throws ApiError +*/ +export const ensureUseConfigServiceGetConfigsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(), queryFn: () => ConfigService.getConfigs() }); +/** +* List Dag Warnings +* Get a list of DAG warnings. +* @param data The data for the request. +* @param data.dagId +* @param data.warningType +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns DAGWarningCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagWarningServiceListDagWarningsData = (queryClient: QueryClient, { dagId, limit, offset, orderBy, warningType }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + warningType?: DagWarningType; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) }); +/** +* Get Dags +* Get all DAGs. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.tags +* @param data.tagsMatchMode +* @param data.owners +* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
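+* A hand-written filter sketch (the pattern and flag are illustrative): unpaused
+* DAGs whose id starts with "etl_", using the LIKE wildcard `%` described above.
+* @example
+* await ensureUseDagServiceGetDagsData(queryClient, { dagIdPattern: "etl_%", paused: false });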
+* @param data.excludeStale +* @param data.paused +* @param data.lastDagRunState +* @param data.bundleName +* @param data.bundleVersion +* @param data.dagRunStartDateGte +* @param data.dagRunStartDateLte +* @param data.dagRunEndDateGte +* @param data.dagRunEndDateLte +* @param data.dagRunState +* @param data.orderBy +* @param data.isFavorite +* @returns DAGCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagServiceGetDagsData = (queryClient: QueryClient, { bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { + bundleName?: string; + bundleVersion?: string; + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + isFavorite?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string[]; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagsKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +/** +* Get Dag +* Get basic information about a DAG. +* @param data The data for the request. +* @param data.dagId +* @returns DAGResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagServiceGetDagData = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), queryFn: () => DagService.getDag({ dagId }) }); +/** +* Get Dag Details +* Get details of DAG. +* @param data The data for the request. +* @param data.dagId +* @returns DAGDetailsResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagServiceGetDagDetailsData = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), queryFn: () => DagService.getDagDetails({ dagId }) }); +/** +* Get Dag Tags +* Get all DAG tags. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.tagNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
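+* A sketch (the pattern and ordering field are assumptions): tags containing
+* "prod", ordered by name.
+* @example
+* await ensureUseDagServiceGetDagTagsData(queryClient, { tagNamePattern: "%prod%", orderBy: ["name"] });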
+* @returns DAGTagCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagServiceGetDagTagsData = (queryClient: QueryClient, { limit, offset, orderBy, tagNamePattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + tagNamePattern?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) }); +/** +* Get Dags +* Get DAGs with recent DagRun. +* @param data The data for the request. +* @param data.dagRunsLimit +* @param data.limit +* @param data.offset +* @param data.tags +* @param data.tagsMatchMode +* @param data.owners +* @param data.dagIds +* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.excludeStale +* @param data.paused +* @param data.lastDagRunState +* @param data.bundleName +* @param data.bundleVersion +* @param data.orderBy +* @param data.isFavorite +* @returns DAGWithLatestDagRunsCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagServiceGetDagsUiData = (queryClient: QueryClient, { bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { + bundleName?: string; + bundleVersion?: string; + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + isFavorite?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string[]; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDagsUi({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +/** +* Get Latest Run Info +* Get latest run. +* @param data The data for the request. +* @param data.dagId +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseDagServiceGetLatestRunInfoData = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetLatestRunInfoKeyFn({ dagId }), queryFn: () => DagService.getLatestRunInfo({ dagId }) }); +/** +* Get Event Log +* @param data The data for the request. +* @param data.eventLogId +* @returns EventLogResponse Successful Response +* @throws ApiError +*/ +export const ensureUseEventLogServiceGetEventLogData = (queryClient: QueryClient, { eventLogId }: { + eventLogId: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), queryFn: () => EventLogService.getEventLog({ eventLogId }) }); +/** +* Get Event Logs +* Get all Event Logs. +* @param data The data for the request. 
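+* An illustrative audit query (all values invented): events for one DAG recorded
+* after a given instant, capped at 100 rows.
+* @example
+* await ensureUseEventLogServiceGetEventLogsData(queryClient, { dagId: "etl_daily", after: "2025-06-01T00:00:00Z", limit: 100 });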
+* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.dagId +* @param data.taskId +* @param data.runId +* @param data.mapIndex +* @param data.tryNumber +* @param data.owner +* @param data.event +* @param data.excludedEvents +* @param data.includedEvents +* @param data.before +* @param data.after +* @returns EventLogCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseEventLogServiceGetEventLogsData = (queryClient: QueryClient, { after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string[]; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) }); +/** +* Get Extra Links +* Get extra links for task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns ExtraLinkCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseExtraLinksServiceGetExtraLinksData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Extra Links +* Get extra links for task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns ExtraLinkCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetExtraLinksData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instance +* Get task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns TaskInstanceResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetTaskInstanceData = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Task Instances +* Get list of mapped task instances. +* @param data The data for the request. 
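+* A sketch with hypothetical ids: the failed expansions of one mapped task within
+* a single run.
+* @example
+* await ensureUseTaskInstanceServiceGetMappedTaskInstancesData(queryClient, { dagId: "etl_daily", dagRunId: "manual__2025-06-01", taskId: "process_file", state: ["failed"] });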
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.runAfterGte +* @param data.runAfterLte +* @param data.logicalDateGte +* @param data.logicalDateLte +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.updatedAtGte +* @param data.updatedAtLte +* @param data.durationGte +* @param data.durationLte +* @param data.state +* @param data.pool +* @param data.queue +* @param data.executor +* @param data.versionNumber +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns TaskInstanceCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetMappedTaskInstancesData = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }), queryFn: () => TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); +/** +* Get Task Instance Dependencies +* Get dependencies blocking task from getting scheduled. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskDependencyCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instance Dependencies +* Get dependencies blocking task from getting scheduled. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskDependencyCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetTaskInstanceDependenciesData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instance Tries +* Get list of task instances history. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskInstanceHistoryCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetTaskInstanceTriesData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Mapped Task Instance Tries +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskInstanceHistoryCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTriesData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Mapped Task Instance +* Get task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskInstanceResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetMappedTaskInstanceData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instances +* Get list of task instances. +* +* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs +* and DAG runs. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.runAfterGte +* @param data.runAfterLte +* @param data.logicalDateGte +* @param data.logicalDateLte +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.updatedAtGte +* @param data.updatedAtLte +* @param data.durationGte +* @param data.durationLte +* @param data.taskDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.state +* @param data.pool +* @param data.queue +* @param data.executor +* @param data.versionNumber +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns TaskInstanceCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetTaskInstancesData = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); +/** +* Get Task Instance Try Details +* Get task instance details by try number. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.taskTryNumber +* @param data.mapIndex +* @returns TaskInstanceHistoryResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetTaskInstanceTryDetailsData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); +/** +* Get Mapped Task Instance Try Details +* @param data The data for the request. 
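As the endpoint docstring above notes, `~` acts as a wildcard for dagId and dagRunId. A sketch of a cross-DAG query under the same assumed setup as the earlier sketch; the filter values are illustrative:

import { QueryClient } from "@tanstack/react-query";
import { ensureUseTaskInstanceServiceGetTaskInstancesData } from "./ensureQueryData"; // assumed path

async function findFailedCustomerTasks(queryClient: QueryClient) {
  return ensureUseTaskInstanceServiceGetTaskInstancesData(queryClient, {
    dagId: "~",                            // `~` = all DAGs, per the docstring
    dagRunId: "~",                         // `~` = all runs
    state: ["failed"],
    taskDisplayNamePattern: "%customer%",  // SQL LIKE wildcard, not a regex
  });
}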
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.taskTryNumber +* @param data.mapIndex +* @returns TaskInstanceHistoryResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTryDetailsData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); +/** +* Get Log +* Get logs for a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.tryNumber +* @param data.fullContent +* @param data.mapIndex +* @param data.token +* @param data.accept +* @returns TaskInstancesLogResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetLogData = (queryClient: QueryClient, { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) }); +/** +* Get External Log Url +* Get external log URL for a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.tryNumber +* @param data.mapIndex +* @returns ExternalLogUrlResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskInstanceServiceGetExternalLogUrlData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, tryNumber }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + tryNumber: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetExternalLogUrlKeyFn({ dagId, dagRunId, mapIndex, taskId, tryNumber }), queryFn: () => TaskInstanceService.getExternalLogUrl({ dagId, dagRunId, mapIndex, taskId, tryNumber }) }); +/** +* Get Import Error +* Get an import error. +* @param data The data for the request. +* @param data.importErrorId +* @returns ImportErrorResponse Successful Response +* @throws ApiError +*/ +export const ensureUseImportErrorServiceGetImportErrorData = (queryClient: QueryClient, { importErrorId }: { + importErrorId: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), queryFn: () => ImportErrorService.getImportError({ importErrorId }) }); +/** +* Get Import Errors +* Get all import errors. +* @param data The data for the request. 
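Because accept on getLog is a union of string literals, the media type is checked at compile time. A sketch requesting NDJSON logs; the identifiers are illustrative:

import { QueryClient } from "@tanstack/react-query";
import { ensureUseTaskInstanceServiceGetLogData } from "./ensureQueryData"; // assumed path

async function warmTaskLog(queryClient: QueryClient) {
  return ensureUseTaskInstanceServiceGetLogData(queryClient, {
    dagId: "example_dag",            // illustrative identifiers only
    dagRunId: "manual__2025-01-01",
    taskId: "extract",
    tryNumber: 1,
    accept: "application/x-ndjson",  // must be one of the three literals above
    fullContent: true,
  });
}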
+* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns ImportErrorCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseImportErrorServiceGetImportErrorsData = (queryClient: QueryClient, { limit, offset, orderBy }: { + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) }); +/** +* Get Jobs +* Get all jobs. +* @param data The data for the request. +* @param data.isAlive +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.jobState +* @param data.jobType +* @param data.hostname +* @param data.executorClass +* @returns JobCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseJobServiceGetJobsData = (queryClient: QueryClient, { endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + startDateGte?: string; + startDateLte?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) }); +/** +* Get Plugins +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @returns PluginCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUsePluginServiceGetPluginsData = (queryClient: QueryClient, { limit, offset }: { + limit?: number; + offset?: number; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), queryFn: () => PluginService.getPlugins({ limit, offset }) }); +/** +* Import Errors +* @returns PluginImportErrorCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUsePluginServiceImportErrorsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UsePluginServiceImportErrorsKeyFn(), queryFn: () => PluginService.importErrors() }); +/** +* Get Pool +* Get a pool. +* @param data The data for the request. +* @param data.poolName +* @returns PoolResponse Successful Response +* @throws ApiError +*/ +export const ensureUsePoolServiceGetPoolData = (queryClient: QueryClient, { poolName }: { + poolName: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), queryFn: () => PoolService.getPool({ poolName }) }); +/** +* Get Pools +* Get all pools entries. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.poolNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
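Note the two parameter shapes above: helpers whose parameters are all optional default the object to {} so the client alone suffices, while helpers with required parameters omit the default and the compiler enforces the argument. A short sketch; the values are illustrative:

import { QueryClient } from "@tanstack/react-query";
// Assumed path for the generated helpers shown above.
import { ensureUseImportErrorServiceGetImportErrorsData, ensureUsePoolServiceGetPoolData } from "./ensureQueryData";

async function warmCaches(queryClient: QueryClient) {
  // All params optional: callable with the client alone.
  await ensureUseImportErrorServiceGetImportErrorsData(queryClient);
  // Required param: omitting `poolName` is a type error.
  await ensureUsePoolServiceGetPoolData(queryClient, { poolName: "default_pool" });
}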
+* @returns PoolCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUsePoolServiceGetPoolsData = (queryClient: QueryClient, { limit, offset, orderBy, poolNamePattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + poolNamePattern?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) }); +/** +* Get Providers +* Get providers. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @returns ProviderCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseProviderServiceGetProvidersData = (queryClient: QueryClient, { limit, offset }: { + limit?: number; + offset?: number; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), queryFn: () => ProviderService.getProviders({ limit, offset }) }); +/** +* Get Xcom Entry +* Get an XCom entry. +* @param data The data for the request. +* @param data.dagId +* @param data.taskId +* @param data.dagRunId +* @param data.xcomKey +* @param data.mapIndex +* @param data.deserialize +* @param data.stringify +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseXcomServiceGetXcomEntryData = (queryClient: QueryClient, { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) }); +/** +* Get Xcom Entries +* Get all XCom entries. +* +* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.xcomKey +* @param data.mapIndex +* @param data.limit +* @param data.offset +* @returns XComCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseXcomServiceGetXcomEntriesData = (queryClient: QueryClient, { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) }); +/** +* Get Tasks +* Get tasks for DAG. +* @param data The data for the request. +* @param data.dagId +* @param data.orderBy +* @returns TaskCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskServiceGetTasksData = (queryClient: QueryClient, { dagId, orderBy }: { + dagId: string; + orderBy?: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), queryFn: () => TaskService.getTasks({ dagId, orderBy }) }); +/** +* Get Task +* Get simplified representation of a task. +* @param data The data for the request. 
+* @param data.dagId +* @param data.taskId +* @returns TaskResponse Successful Response +* @throws ApiError +*/ +export const ensureUseTaskServiceGetTaskData = (queryClient: QueryClient, { dagId, taskId }: { + dagId: string; + taskId: unknown; +}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), queryFn: () => TaskService.getTask({ dagId, taskId }) }); +/** +* Get Variable +* Get a variable entry. +* @param data The data for the request. +* @param data.variableKey +* @returns VariableResponse Successful Response +* @throws ApiError +*/ +export const ensureUseVariableServiceGetVariableData = (queryClient: QueryClient, { variableKey }: { + variableKey: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), queryFn: () => VariableService.getVariable({ variableKey }) }); +/** +* Get Variables +* Get all Variables entries. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.variableKeyPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns VariableCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseVariableServiceGetVariablesData = (queryClient: QueryClient, { limit, offset, orderBy, variableKeyPattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + variableKeyPattern?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) }); +/** +* Get Dag Version +* Get one Dag Version. +* @param data The data for the request. +* @param data.dagId +* @param data.versionNumber +* @returns DagVersionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagVersionServiceGetDagVersionData = (queryClient: QueryClient, { dagId, versionNumber }: { + dagId: string; + versionNumber: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) }); +/** +* Get Dag Versions +* Get all DAG Versions. +* +* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. +* @param data The data for the request. +* @param data.dagId +* @param data.limit +* @param data.offset +* @param data.versionNumber +* @param data.bundleName +* @param data.bundleVersion +* @param data.orderBy +* @returns DAGVersionCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDagVersionServiceGetDagVersionsData = (queryClient: QueryClient, { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: { + bundleName?: string; + bundleVersion?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: string[]; + versionNumber?: number; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); +/** +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
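The *Pattern parameters documented above take SQL LIKE expressions, so % and _ are the only wildcards. A sketch filtering variables by key prefix; the pattern is illustrative:

import { QueryClient } from "@tanstack/react-query";
import { ensureUseVariableServiceGetVariablesData } from "./ensureQueryData"; // assumed path

async function warmAwsVariables(queryClient: QueryClient) {
  return ensureUseVariableServiceGetVariablesData(queryClient, {
    variableKeyPattern: "aws%",  // LIKE prefix match: every key starting with "aws"
    limit: 50,
  });
}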
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetHitlDetailData = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetMappedTiHitlDetailData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Hitl Details +* Get Human-in-the-loop details. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.dagId +* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.dagRunId +* @param data.taskId +* @param data.taskIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.state +* @param data.responseReceived +* @param data.userId +* @param data.subjectSearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.bodySearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns HITLDetailCollection Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetHitlDetailsData = (queryClient: QueryClient, { bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }: { + bodySearch?: string; + dagId?: string; + dagIdPattern?: string; + dagRunId?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + responseReceived?: boolean; + state?: string[]; + subjectSearch?: string; + taskId?: string; + taskIdPattern?: string; + userId?: string[]; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }), queryFn: () => HumanInTheLoopService.getHitlDetails({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }) }); +/** +* Get Health +* @returns HealthInfoResponse Successful Response +* @throws ApiError +*/ +export const ensureUseMonitorServiceGetHealthData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(), queryFn: () => MonitorService.getHealth() }); +/** +* Get Version +* Get version information. 
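Each helper builds its cache key through the matching *KeyFn from ./common, so other call sites can address the same cache entry, for example to force a refetch after responding to a HITL prompt. A sketch, assuming the KeyFn accepts the same params object the calls above pass it:

import { QueryClient } from "@tanstack/react-query";
import * as Common from "./common"; // the generated key functions used above

async function refreshHitlDetail(queryClient: QueryClient, dagId: string, dagRunId: string, taskId: string) {
  // Marks the cached entry stale so the next ensure/use call refetches it.
  await queryClient.invalidateQueries({
    queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }),
  });
}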
+* @returns VersionInfo Successful Response +* @throws ApiError +*/ +export const ensureUseVersionServiceGetVersionData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseVersionServiceGetVersionKeyFn(), queryFn: () => VersionService.getVersion() }); +/** +* Login +* Redirect to the login URL depending on the AuthManager configured. +* @param data The data for the request. +* @param data.next +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseLoginServiceLoginData = (queryClient: QueryClient, { next }: { + next?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }), queryFn: () => LoginService.login({ next }) }); +/** +* Logout +* Logout the user. +* @param data The data for the request. +* @param data.next +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseLoginServiceLogoutData = (queryClient: QueryClient, { next }: { + next?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), queryFn: () => LoginService.logout({ next }) }); +/** +* Refresh +* Refresh the authentication token. +* @param data The data for the request. +* @param data.next +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseLoginServiceRefreshData = (queryClient: QueryClient, { next }: { + next?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseLoginServiceRefreshKeyFn({ next }), queryFn: () => LoginService.refresh({ next }) }); +/** +* Get Auth Menus +* @returns MenuItemCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseAuthLinksServiceGetAuthMenusData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), queryFn: () => AuthLinksService.getAuthMenus() }); +/** +* Get Dependencies +* Dependencies graph. +* @param data The data for the request. +* @param data.nodeId +* @returns BaseGraphResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDependenciesServiceGetDependenciesData = (queryClient: QueryClient, { nodeId }: { + nodeId?: string; +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), queryFn: () => DependenciesService.getDependencies({ nodeId }) }); +/** +* Historical Metrics +* Return cluster activity historical metrics. +* @param data The data for the request. +* @param data.startDate +* @param data.endDate +* @returns HistoricalMetricDataResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDashboardServiceHistoricalMetricsData = (queryClient: QueryClient, { endDate, startDate }: { + endDate?: string; + startDate: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) }); +/** +* Dag Stats +* Return basic DAG stats with counts of DAGs in various states. +* @returns DashboardDagStatsResponse Successful Response +* @throws ApiError +*/ +export const ensureUseDashboardServiceDagStatsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(), queryFn: () => DashboardService.dagStats() }); +/** +* Structure Data +* Get Structure Data. +* @param data The data for the request. 
+* @param data.dagId
+* @param data.includeUpstream
+* @param data.includeDownstream
+* @param data.root
+* @param data.externalDependencies
+* @param data.versionNumber
+* @returns StructureDataResponse Successful Response
+* @throws ApiError
+*/
+export const ensureUseStructureServiceStructureDataData = (queryClient: QueryClient, { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: {
+ dagId: string;
+ externalDependencies?: boolean;
+ includeDownstream?: boolean;
+ includeUpstream?: boolean;
+ root?: string;
+ versionNumber?: number;
+}) => queryClient.ensureQueryData({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) });
+/**
+* Get Dag Structure
+* Return dag structure for grid view.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.offset
+* @param data.limit
+* @param data.orderBy
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @returns GridNodeResponse Successful Response
+* @throws ApiError
+*/
+export const ensureUseGridServiceGetDagStructureData = (queryClient: QueryClient, { dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+ dagId: string;
+ limit?: number;
+ offset?: number;
+ orderBy?: string[];
+ runAfterGte?: string;
+ runAfterLte?: string;
+}) => queryClient.ensureQueryData({ queryKey: Common.UseGridServiceGetDagStructureKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }), queryFn: () => GridService.getDagStructure({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) });
+/**
+* Get Grid Runs
+* Get info about a run for the grid.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.offset
+* @param data.limit
+* @param data.orderBy
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @returns GridRunsResponse Successful Response
+* @throws ApiError
+*/
+export const ensureUseGridServiceGetGridRunsData = (queryClient: QueryClient, { dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+ dagId: string;
+ limit?: number;
+ offset?: number;
+ orderBy?: string[];
+ runAfterGte?: string;
+ runAfterLte?: string;
+}) => queryClient.ensureQueryData({ queryKey: Common.UseGridServiceGetGridRunsKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }), queryFn: () => GridService.getGridRuns({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) });
+/**
+* Get Grid Ti Summaries
+* Get states for TIs / "groups" of TIs.
+*
+* Essentially this is to know what color to put in the squares in the grid.
+*
+* The tricky part here is that we aggregate the state for groups and mapped tasks.
+*
+* We don't add all the TIs for mapped TIs -- we only add one entry for the mapped task and
+* its state is an aggregate of its TI states.
+*
+* And for task groups, we add a "task" for that which is not really a task but is just
+* an entry that represents the group (so that we can show a filled-in box when the group
+* is not expanded) and its state is an aggregate of those within it.
+* @param data The data for the request.
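getDagStructure and getGridRuns take identical parameter shapes, so a grid view can warm both caches in parallel from one params object. A sketch; the values are illustrative:

import { QueryClient } from "@tanstack/react-query";
// Assumed path for the two grid helpers defined above.
import { ensureUseGridServiceGetDagStructureData, ensureUseGridServiceGetGridRunsData } from "./ensureQueryData";

async function warmGrid(queryClient: QueryClient) {
  const params = { dagId: "example_dag", limit: 25, orderBy: ["-run_after"] }; // illustrative
  const [structure, runs] = await Promise.all([
    ensureUseGridServiceGetDagStructureData(queryClient, params),
    ensureUseGridServiceGetGridRunsData(queryClient, params),
  ]);
  return { structure, runs };
}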
+* @param data.dagId +* @param data.runId +* @returns GridTISummaries Successful Response +* @throws ApiError +*/ +export const ensureUseGridServiceGetGridTiSummariesData = (queryClient: QueryClient, { dagId, runId }: { + dagId: string; + runId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseGridServiceGetGridTiSummariesKeyFn({ dagId, runId }), queryFn: () => GridService.getGridTiSummaries({ dagId, runId }) }); +/** +* Get Calendar +* Get calendar data for a DAG including historical and planned DAG runs. +* @param data The data for the request. +* @param data.dagId +* @param data.granularity +* @param data.logicalDateGte +* @param data.logicalDateLte +* @returns CalendarTimeRangeCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseCalendarServiceGetCalendarData = (queryClient: QueryClient, { dagId, granularity, logicalDateGte, logicalDateLte }: { + dagId: string; + granularity?: "hourly" | "daily"; + logicalDateGte?: string; + logicalDateLte?: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts index 987c8a4ea6dde..8e9b6922f00c8 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts @@ -1,4 +1,4 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 +// generated with @7nohe/openapi-react-query-codegen@1.6.2 export * from "./common"; export * from "./queries"; diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts index 0baac0445f402..37298729b1133 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts @@ -1 +1,2 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 +// generated with @7nohe/openapi-react-query-codegen@1.6.2 + diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts index 14649a703b1d9..d4df09ab1c730 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts @@ -1,2518 +1,1448 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { type QueryClient } from "@tanstack/react-query"; +// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { - AssetService, - AuthLinksService, - BackfillService, - ConfigService, - ConnectionService, - DagReportService, - DagRunService, - DagService, - DagSourceService, - DagStatsService, - DagVersionService, - DagWarningService, - DagsService, - DashboardService, - DependenciesService, - EventLogService, - ExtraLinksService, - GridService, - ImportErrorService, - JobService, - LoginService, - MonitorService, - PluginService, - PoolService, - ProviderService, - StructureService, - TaskInstanceService, - TaskService, - VariableService, - VersionService, - XcomService, -} from "../requests/services.gen"; +import { type QueryClient } from "@tanstack/react-query"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, 
DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; - /** - * Get Assets - * Get assets. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.namePattern - * @param data.uriPattern - * @param data.dagIds - * @param data.onlyActive - * @param data.orderBy - * @returns AssetCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetAssets = ( - queryClient: QueryClient, - { - dagIds, - limit, - namePattern, - offset, - onlyActive, - orderBy, - uriPattern, - }: { - dagIds?: string[]; - limit?: number; - namePattern?: string; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetAssetsKeyFn({ - dagIds, - limit, - namePattern, - offset, - onlyActive, - orderBy, - uriPattern, - }), - queryFn: () => - AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }), - }); -/** - * Get Asset Aliases - * Get asset aliases. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.namePattern - * @param data.orderBy - * @returns AssetAliasCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetAssetAliases = ( - queryClient: QueryClient, - { - limit, - namePattern, - offset, - orderBy, - }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }), - queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }), - }); -/** - * Get Asset Alias - * Get an asset alias. - * @param data The data for the request. - * @param data.assetAliasId - * @returns unknown Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetAssetAlias = ( - queryClient: QueryClient, - { - assetAliasId, - }: { - assetAliasId: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }), - queryFn: () => AssetService.getAssetAlias({ assetAliasId }), - }); -/** - * Get Asset Events - * Get asset events. - * @param data The data for the request. 
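The prefetch.ts rewrite keeps the same query surface as the ensure* file; the behavioral difference is inherited from TanStack Query itself: prefetchQuery resolves void and swallows fetch errors (fire-and-forget cache warming), while ensureQueryData resolves with the data and rejects on failure. A sketch contrasting the two, assuming an asset ensure* helper generated alongside the ones shown in this diff:

import { QueryClient } from "@tanstack/react-query";
import { prefetchUseAssetServiceGetAssets } from "./prefetch";           // shown in this file
import { ensureUseAssetServiceGetAssetsData } from "./ensureQueryData";  // assumed sibling helper

async function demo(queryClient: QueryClient) {
  // Fire-and-forget warmup: resolves void, errors are not surfaced here.
  void prefetchUseAssetServiceGetAssets(queryClient, { limit: 50 });
  // Loader-style: returns the data and throws if the request fails.
  return ensureUseAssetServiceGetAssetsData(queryClient, { limit: 50 });
}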
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.assetId - * @param data.sourceDagId - * @param data.sourceTaskId - * @param data.sourceRunId - * @param data.sourceMapIndex - * @param data.timestampGte - * @param data.timestampLte - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetAssetEvents = ( - queryClient: QueryClient, - { - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }), - queryFn: () => - AssetService.getAssetEvents({ - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }), - }); -/** - * Get Asset Queued Events - * Get queued asset events for an asset. - * @param data The data for the request. - * @param data.assetId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetAssetQueuedEvents = ( - queryClient: QueryClient, - { - assetId, - before, - }: { - assetId: number; - before?: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }), - queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }), - }); -/** - * Get Asset - * Get an asset. - * @param data The data for the request. - * @param data.assetId - * @returns AssetResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetAsset = ( - queryClient: QueryClient, - { - assetId, - }: { - assetId: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }), - queryFn: () => AssetService.getAsset({ assetId }), - }); -/** - * Get Dag Asset Queued Events - * Get queued asset events for a DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetDagAssetQueuedEvents = ( - queryClient: QueryClient, - { - before, - dagId, - }: { - before?: string; - dagId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }), - queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }), - }); -/** - * Get Dag Asset Queued Event - * Get a queued asset event for a DAG. - * @param data The data for the request. 
- * @param data.dagId - * @param data.assetId - * @param data.before - * @returns QueuedEventResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceGetDagAssetQueuedEvent = ( - queryClient: QueryClient, - { - assetId, - before, - dagId, - }: { - assetId: number; - before?: string; - dagId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }), - queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }), - }); -/** - * Next Run Assets - * @param data The data for the request. - * @param data.dagId - * @returns unknown Successful Response - * @throws ApiError - */ -export const prefetchUseAssetServiceNextRunAssets = ( - queryClient: QueryClient, - { - dagId, - }: { - dagId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), - queryFn: () => AssetService.nextRunAssets({ dagId }), - }); -/** - * List Backfills - * @param data The data for the request. - * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseBackfillServiceListBackfills = ( - queryClient: QueryClient, - { - dagId, - limit, - offset, - orderBy, - }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }), - queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }), - }); -/** - * Get Backfill - * @param data The data for the request. - * @param data.backfillId - * @returns BackfillResponse Successful Response - * @throws ApiError - */ -export const prefetchUseBackfillServiceGetBackfill = ( - queryClient: QueryClient, - { - backfillId, - }: { - backfillId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), - queryFn: () => BackfillService.getBackfill({ backfillId }), - }); -/** - * List Backfills - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.active - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseBackfillServiceListBackfills1 = ( - queryClient: QueryClient, - { - active, - dagId, - limit, - offset, - orderBy, - }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }), - queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }), - }); -/** - * Get Connection - * Get a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseConnectionServiceGetConnection = ( - queryClient: QueryClient, - { - connectionId, - }: { - connectionId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), - queryFn: () => ConnectionService.getConnection({ connectionId }), - }); -/** - * Get Connections - * Get all connection entries. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.connectionIdPattern - * @returns ConnectionCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseConnectionServiceGetConnections = ( - queryClient: QueryClient, - { - connectionIdPattern, - limit, - offset, - orderBy, - }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }), - queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }), - }); -/** - * Hook Meta Data - * Retrieve information about available connection types (hook classes) and their parameters. - * @returns ConnectionHookMetaData Successful Response - * @throws ApiError - */ -export const prefetchUseConnectionServiceHookMetaData = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(), - queryFn: () => ConnectionService.hookMetaData(), - }); -/** - * Get Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagRunServiceGetDagRun = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), - }); -/** - * Get Upstream Asset Events - * If dag run is asset-triggered, return the asset events that triggered it. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagRunServiceGetUpstreamAssetEvents = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }), - queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }), - }); -/** - * Get Dag Runs - * Get all DAG Runs. - * - * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.runType - * @param data.state - * @param data.orderBy - * @returns DAGRunCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagRunServiceGetDagRuns = ( - queryClient: QueryClient, - { - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }), - queryFn: () => - DagRunService.getDagRuns({ - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }), - }); -/** - * Get Dag Source - * Get source code using file token. - * @param data The data for the request. - * @param data.dagId - * @param data.versionNumber - * @param data.accept - * @returns DAGSourceResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagSourceServiceGetDagSource = ( - queryClient: QueryClient, - { - accept, - dagId, - versionNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }), - queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }), - }); -/** - * Get Dag Stats - * Get Dag statistics. - * @param data The data for the request. - * @param data.dagIds - * @returns DagStatsCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagStatsServiceGetDagStats = ( - queryClient: QueryClient, - { - dagIds, - }: { - dagIds?: string[]; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), - queryFn: () => DagStatsService.getDagStats({ dagIds }), - }); -/** - * Get Dag Reports - * Get DAG report. - * @param data The data for the request. - * @param data.subdir - * @returns unknown Successful Response - * @throws ApiError - */ -export const prefetchUseDagReportServiceGetDagReports = ( - queryClient: QueryClient, - { - subdir, - }: { - subdir: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }), - queryFn: () => DagReportService.getDagReports({ subdir }), - }); -/** - * Get Config - * @param data The data for the request. 
- * @param data.section - * @param data.accept - * @returns Config Successful Response - * @throws ApiError - */ -export const prefetchUseConfigServiceGetConfig = ( - queryClient: QueryClient, - { - accept, - section, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }), - queryFn: () => ConfigService.getConfig({ accept, section }), - }); -/** - * Get Config Value - * @param data The data for the request. - * @param data.section - * @param data.option - * @param data.accept - * @returns Config Successful Response - * @throws ApiError - */ -export const prefetchUseConfigServiceGetConfigValue = ( - queryClient: QueryClient, - { - accept, - option, - section, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }), - queryFn: () => ConfigService.getConfigValue({ accept, option, section }), - }); -/** - * Get Configs - * Get configs for UI. - * @returns ConfigResponse Successful Response - * @throws ApiError - */ -export const prefetchUseConfigServiceGetConfigs = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseConfigServiceGetConfigsKeyFn(), - queryFn: () => ConfigService.getConfigs(), - }); -/** - * List Dag Warnings - * Get a list of DAG warnings. - * @param data The data for the request. - * @param data.dagId - * @param data.warningType - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns DAGWarningCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagWarningServiceListDagWarnings = ( - queryClient: QueryClient, - { - dagId, - limit, - offset, - orderBy, - warningType, - }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }), - queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }), - }); -/** - * Get Dags - * Get all DAGs. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @param data.dagRunStartDateGte - * @param data.dagRunStartDateLte - * @param data.dagRunEndDateGte - * @param data.dagRunEndDateLte - * @param data.dagRunState - * @param data.orderBy - * @returns DAGCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagServiceGetDags = ( - queryClient: QueryClient, - { - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagServiceGetDagsKeyFn({ - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }), - queryFn: () => - DagService.getDags({ - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }), - }); -/** - * Get Dag - * Get basic information about a DAG. - * @param data The data for the request. - * @param data.dagId - * @returns DAGResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagServiceGetDag = ( - queryClient: QueryClient, - { - dagId, - }: { - dagId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), - queryFn: () => DagService.getDag({ dagId }), - }); -/** - * Get Dag Details - * Get details of DAG. - * @param data The data for the request. - * @param data.dagId - * @returns DAGDetailsResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagServiceGetDagDetails = ( - queryClient: QueryClient, - { - dagId, - }: { - dagId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), - queryFn: () => DagService.getDagDetails({ dagId }), - }); -/** - * Get Dag Tags - * Get all DAG tags. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.tagNamePattern - * @returns DAGTagCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagServiceGetDagTags = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - tagNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }), - queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }), - }); -/** - * Get Event Log - * @param data The data for the request. - * @param data.eventLogId - * @returns EventLogResponse Successful Response - * @throws ApiError - */ -export const prefetchUseEventLogServiceGetEventLog = ( - queryClient: QueryClient, - { - eventLogId, - }: { - eventLogId: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), - queryFn: () => EventLogService.getEventLog({ eventLogId }), - }); -/** - * Get Event Logs - * Get all Event Logs. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.taskId - * @param data.runId - * @param data.mapIndex - * @param data.tryNumber - * @param data.owner - * @param data.event - * @param data.excludedEvents - * @param data.includedEvents - * @param data.before - * @param data.after - * @returns EventLogCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseEventLogServiceGetEventLogs = ( - queryClient: QueryClient, - { - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }), - queryFn: () => - EventLogService.getEventLogs({ - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }), - }); -/** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseExtraLinksServiceGetExtraLinks = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), - queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }), - }); -/** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetExtraLinks = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), - queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }), - }); -/** - * Get Task Instance - * Get task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetTaskInstance = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - taskId, - }: { - dagId: string; - dagRunId: string; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }), - queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }), - }); -/** - * Get Mapped Task Instances - * Get list of mapped task instances. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstances = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }), - queryFn: () => - TaskInstanceService.getMappedTaskInstances({ - dagId, - dagRunId, - 
durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }), - }); -/** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetTaskInstanceDependencies = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ - dagId, - dagRunId, - mapIndex, - taskId, - }), - queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }), - }); -/** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetTaskInstanceDependencies1 = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependencies1KeyFn({ - dagId, - dagRunId, - mapIndex, - taskId, - }), - queryFn: () => TaskInstanceService.getTaskInstanceDependencies1({ dagId, dagRunId, mapIndex, taskId }), - }); -/** - * Get Task Instance Tries - * Get list of task instances history. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetTaskInstanceTries = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), - queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }), - }); -/** - * Get Mapped Task Instance Tries - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTries = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ - dagId, - dagRunId, - mapIndex, - taskId, - }), - queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }), - }); -/** - * Get Mapped Task Instance - * Get task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstance = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }), - queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }), - }); -/** - * Get Task Instances - * Get list of task instances. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs - * and DAG runs. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.taskDisplayNamePattern - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetTaskInstances = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ - dagId, - dagRunId, - durationGte, - 
durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }), - queryFn: () => - TaskInstanceService.getTaskInstances({ - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }), - }); -/** - * Get Task Instance Try Details - * Get task instance details by try number. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetTaskInstanceTryDetails = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }), - queryFn: () => - TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), - }); -/** - * Get Mapped Task Instance Try Details - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTryDetails = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }), - queryFn: () => - TaskInstanceService.getMappedTaskInstanceTryDetails({ - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }), - }); -/** - * Get Log - * Get logs for a specific task instance. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.tryNumber - * @param data.fullContent - * @param data.mapIndex - * @param data.token - * @param data.accept - * @returns TaskInstancesLogResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskInstanceServiceGetLog = ( - queryClient: QueryClient, - { - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }), - queryFn: () => - TaskInstanceService.getLog({ - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }), - }); -/** - * Get Import Error - * Get an import error. - * @param data The data for the request. - * @param data.importErrorId - * @returns ImportErrorResponse Successful Response - * @throws ApiError - */ -export const prefetchUseImportErrorServiceGetImportError = ( - queryClient: QueryClient, - { - importErrorId, - }: { - importErrorId: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), - queryFn: () => ImportErrorService.getImportError({ importErrorId }), - }); -/** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseImportErrorServiceGetImportErrors = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), - queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }), - }); -/** - * Get Jobs - * Get all jobs. - * @param data The data for the request. 
- * @param data.isAlive - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.jobState - * @param data.jobType - * @param data.hostname - * @param data.executorClass - * @returns JobCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseJobServiceGetJobs = ( - queryClient: QueryClient, - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseJobServiceGetJobsKeyFn({ - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }), - queryFn: () => - JobService.getJobs({ - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }), - }); -/** - * Get Plugins - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUsePluginServiceGetPlugins = ( - queryClient: QueryClient, - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), - queryFn: () => PluginService.getPlugins({ limit, offset }), - }); -/** - * Get Pool - * Get a pool. - * @param data The data for the request. - * @param data.poolName - * @returns PoolResponse Successful Response - * @throws ApiError - */ -export const prefetchUsePoolServiceGetPool = ( - queryClient: QueryClient, - { - poolName, - }: { - poolName: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), - queryFn: () => PoolService.getPool({ poolName }), - }); -/** - * Get Pools - * Get all pools entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.poolNamePattern - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUsePoolServiceGetPools = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - poolNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), - queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }), - }); -/** - * Get Providers - * Get providers. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseProviderServiceGetProviders = ( - queryClient: QueryClient, - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), - queryFn: () => ProviderService.getProviders({ limit, offset }), - }); -/** - * Get Xcom Entry - * Get an XCom entry. - * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @param data.dagRunId - * @param data.xcomKey - * @param data.mapIndex - * @param data.deserialize - * @param data.stringify - * @returns unknown Successful Response - * @throws ApiError - */ -export const prefetchUseXcomServiceGetXcomEntry = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }), - queryFn: () => - XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), - }); -/** - * Get Xcom Entries - * Get all XCom entries. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.xcomKey - * @param data.mapIndex - * @param data.limit - * @param data.offset - * @returns XComCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseXcomServiceGetXcomEntries = ( - queryClient: QueryClient, - { - dagId, - dagRunId, - limit, - mapIndex, - offset, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ - dagId, - dagRunId, - limit, - mapIndex, - offset, - taskId, - xcomKey, - }), - queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), - }); -/** - * Get Tasks - * Get tasks for DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskServiceGetTasks = ( - queryClient: QueryClient, - { - dagId, - orderBy, - }: { - dagId: string; - orderBy?: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), - queryFn: () => TaskService.getTasks({ dagId, orderBy }), - }); -/** - * Get Task - * Get simplified representation of a task. - * @param data The data for the request. 
- * @param data.dagId - * @param data.taskId - * @returns TaskResponse Successful Response - * @throws ApiError - */ -export const prefetchUseTaskServiceGetTask = ( - queryClient: QueryClient, - { - dagId, - taskId, - }: { - dagId: string; - taskId: unknown; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), - queryFn: () => TaskService.getTask({ dagId, taskId }), - }); -/** - * Get Variable - * Get a variable entry. - * @param data The data for the request. - * @param data.variableKey - * @returns VariableResponse Successful Response - * @throws ApiError - */ -export const prefetchUseVariableServiceGetVariable = ( - queryClient: QueryClient, - { - variableKey, - }: { - variableKey: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), - queryFn: () => VariableService.getVariable({ variableKey }), - }); -/** - * Get Variables - * Get all Variables entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.variableKeyPattern - * @returns VariableCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseVariableServiceGetVariables = ( - queryClient: QueryClient, - { - limit, - offset, - orderBy, - variableKeyPattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), - queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }), - }); -/** - * Get Dag Version - * Get one Dag Version. - * @param data The data for the request. - * @param data.dagId - * @param data.versionNumber - * @returns DagVersionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagVersionServiceGetDagVersion = ( - queryClient: QueryClient, - { - dagId, - versionNumber, - }: { - dagId: string; - versionNumber: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), - queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }), - }); -/** - * Get Dag Versions - * Get all DAG Versions. - * - * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.versionNumber - * @param data.bundleName - * @param data.bundleVersion - * @param data.orderBy - * @returns DAGVersionCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagVersionServiceGetDagVersions = ( - queryClient: QueryClient, - { - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }), - queryFn: () => - DagVersionService.getDagVersions({ - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }), - }); -/** - * Get Health - * @returns HealthInfoResponse Successful Response - * @throws ApiError - */ -export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(), - queryFn: () => MonitorService.getHealth(), - }); -/** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ -export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseVersionServiceGetVersionKeyFn(), - queryFn: () => VersionService.getVersion(), - }); -/** - * Login - * Redirect to the login URL depending on the AuthManager configured. - * @param data The data for the request. - * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ -export const prefetchUseLoginServiceLogin = ( - queryClient: QueryClient, - { - next, - }: { - next?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseLoginServiceLoginKeyFn({ next }), - queryFn: () => LoginService.login({ next }), - }); -/** - * Logout - * Logout the user. - * @param data The data for the request. - * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ -export const prefetchUseLoginServiceLogout = ( - queryClient: QueryClient, - { - next, - }: { - next?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), - queryFn: () => LoginService.logout({ next }), - }); -/** - * Get Auth Menus - * @returns MenuItemCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseAuthLinksServiceGetAuthMenus = (queryClient: QueryClient) => - queryClient.prefetchQuery({ - queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), - queryFn: () => AuthLinksService.getAuthMenus(), - }); -/** - * Recent Dag Runs - * Get recent DAG runs. - * @param data The data for the request. 
- * @param data.dagRunsLimit - * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIds - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @returns DAGWithLatestDagRunsCollectionResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDagsServiceRecentDagRuns = ( - queryClient: QueryClient, - { - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDagsServiceRecentDagRunsKeyFn({ - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }), - queryFn: () => - DagsService.recentDagRuns({ - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }), - }); -/** - * Get Dependencies - * Dependencies graph. - * @param data The data for the request. - * @param data.nodeId - * @returns BaseGraphResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDependenciesServiceGetDependencies = ( - queryClient: QueryClient, - { - nodeId, - }: { - nodeId?: string; - } = {}, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), - queryFn: () => DependenciesService.getDependencies({ nodeId }), - }); -/** - * Historical Metrics - * Return cluster activity historical metrics. - * @param data The data for the request. - * @param data.startDate - * @param data.endDate - * @returns HistoricalMetricDataResponse Successful Response - * @throws ApiError - */ -export const prefetchUseDashboardServiceHistoricalMetrics = ( - queryClient: QueryClient, - { - endDate, - startDate, - }: { - endDate?: string; - startDate: string; - }, -) => - queryClient.prefetchQuery({ - queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), - queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }), - }); -/** - * Structure Data - * Get Structure Data. - * @param data The data for the request. 
- * @param data.dagId
- * @param data.includeUpstream
- * @param data.includeDownstream
- * @param data.root
- * @param data.externalDependencies
- * @param data.versionNumber
- * @returns StructureDataResponse Successful Response
- * @throws ApiError
- */
-export const prefetchUseStructureServiceStructureData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    externalDependencies,
-    includeDownstream,
-    includeUpstream,
-    root,
-    versionNumber,
-  }: {
-    dagId: string;
-    externalDependencies?: boolean;
-    includeDownstream?: boolean;
-    includeUpstream?: boolean;
-    root?: string;
-    versionNumber?: number;
-  },
-) =>
-  queryClient.prefetchQuery({
-    queryKey: Common.UseStructureServiceStructureDataKeyFn({
-      dagId,
-      externalDependencies,
-      includeDownstream,
-      includeUpstream,
-      root,
-      versionNumber,
-    }),
-    queryFn: () =>
-      StructureService.structureData({
-        dagId,
-        externalDependencies,
-        includeDownstream,
-        includeUpstream,
-        root,
-        versionNumber,
-      }),
-  });
-/**
- * Grid Data
- * Return grid data.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.includeUpstream
- * @param data.includeDownstream
- * @param data.root
- * @param data.offset
- * @param data.runType
- * @param data.state
- * @param data.limit
- * @param data.orderBy
- * @param data.runAfterGte
- * @param data.runAfterLte
- * @param data.logicalDateGte
- * @param data.logicalDateLte
- * @returns GridResponse Successful Response
- * @throws ApiError
- */
-export const prefetchUseGridServiceGridData = (
-  queryClient: QueryClient,
-  {
-    dagId,
-    includeDownstream,
-    includeUpstream,
-    limit,
-    logicalDateGte,
-    logicalDateLte,
-    offset,
-    orderBy,
-    root,
-    runAfterGte,
-    runAfterLte,
-    runType,
-    state,
-  }: {
-    dagId: string;
-    includeDownstream?: boolean;
-    includeUpstream?: boolean;
-    limit?: number;
-    logicalDateGte?: string;
-    logicalDateLte?: string;
-    offset?: number;
-    orderBy?: string;
-    root?: string;
-    runAfterGte?: string;
-    runAfterLte?: string;
-    runType?: string[];
-    state?: string[];
-  },
-) =>
-  queryClient.prefetchQuery({
-    queryKey: Common.UseGridServiceGridDataKeyFn({
-      dagId,
-      includeDownstream,
-      includeUpstream,
-      limit,
-      logicalDateGte,
-      logicalDateLte,
-      offset,
-      orderBy,
-      root,
-      runAfterGte,
-      runAfterLte,
-      runType,
-      state,
-    }),
-    queryFn: () =>
-      GridService.gridData({
-        dagId,
-        includeDownstream,
-        includeUpstream,
-        limit,
-        logicalDateGte,
-        logicalDateLte,
-        offset,
-        orderBy,
-        root,
-        runAfterGte,
-        runAfterLte,
-        runType,
-        state,
-      }),
-  });
+/**
+* Get Assets
+* Get assets.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.uriPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
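+* @example
+* // Hedged usage sketch, not generated output: `queryClient` is assumed to be a
+* // @tanstack/react-query QueryClient owned by the caller; values are illustrative.
+* await prefetchUseAssetServiceGetAssets(queryClient, { namePattern: "%customer_%", onlyActive: true, limit: 50 });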
+* @param data.dagIds +* @param data.onlyActive +* @param data.orderBy +* @returns AssetCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetAssets = (queryClient: QueryClient, { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: { + dagIds?: string[]; + limit?: number; + namePattern?: string; + offset?: number; + onlyActive?: boolean; + orderBy?: string[]; + uriPattern?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) }); +/** +* Get Asset Aliases +* Get asset aliases. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.orderBy +* @returns AssetAliasCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetAssetAliases = (queryClient: QueryClient, { limit, namePattern, offset, orderBy }: { + limit?: number; + namePattern?: string; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) }); +/** +* Get Asset Alias +* Get an asset alias. +* @param data The data for the request. +* @param data.assetAliasId +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetAssetAlias = (queryClient: QueryClient, { assetAliasId }: { + assetAliasId: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) }); +/** +* Get Asset Events +* Get asset events. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.assetId +* @param data.sourceDagId +* @param data.sourceTaskId +* @param data.sourceRunId +* @param data.sourceMapIndex +* @param data.timestampGte +* @param data.timestampLte +* @returns AssetEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetAssetEvents = (queryClient: QueryClient, { assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: { + assetId?: number; + limit?: number; + offset?: number; + orderBy?: string[]; + sourceDagId?: string; + sourceMapIndex?: number; + sourceRunId?: string; + sourceTaskId?: string; + timestampGte?: string; + timestampLte?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) }); +/** +* Get Asset Queued Events +* Get queued asset events for an asset. +* @param data The data for the request. 
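+* @example
+* // Hedged sketch (illustrative asset id): warm the cache before rendering the asset page.
+* await prefetchUseAssetServiceGetAssetQueuedEvents(queryClient, { assetId: 42 });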
+* @param data.assetId +* @param data.before +* @returns QueuedEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetAssetQueuedEvents = (queryClient: QueryClient, { assetId, before }: { + assetId: number; + before?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) }); +/** +* Get Asset +* Get an asset. +* @param data The data for the request. +* @param data.assetId +* @returns AssetResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetAsset = (queryClient: QueryClient, { assetId }: { + assetId: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }), queryFn: () => AssetService.getAsset({ assetId }) }); +/** +* Get Dag Asset Queued Events +* Get queued asset events for a DAG. +* @param data The data for the request. +* @param data.dagId +* @param data.before +* @returns QueuedEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetDagAssetQueuedEvents = (queryClient: QueryClient, { before, dagId }: { + before?: string; + dagId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) }); +/** +* Get Dag Asset Queued Event +* Get a queued asset event for a DAG. +* @param data The data for the request. +* @param data.dagId +* @param data.assetId +* @param data.before +* @returns QueuedEventResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceGetDagAssetQueuedEvent = (queryClient: QueryClient, { assetId, before, dagId }: { + assetId: number; + before?: string; + dagId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) }); +/** +* Next Run Assets +* @param data The data for the request. +* @param data.dagId +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseAssetServiceNextRunAssets = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), queryFn: () => AssetService.nextRunAssets({ dagId }) }); +/** +* List Backfills +* @param data The data for the request. +* @param data.dagId +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns BackfillCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseBackfillServiceListBackfills = (queryClient: QueryClient, { dagId, limit, offset, orderBy }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string[]; +}) => queryClient.prefetchQuery({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) }); +/** +* Get Backfill +* @param data The data for the request. 
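+* @example
+* // Hedged sketch (illustrative id): prefetch a single backfill ahead of navigation.
+* await prefetchUseBackfillServiceGetBackfill(queryClient, { backfillId: 7 });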
+* @param data.backfillId +* @returns BackfillResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseBackfillServiceGetBackfill = (queryClient: QueryClient, { backfillId }: { + backfillId: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), queryFn: () => BackfillService.getBackfill({ backfillId }) }); +/** +* List Backfills Ui +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.dagId +* @param data.active +* @returns BackfillCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseBackfillServiceListBackfillsUi = (queryClient: QueryClient, { active, dagId, limit, offset, orderBy }: { + active?: boolean; + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseBackfillServiceListBackfillsUiKeyFn({ active, dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfillsUi({ active, dagId, limit, offset, orderBy }) }); +/** +* Get Connection +* Get a connection entry. +* @param data The data for the request. +* @param data.connectionId +* @returns ConnectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseConnectionServiceGetConnection = (queryClient: QueryClient, { connectionId }: { + connectionId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), queryFn: () => ConnectionService.getConnection({ connectionId }) }); +/** +* Get Connections +* Get all connection entries. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.connectionIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns ConnectionCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseConnectionServiceGetConnections = (queryClient: QueryClient, { connectionIdPattern, limit, offset, orderBy }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) }); +/** +* Hook Meta Data +* Retrieve information about available connection types (hook classes) and their parameters. +* @returns ConnectionHookMetaData Successful Response +* @throws ApiError +*/ +export const prefetchUseConnectionServiceHookMetaData = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(), queryFn: () => ConnectionService.hookMetaData() }); +/** +* Get Dag Run +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @returns DAGRunResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagRunServiceGetDagRun = (queryClient: QueryClient, { dagId, dagRunId }: { + dagId: string; + dagRunId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) }); +/** +* Get Upstream Asset Events +* If dag run is asset-triggered, return the asset events that triggered it. +* @param data The data for the request. 
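+* @example
+* // Hedged sketch (illustrative ids): which asset events triggered this run?
+* await prefetchUseDagRunServiceGetUpstreamAssetEvents(queryClient, { dagId: "my_dag", dagRunId: "manual__2025-01-01T00:00:00" });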
+* @param data.dagId +* @param data.dagRunId +* @returns AssetEventCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagRunServiceGetUpstreamAssetEvents = (queryClient: QueryClient, { dagId, dagRunId }: { + dagId: string; + dagRunId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) }); +/** +* Get Dag Runs +* Get all DAG Runs. +* +* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. +* @param data The data for the request. +* @param data.dagId +* @param data.limit +* @param data.offset +* @param data.runAfterGte +* @param data.runAfterLte +* @param data.logicalDateGte +* @param data.logicalDateLte +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.updatedAtGte +* @param data.updatedAtLte +* @param data.runType +* @param data.state +* @param data.orderBy +* @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.triggeringUserNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns DAGRunCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagRunServiceGetDagRuns = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + runAfterGte?: string; + runAfterLte?: string; + runIdPattern?: string; + runType?: string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + triggeringUserNamePattern?: string; + updatedAtGte?: string; + updatedAtLte?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. 
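+* @example
+* // Hedged sketch (illustrative ids): poll every 5 seconds until the run finishes,
+* // collecting the XCom pushed by the `report` task.
+* await prefetchUseDagRunServiceWaitDagRunUntilFinished(queryClient, { dagId: "my_dag", dagRunId: "run_1", interval: 5, result: ["report"] });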
+* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseDagRunServiceWaitDagRunUntilFinished = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseExperimentalServiceWaitDagRunUntilFinished = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.prefetchQuery({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); +/** +* Get Dag Source +* Get source code using file token. +* @param data The data for the request. +* @param data.dagId +* @param data.versionNumber +* @param data.accept +* @returns DAGSourceResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagSourceServiceGetDagSource = (queryClient: QueryClient, { accept, dagId, versionNumber }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) }); +/** +* Get Dag Stats +* Get Dag statistics. +* @param data The data for the request. +* @param data.dagIds +* @returns DagStatsCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagStatsServiceGetDagStats = (queryClient: QueryClient, { dagIds }: { + dagIds?: string[]; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), queryFn: () => DagStatsService.getDagStats({ dagIds }) }); +/** +* Get Dag Reports +* Get DAG report. +* @param data The data for the request. +* @param data.subdir +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseDagReportServiceGetDagReports = (queryClient: QueryClient, { subdir }: { + subdir: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }), queryFn: () => DagReportService.getDagReports({ subdir }) }); +/** +* Get Config +* @param data The data for the request. 
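+* @example
+* // Hedged sketch: fetch only the `core` section, rendered as JSON.
+* await prefetchUseConfigServiceGetConfig(queryClient, { section: "core", accept: "application/json" });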
+* @param data.section +* @param data.accept +* @returns Config Successful Response +* @throws ApiError +*/ +export const prefetchUseConfigServiceGetConfig = (queryClient: QueryClient, { accept, section }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }), queryFn: () => ConfigService.getConfig({ accept, section }) }); +/** +* Get Config Value +* @param data The data for the request. +* @param data.section +* @param data.option +* @param data.accept +* @returns Config Successful Response +* @throws ApiError +*/ +export const prefetchUseConfigServiceGetConfigValue = (queryClient: QueryClient, { accept, option, section }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) }); +/** +* Get Configs +* Get configs for UI. +* @returns ConfigResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseConfigServiceGetConfigs = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(), queryFn: () => ConfigService.getConfigs() }); +/** +* List Dag Warnings +* Get a list of DAG warnings. +* @param data The data for the request. +* @param data.dagId +* @param data.warningType +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns DAGWarningCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagWarningServiceListDagWarnings = (queryClient: QueryClient, { dagId, limit, offset, orderBy, warningType }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + warningType?: DagWarningType; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) }); +/** +* Get Dags +* Get all DAGs. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.tags +* @param data.tagsMatchMode +* @param data.owners +* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
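+* @example
+* // Hedged sketch (illustrative pattern): LIKE wildcards, not regular expressions.
+* await prefetchUseDagServiceGetDags(queryClient, { dagIdPattern: "etl_%", paused: false, limit: 25 });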
+* @param data.excludeStale +* @param data.paused +* @param data.lastDagRunState +* @param data.bundleName +* @param data.bundleVersion +* @param data.dagRunStartDateGte +* @param data.dagRunStartDateLte +* @param data.dagRunEndDateGte +* @param data.dagRunEndDateLte +* @param data.dagRunState +* @param data.orderBy +* @param data.isFavorite +* @returns DAGCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagServiceGetDags = (queryClient: QueryClient, { bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { + bundleName?: string; + bundleVersion?: string; + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + isFavorite?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string[]; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +/** +* Get Dag +* Get basic information about a DAG. +* @param data The data for the request. +* @param data.dagId +* @returns DAGResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagServiceGetDag = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), queryFn: () => DagService.getDag({ dagId }) }); +/** +* Get Dag Details +* Get details of DAG. +* @param data The data for the request. +* @param data.dagId +* @returns DAGDetailsResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagServiceGetDagDetails = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), queryFn: () => DagService.getDagDetails({ dagId }) }); +/** +* Get Dag Tags +* Get all DAG tags. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.tagNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
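+* @example
+* // Hedged sketch (illustrative pattern): tags whose name starts with `team_`.
+* await prefetchUseDagServiceGetDagTags(queryClient, { tagNamePattern: "team_%", limit: 100 });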
+* @returns DAGTagCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagServiceGetDagTags = (queryClient: QueryClient, { limit, offset, orderBy, tagNamePattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + tagNamePattern?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) }); +/** +* Get Dags +* Get DAGs with recent DagRun. +* @param data The data for the request. +* @param data.dagRunsLimit +* @param data.limit +* @param data.offset +* @param data.tags +* @param data.tagsMatchMode +* @param data.owners +* @param data.dagIds +* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.excludeStale +* @param data.paused +* @param data.lastDagRunState +* @param data.bundleName +* @param data.bundleVersion +* @param data.orderBy +* @param data.isFavorite +* @returns DAGWithLatestDagRunsCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagServiceGetDagsUi = (queryClient: QueryClient, { bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { + bundleName?: string; + bundleVersion?: string; + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + isFavorite?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string[]; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDagsUi({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +/** +* Get Latest Run Info +* Get latest run. +* @param data The data for the request. +* @param data.dagId +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseDagServiceGetLatestRunInfo = (queryClient: QueryClient, { dagId }: { + dagId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetLatestRunInfoKeyFn({ dagId }), queryFn: () => DagService.getLatestRunInfo({ dagId }) }); +/** +* Get Event Log +* @param data The data for the request. +* @param data.eventLogId +* @returns EventLogResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseEventLogServiceGetEventLog = (queryClient: QueryClient, { eventLogId }: { + eventLogId: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), queryFn: () => EventLogService.getEventLog({ eventLogId }) }); +/** +* Get Event Logs +* Get all Event Logs. +* @param data The data for the request. 
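+* @example
+* // Hedged sketch (illustrative values): audit events for one dag after a cutoff timestamp.
+* await prefetchUseEventLogServiceGetEventLogs(queryClient, { dagId: "my_dag", after: "2025-01-01T00:00:00Z", limit: 50 });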
+* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.dagId +* @param data.taskId +* @param data.runId +* @param data.mapIndex +* @param data.tryNumber +* @param data.owner +* @param data.event +* @param data.excludedEvents +* @param data.includedEvents +* @param data.before +* @param data.after +* @returns EventLogCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseEventLogServiceGetEventLogs = (queryClient: QueryClient, { after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string[]; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) }); +/** +* Get Extra Links +* Get extra links for task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns ExtraLinkCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseExtraLinksServiceGetExtraLinks = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Extra Links +* Get extra links for task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns ExtraLinkCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetExtraLinks = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instance +* Get task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns TaskInstanceResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetTaskInstance = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Task Instances +* Get list of mapped task instances. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.runAfterGte +* @param data.runAfterLte +* @param data.logicalDateGte +* @param data.logicalDateLte +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.updatedAtGte +* @param data.updatedAtLte +* @param data.durationGte +* @param data.durationLte +* @param data.state +* @param data.pool +* @param data.queue +* @param data.executor +* @param data.versionNumber +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns TaskInstanceCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstances = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }), queryFn: () => TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); +/** +* Get Task Instance Dependencies +* Get dependencies blocking task from getting scheduled. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskDependencyCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndex = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instance Dependencies +* Get dependencies blocking task from getting scheduled. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskDependencyCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetTaskInstanceDependencies = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instance Tries +* Get list of task instances history. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskInstanceHistoryCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetTaskInstanceTries = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Mapped Task Instance Tries +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskInstanceHistoryCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTries = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Mapped Task Instance +* Get task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns TaskInstanceResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstance = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Task Instances +* Get list of task instances. +* +* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs +* and DAG runs. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.runAfterGte +* @param data.runAfterLte +* @param data.logicalDateGte +* @param data.logicalDateLte +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.updatedAtGte +* @param data.updatedAtLte +* @param data.durationGte +* @param data.durationLte +* @param data.taskDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.state +* @param data.pool +* @param data.queue +* @param data.executor +* @param data.versionNumber +* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns TaskInstanceCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetTaskInstances = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string[]; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); +/** +* Get Task Instance Try Details +* Get task instance details by try number. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.taskTryNumber +* @param data.mapIndex +* @returns TaskInstanceHistoryResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetTaskInstanceTryDetails = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); +/** +* Get Mapped Task Instance Try Details +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.taskTryNumber +* @param data.mapIndex +* @returns TaskInstanceHistoryResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTryDetails = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); +/** +* Get Log +* Get logs for a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.tryNumber +* @param data.fullContent +* @param data.mapIndex +* @param data.token +* @param data.accept +* @returns TaskInstancesLogResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetLog = (queryClient: QueryClient, { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) }); +/** +* Get External Log Url +* Get external log URL for a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.tryNumber +* @param data.mapIndex +* @returns ExternalLogUrlResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskInstanceServiceGetExternalLogUrl = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, tryNumber }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + tryNumber: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetExternalLogUrlKeyFn({ dagId, dagRunId, mapIndex, taskId, tryNumber }), queryFn: () => TaskInstanceService.getExternalLogUrl({ dagId, dagRunId, mapIndex, taskId, tryNumber }) }); +/** +* Get Import Error +* Get an import error. +* @param data The data for the request. +* @param data.importErrorId +* @returns ImportErrorResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseImportErrorServiceGetImportError = (queryClient: QueryClient, { importErrorId }: { + importErrorId: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), queryFn: () => ImportErrorService.getImportError({ importErrorId }) }); +/** +* Get Import Errors +* Get all import errors. +* @param data The data for the request. 
+* @param data.limit +* @param data.offset +* @param data.orderBy +* @returns ImportErrorCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseImportErrorServiceGetImportErrors = (queryClient: QueryClient, { limit, offset, orderBy }: { + limit?: number; + offset?: number; + orderBy?: string[]; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) }); +/** +* Get Jobs +* Get all jobs. +* @param data The data for the request. +* @param data.isAlive +* @param data.startDateGte +* @param data.startDateLte +* @param data.endDateGte +* @param data.endDateLte +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.jobState +* @param data.jobType +* @param data.hostname +* @param data.executorClass +* @returns JobCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseJobServiceGetJobs = (queryClient: QueryClient, { endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + startDateGte?: string; + startDateLte?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) }); +/** +* Get Plugins +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @returns PluginCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUsePluginServiceGetPlugins = (queryClient: QueryClient, { limit, offset }: { + limit?: number; + offset?: number; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), queryFn: () => PluginService.getPlugins({ limit, offset }) }); +/** +* Import Errors +* @returns PluginImportErrorCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUsePluginServiceImportErrors = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UsePluginServiceImportErrorsKeyFn(), queryFn: () => PluginService.importErrors() }); +/** +* Get Pool +* Get a pool. +* @param data The data for the request. +* @param data.poolName +* @returns PoolResponse Successful Response +* @throws ApiError +*/ +export const prefetchUsePoolServiceGetPool = (queryClient: QueryClient, { poolName }: { + poolName: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), queryFn: () => PoolService.getPool({ poolName }) }); +/** +* Get Pools +* Get all pools entries. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.poolNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
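+* @example
+* // Illustrative sketch only (not part of the generated client): warm the cache with the
+* // first 50 pools whose name starts with "etl", via the `%` LIKE wildcard noted above.
+* // `queryClient` is assumed to be the application's existing QueryClient instance.
+* await prefetchUsePoolServiceGetPools(queryClient, { limit: 50, poolNamePattern: "etl%" });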
+* @returns PoolCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUsePoolServiceGetPools = (queryClient: QueryClient, { limit, offset, orderBy, poolNamePattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + poolNamePattern?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) }); +/** +* Get Providers +* Get providers. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @returns ProviderCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseProviderServiceGetProviders = (queryClient: QueryClient, { limit, offset }: { + limit?: number; + offset?: number; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), queryFn: () => ProviderService.getProviders({ limit, offset }) }); +/** +* Get Xcom Entry +* Get an XCom entry. +* @param data The data for the request. +* @param data.dagId +* @param data.taskId +* @param data.dagRunId +* @param data.xcomKey +* @param data.mapIndex +* @param data.deserialize +* @param data.stringify +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseXcomServiceGetXcomEntry = (queryClient: QueryClient, { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) }); +/** +* Get Xcom Entries +* Get all XCom entries. +* +* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.xcomKey +* @param data.mapIndex +* @param data.limit +* @param data.offset +* @returns XComCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseXcomServiceGetXcomEntries = (queryClient: QueryClient, { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) }); +/** +* Get Tasks +* Get tasks for DAG. +* @param data The data for the request. +* @param data.dagId +* @param data.orderBy +* @returns TaskCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskServiceGetTasks = (queryClient: QueryClient, { dagId, orderBy }: { + dagId: string; + orderBy?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), queryFn: () => TaskService.getTasks({ dagId, orderBy }) }); +/** +* Get Task +* Get simplified representation of a task. +* @param data The data for the request. 
+* @param data.dagId +* @param data.taskId +* @returns TaskResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseTaskServiceGetTask = (queryClient: QueryClient, { dagId, taskId }: { + dagId: string; + taskId: unknown; +}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), queryFn: () => TaskService.getTask({ dagId, taskId }) }); +/** +* Get Variable +* Get a variable entry. +* @param data The data for the request. +* @param data.variableKey +* @returns VariableResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseVariableServiceGetVariable = (queryClient: QueryClient, { variableKey }: { + variableKey: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), queryFn: () => VariableService.getVariable({ variableKey }) }); +/** +* Get Variables +* Get all Variables entries. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.variableKeyPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns VariableCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseVariableServiceGetVariables = (queryClient: QueryClient, { limit, offset, orderBy, variableKeyPattern }: { + limit?: number; + offset?: number; + orderBy?: string[]; + variableKeyPattern?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) }); +/** +* Get Dag Version +* Get one Dag Version. +* @param data The data for the request. +* @param data.dagId +* @param data.versionNumber +* @returns DagVersionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagVersionServiceGetDagVersion = (queryClient: QueryClient, { dagId, versionNumber }: { + dagId: string; + versionNumber: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) }); +/** +* Get Dag Versions +* Get all DAG Versions. +* +* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. +* @param data The data for the request. +* @param data.dagId +* @param data.limit +* @param data.offset +* @param data.versionNumber +* @param data.bundleName +* @param data.bundleVersion +* @param data.orderBy +* @returns DAGVersionCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDagVersionServiceGetDagVersions = (queryClient: QueryClient, { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: { + bundleName?: string; + bundleVersion?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: string[]; + versionNumber?: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); +/** +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetHitlDetail = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetMappedTiHitlDetail = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Hitl Details +* Get Human-in-the-loop details. +* @param data The data for the request. +* @param data.limit +* @param data.offset +* @param data.orderBy +* @param data.dagId +* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.dagRunId +* @param data.taskId +* @param data.taskIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.state +* @param data.responseReceived +* @param data.userId +* @param data.subjectSearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @param data.bodySearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. +* @returns HITLDetailCollection Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetHitlDetails = (queryClient: QueryClient, { bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }: { + bodySearch?: string; + dagId?: string; + dagIdPattern?: string; + dagRunId?: string; + limit?: number; + offset?: number; + orderBy?: string[]; + responseReceived?: boolean; + state?: string[]; + subjectSearch?: string; + taskId?: string; + taskIdPattern?: string; + userId?: string[]; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }), queryFn: () => HumanInTheLoopService.getHitlDetails({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }) }); +/** +* Get Health +* @returns HealthInfoResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(), queryFn: () => MonitorService.getHealth() }); +/** +* Get Version +* Get version information. 
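+* @example
+* // Illustrative sketch only (not part of the generated client): warm the cache with
+* // version info before rendering (e.g. in a router loader) so the matching query
+* // resolves from cache. `queryClient` is assumed to be the application's QueryClient.
+* await prefetchUseVersionServiceGetVersion(queryClient);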
+* @returns VersionInfo Successful Response +* @throws ApiError +*/ +export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseVersionServiceGetVersionKeyFn(), queryFn: () => VersionService.getVersion() }); +/** +* Login +* Redirect to the login URL depending on the AuthManager configured. +* @param data The data for the request. +* @param data.next +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseLoginServiceLogin = (queryClient: QueryClient, { next }: { + next?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }), queryFn: () => LoginService.login({ next }) }); +/** +* Logout +* Logout the user. +* @param data The data for the request. +* @param data.next +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseLoginServiceLogout = (queryClient: QueryClient, { next }: { + next?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), queryFn: () => LoginService.logout({ next }) }); +/** +* Refresh +* Refresh the authentication token. +* @param data The data for the request. +* @param data.next +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseLoginServiceRefresh = (queryClient: QueryClient, { next }: { + next?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseLoginServiceRefreshKeyFn({ next }), queryFn: () => LoginService.refresh({ next }) }); +/** +* Get Auth Menus +* @returns MenuItemCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseAuthLinksServiceGetAuthMenus = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), queryFn: () => AuthLinksService.getAuthMenus() }); +/** +* Get Dependencies +* Dependencies graph. +* @param data The data for the request. +* @param data.nodeId +* @returns BaseGraphResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDependenciesServiceGetDependencies = (queryClient: QueryClient, { nodeId }: { + nodeId?: string; +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), queryFn: () => DependenciesService.getDependencies({ nodeId }) }); +/** +* Historical Metrics +* Return cluster activity historical metrics. +* @param data The data for the request. +* @param data.startDate +* @param data.endDate +* @returns HistoricalMetricDataResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDashboardServiceHistoricalMetrics = (queryClient: QueryClient, { endDate, startDate }: { + endDate?: string; + startDate: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) }); +/** +* Dag Stats +* Return basic DAG stats with counts of DAGs in various states. +* @returns DashboardDagStatsResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseDashboardServiceDagStats = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(), queryFn: () => DashboardService.dagStats() }); +/** +* Structure Data +* Get Structure Data. +* @param data The data for the request. 
+* @param data.dagId +* @param data.includeUpstream +* @param data.includeDownstream +* @param data.root +* @param data.externalDependencies +* @param data.versionNumber +* @returns StructureDataResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseStructureServiceStructureData = (queryClient: QueryClient, { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: { + dagId: string; + externalDependencies?: boolean; + includeDownstream?: boolean; + includeUpstream?: boolean; + root?: string; + versionNumber?: number; +}) => queryClient.prefetchQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) }); +/** +* Get Dag Structure +* Return dag structure for grid view. +* @param data The data for the request. +* @param data.dagId +* @param data.offset +* @param data.limit +* @param data.orderBy +* @param data.runAfterGte +* @param data.runAfterLte +* @returns GridNodeResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseGridServiceGetDagStructure = (queryClient: QueryClient, { dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string[]; + runAfterGte?: string; + runAfterLte?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseGridServiceGetDagStructureKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }), queryFn: () => GridService.getDagStructure({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) }); +/** +* Get Grid Runs +* Get info about a run for the grid. +* @param data The data for the request. +* @param data.dagId +* @param data.offset +* @param data.limit +* @param data.orderBy +* @param data.runAfterGte +* @param data.runAfterLte +* @returns GridRunsResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseGridServiceGetGridRuns = (queryClient: QueryClient, { dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string[]; + runAfterGte?: string; + runAfterLte?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseGridServiceGetGridRunsKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }), queryFn: () => GridService.getGridRuns({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) }); +/** +* Get Grid Ti Summaries +* Get states for TIs / "groups" of TIs. +* +* Essentially this is to know what color to put in the squares in the grid. +* +* The tricky part here is that we aggregate the state for groups and mapped tasks. +* +* We don't add all the TIs for mapped TIs -- we only add one entry for the mapped task and +* its state is an aggregate of its TI states. +* +* And for task groups, we add a "task" for that which is not really a task but is just +* an entry that represents the group (so that we can show a filled in box when the group +* is not expanded) and its state is an agg of those within it. +* @param data The data for the request. 
+* @param data.dagId +* @param data.runId +* @returns GridTISummaries Successful Response +* @throws ApiError +*/ +export const prefetchUseGridServiceGetGridTiSummaries = (queryClient: QueryClient, { dagId, runId }: { + dagId: string; + runId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseGridServiceGetGridTiSummariesKeyFn({ dagId, runId }), queryFn: () => GridService.getGridTiSummaries({ dagId, runId }) }); +/** +* Get Calendar +* Get calendar data for a DAG including historical and planned DAG runs. +* @param data The data for the request. +* @param data.dagId +* @param data.granularity +* @param data.logicalDateGte +* @param data.logicalDateLte +* @returns CalendarTimeRangeCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseCalendarServiceGetCalendar = (queryClient: QueryClient, { dagId, granularity, logicalDateGte, logicalDateLte }: { + dagId: string; + granularity?: "hourly" | "daily"; + logicalDateGte?: string; + logicalDateLte?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts index ae20d5e3d762b..f5b65f7596b10 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts @@ -1,4729 +1,2263 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; +// generated with @7nohe/openapi-react-query-codegen@1.6.2 -import { - AssetService, - AuthLinksService, - BackfillService, - ConfigService, - ConnectionService, - DagParsingService, - DagReportService, - DagRunService, - DagService, - DagSourceService, - DagStatsService, - DagVersionService, - DagWarningService, - DagsService, - DashboardService, - DependenciesService, - EventLogService, - ExtraLinksService, - GridService, - ImportErrorService, - JobService, - LoginService, - MonitorService, - PluginService, - PoolService, - ProviderService, - StructureService, - TaskInstanceService, - TaskService, - VariableService, - VersionService, - XcomService, -} from "../requests/services.gen"; -import { - BackfillPostBody, - BulkBody_ConnectionBody_, - BulkBody_PoolBody_, - BulkBody_VariableBody_, - ClearTaskInstancesBody, - ConnectionBody, - CreateAssetEventsBody, - DAGPatchBody, - DAGRunClearBody, - DAGRunPatchBody, - DAGRunsBatchBody, - DagRunState, - DagWarningType, - PatchTaskInstanceBody, - PoolBody, - PoolPatchBody, - TaskInstancesBatchBody, - TriggerDAGRunPostBody, - VariableBody, - XComCreateBody, - XComUpdateBody, -} from "../requests/types.gen"; +import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, 
VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { BackfillPostBody, BulkBody_BulkTaskInstanceBody_, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, UpdateHITLDetailPayload, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; import * as Common from "./common"; - /** - * Get Assets - * Get assets. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.namePattern - * @param data.uriPattern - * @param data.dagIds - * @param data.onlyActive - * @param data.orderBy - * @returns AssetCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssets = < - TData = Common.AssetServiceGetAssetsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagIds, - limit, - namePattern, - offset, - onlyActive, - orderBy, - uriPattern, - }: { - dagIds?: string[]; - limit?: number; - namePattern?: string; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetAssetsKeyFn( - { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }, - queryKey, - ), - queryFn: () => - AssetService.getAssets({ - dagIds, - limit, - namePattern, - offset, - onlyActive, - orderBy, - uriPattern, - }) as TData, - ...options, - }); -/** - * Get Asset Aliases - * Get asset aliases. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.namePattern - * @param data.orderBy - * @returns AssetAliasCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetAliases = < - TData = Common.AssetServiceGetAssetAliasesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - namePattern, - offset, - orderBy, - }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), - queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Asset Alias - * Get an asset alias. - * @param data The data for the request. - * @param data.assetAliasId - * @returns unknown Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetAlias = < - TData = Common.AssetServiceGetAssetAliasDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetAliasId, - }: { - assetAliasId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), - queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, - ...options, - }); -/** - * Get Asset Events - * Get asset events. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.assetId - * @param data.sourceDagId - * @param data.sourceTaskId - * @param data.sourceRunId - * @param data.sourceMapIndex - * @param data.timestampGte - * @param data.timestampLte - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetEvents = < - TData = Common.AssetServiceGetAssetEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetAssetEventsKeyFn( - { - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }, - queryKey, - ), - queryFn: () => - AssetService.getAssetEvents({ - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }) as TData, - ...options, - }); -/** - * Get Asset Queued Events - * Get queued asset events for an asset. - * @param data The data for the request. - * @param data.assetId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetQueuedEvents = < - TData = Common.AssetServiceGetAssetQueuedEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - before, - }: { - assetId: number; - before?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), - queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, - ...options, - }); -/** - * Get Asset - * Get an asset. - * @param data The data for the request. - * @param data.assetId - * @returns AssetResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAsset = < - TData = Common.AssetServiceGetAssetDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - }: { - assetId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), - queryFn: () => AssetService.getAsset({ assetId }) as TData, - ...options, - }); -/** - * Get Dag Asset Queued Events - * Get queued asset events for a DAG. - * @param data The data for the request. 
- * @param data.dagId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetDagAssetQueuedEvents = < - TData = Common.AssetServiceGetDagAssetQueuedEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - before, - dagId, - }: { - before?: string; - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), - queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, - ...options, - }); -/** - * Get Dag Asset Queued Event - * Get a queued asset event for a DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.assetId - * @param data.before - * @returns QueuedEventResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetDagAssetQueuedEvent = < - TData = Common.AssetServiceGetDagAssetQueuedEventDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - before, - dagId, - }: { - assetId: number; - before?: string; - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), - queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, - ...options, - }); -/** - * Next Run Assets - * @param data The data for the request. - * @param data.dagId - * @returns unknown Successful Response - * @throws ApiError - */ -export const useAssetServiceNextRunAssets = < - TData = Common.AssetServiceNextRunAssetsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - }: { - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), - queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, - ...options, - }); -/** - * List Backfills - * @param data The data for the request. - * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceListBackfills = < - TData = Common.BackfillServiceListBackfillsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - limit, - offset, - orderBy, - }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), - queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Backfill - * @param data The data for the request. 
- * @param data.backfillId - * @returns BackfillResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceGetBackfill = < - TData = Common.BackfillServiceGetBackfillDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - backfillId, - }: { - backfillId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), - queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, - ...options, - }); -/** - * List Backfills - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.active - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceListBackfills1 = < - TData = Common.BackfillServiceListBackfills1DefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - active, - dagId, - limit, - offset, - orderBy, - }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseBackfillServiceListBackfills1KeyFn( - { active, dagId, limit, offset, orderBy }, - queryKey, - ), - queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Connection - * Get a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ -export const useConnectionServiceGetConnection = < - TData = Common.ConnectionServiceGetConnectionDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - connectionId, - }: { - connectionId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), - queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, - ...options, - }); -/** - * Get Connections - * Get all connection entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.connectionIdPattern - * @returns ConnectionCollectionResponse Successful Response - * @throws ApiError - */ -export const useConnectionServiceGetConnections = < - TData = Common.ConnectionServiceGetConnectionsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - connectionIdPattern, - limit, - offset, - orderBy, - }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( - { connectionIdPattern, limit, offset, orderBy }, - queryKey, - ), - queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Hook Meta Data - * Retrieve information about available connection types (hook classes) and their parameters. 
- * @returns ConnectionHookMetaData Successful Response - * @throws ApiError - */ -export const useConnectionServiceHookMetaData = < - TData = Common.ConnectionServiceHookMetaDataDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), - queryFn: () => ConnectionService.hookMetaData() as TData, - ...options, - }); -/** - * Get Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetDagRun = < - TData = Common.DagRunServiceGetDagRunDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, - ...options, - }); -/** - * Get Upstream Asset Events - * If dag run is asset-triggered, return the asset events that triggered it. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetUpstreamAssetEvents = < - TData = Common.DagRunServiceGetUpstreamAssetEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), - queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, - ...options, - }); -/** - * Get Dag Runs - * Get all DAG Runs. - * - * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.runType - * @param data.state - * @param data.orderBy - * @returns DAGRunCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetDagRuns = < - TData = Common.DagRunServiceGetDagRunsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagRunServiceGetDagRunsKeyFn( - { - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }, - queryKey, - ), - queryFn: () => - DagRunService.getDagRuns({ - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }) as TData, - ...options, - }); -/** - * Get Dag Source - * Get source code using file token. - * @param data The data for the request. - * @param data.dagId - * @param data.versionNumber - * @param data.accept - * @returns DAGSourceResponse Successful Response - * @throws ApiError - */ -export const useDagSourceServiceGetDagSource = < - TData = Common.DagSourceServiceGetDagSourceDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - accept, - dagId, - versionNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), - queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, - ...options, - }); -/** - * Get Dag Stats - * Get Dag statistics. - * @param data The data for the request. - * @param data.dagIds - * @returns DagStatsCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagStatsServiceGetDagStats = < - TData = Common.DagStatsServiceGetDagStatsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagIds, - }: { - dagIds?: string[]; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), - queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, - ...options, - }); -/** - * Get Dag Reports - * Get DAG report. 
- * @param data The data for the request.
- * @param data.subdir
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const useDagReportServiceGetDagReports = <
-  TData = Common.DagReportServiceGetDagReportsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    subdir,
-  }: {
-    subdir: string;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey),
-    queryFn: () => DagReportService.getDagReports({ subdir }) as TData,
-    ...options,
-  });
-/**
- * Get Config
- * @param data The data for the request.
- * @param data.section
- * @param data.accept
- * @returns Config Successful Response
- * @throws ApiError
- */
-export const useConfigServiceGetConfig = <
-  TData = Common.ConfigServiceGetConfigDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    accept,
-    section,
-  }: {
-    accept?: "application/json" | "text/plain" | "*/*";
-    section?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey),
-    queryFn: () => ConfigService.getConfig({ accept, section }) as TData,
-    ...options,
-  });
-/**
- * Get Config Value
- * @param data The data for the request.
- * @param data.section
- * @param data.option
- * @param data.accept
- * @returns Config Successful Response
- * @throws ApiError
- */
-export const useConfigServiceGetConfigValue = <
-  TData = Common.ConfigServiceGetConfigValueDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    accept,
-    option,
-    section,
-  }: {
-    accept?: "application/json" | "text/plain" | "*/*";
-    option: string;
-    section: string;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey),
-    queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData,
-    ...options,
-  });
-/**
- * Get Configs
- * Get configs for UI.
- * @returns ConfigResponse Successful Response
- * @throws ApiError
- */
-export const useConfigServiceGetConfigs = <
-  TData = Common.ConfigServiceGetConfigsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey),
-    queryFn: () => ConfigService.getConfigs() as TData,
-    ...options,
-  });
-/**
- * List Dag Warnings
- * Get a list of DAG warnings.
- * @param data The data for the request.
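The third positional argument forwards react-query options, which is where cache tuning goes. A sketch reading a single config value (`[core] parallelism` is an illustrative section/option pair):

```typescript
import { useConfigServiceGetConfigValue } from "openapi/queries"; // assumed import path

export const useParallelismSetting = () =>
  useConfigServiceGetConfigValue(
    { accept: "application/json", option: "parallelism", section: "core" },
    undefined, // no extra query-key suffix
    { staleTime: 5 * 60_000 }, // config rarely changes; keep it fresh for 5 minutes
  );
```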
- * @param data.dagId - * @param data.warningType - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns DAGWarningCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagWarningServiceListDagWarnings = < - TData = Common.DagWarningServiceListDagWarningsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - limit, - offset, - orderBy, - warningType, - }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( - { dagId, limit, offset, orderBy, warningType }, - queryKey, - ), - queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, - ...options, - }); -/** - * Get Dags - * Get all DAGs. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @param data.dagRunStartDateGte - * @param data.dagRunStartDateLte - * @param data.dagRunEndDateGte - * @param data.dagRunEndDateLte - * @param data.dagRunState - * @param data.orderBy - * @returns DAGCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDags = < - TData = Common.DagServiceGetDagsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagServiceGetDagsKeyFn( - { - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }, - queryKey, - ), - queryFn: () => - DagService.getDags({ - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }) as TData, - ...options, - }); -/** - * Get Dag - * Get basic information about a DAG. - * @param data The data for the request. 
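A filtering sketch for the DAG list; the tag names are made up, while `tagsMatchMode` is the generated `"any" | "all"` union:

```typescript
import { useDagServiceGetDags } from "openapi/queries"; // assumed import path

export const useActiveProductionDags = () =>
  useDagServiceGetDags({
    excludeStale: true,
    paused: false,
    tags: ["production", "critical"], // illustrative tag names
    tagsMatchMode: "all", // every tag must match, not just any
  });
```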
- * @param data.dagId - * @returns DAGResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDag = < - TData = Common.DagServiceGetDagDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - }: { - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), - queryFn: () => DagService.getDag({ dagId }) as TData, - ...options, - }); -/** - * Get Dag Details - * Get details of DAG. - * @param data The data for the request. - * @param data.dagId - * @returns DAGDetailsResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDagDetails = < - TData = Common.DagServiceGetDagDetailsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - }: { - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), - queryFn: () => DagService.getDagDetails({ dagId }) as TData, - ...options, - }); -/** - * Get Dag Tags - * Get all DAG tags. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.tagNamePattern - * @returns DAGTagCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDagTags = < - TData = Common.DagServiceGetDagTagsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - tagNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), - queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, - ...options, - }); -/** - * Get Event Log - * @param data The data for the request. - * @param data.eventLogId - * @returns EventLogResponse Successful Response - * @throws ApiError - */ -export const useEventLogServiceGetEventLog = < - TData = Common.EventLogServiceGetEventLogDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - eventLogId, - }: { - eventLogId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), - queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, - ...options, - }); -/** - * Get Event Logs - * Get all Event Logs. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.taskId - * @param data.runId - * @param data.mapIndex - * @param data.tryNumber - * @param data.owner - * @param data.event - * @param data.excludedEvents - * @param data.includedEvents - * @param data.before - * @param data.after - * @returns EventLogCollectionResponse Successful Response - * @throws ApiError - */ -export const useEventLogServiceGetEventLogs = < - TData = Common.EventLogServiceGetEventLogsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseEventLogServiceGetEventLogsKeyFn( - { - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }, - queryKey, - ), - queryFn: () => - EventLogService.getEventLogs({ - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }) as TData, - ...options, - }); -/** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ -export const useExtraLinksServiceGetExtraLinks = < - TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), - queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetExtraLinks = < - TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instance - * Get task instance. 
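An audit-style sketch scoping event logs to one DAG and a time window; the ISO-8601 timestamp format and the `orderBy` column name are assumptions:

```typescript
import { useEventLogServiceGetEventLogs } from "openapi/queries"; // assumed import path

export const useDagAuditLog = (dagId: string) =>
  useEventLogServiceGetEventLogs({
    after: "2024-01-01T00:00:00Z", // illustrative window bounds
    before: "2024-02-01T00:00:00Z",
    dagId,
    orderBy: "-when", // assumed timestamp column, newest first
  });
```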
- * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstance = < - TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - taskId, - }: { - dagId: string; - dagRunId: string; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), - queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, - ...options, - }); -/** - * Get Mapped Task Instances - * Get list of mapped task instances. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstances = < - TData = Common.TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getMappedTaskInstances({ - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }) as TData, - ...options, - }); -/** - * Get Task Instance 
Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceDependencies = < - TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceDependencies1 = < - TData = Common.TaskInstanceServiceGetTaskInstanceDependencies1DefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependencies1KeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstanceDependencies1({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instance Tries - * Get list of task instances history. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceTries = < - TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Mapped Task Instance Tries - * @param data The data for the request. 
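Note the `...Dependencies1` duplicate above: the codegen appends a numeric suffix when the plain and mapped task-instance routes collapse to the same operation name; the variants differ only in whether `mapIndex` is required. A sketch for the tries endpoint:

```typescript
import { useTaskInstanceServiceGetTaskInstanceTries } from "openapi/queries"; // assumed import path

export const useTaskTries = (dagId: string, dagRunId: string, taskId: string) =>
  useTaskInstanceServiceGetTaskInstanceTries({
    dagId,
    dagRunId,
    taskId, // mapIndex omitted: optional for this unmapped variant
  });
```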
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstanceTries = < - TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Mapped Task Instance - * Get task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstance = < - TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instances - * Get list of task instances. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs - * and DAG runs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.taskDisplayNamePattern - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstances = < - TData = Common.TaskInstanceServiceGetTaskInstancesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstances({ - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }) as TData, - ...options, - }); -/** - * Get Task Instance Try Details - * Get task instance details by try number. - * @param data The data for the request. 
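Combining the wildcards from the docstring, a sketch that watches currently running task instances across all DAGs and runs:

```typescript
import { useTaskInstanceServiceGetTaskInstances } from "openapi/queries"; // assumed import path

export const useRunningTaskInstances = () =>
  useTaskInstanceServiceGetTaskInstances({
    dagId: "~", // wildcards per the endpoint docstring
    dagRunId: "~",
    limit: 100,
    state: ["running"],
  });
```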
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceTryDetails = < - TData = Common.TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn( - { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstanceTryDetails({ - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }) as TData, - ...options, - }); -/** - * Get Mapped Task Instance Try Details - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstanceTryDetails = < - TData = Common.TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn( - { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getMappedTaskInstanceTryDetails({ - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }) as TData, - ...options, - }); -/** - * Get Log - * Get logs for a specific task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.tryNumber - * @param data.fullContent - * @param data.mapIndex - * @param data.token - * @param data.accept - * @returns TaskInstancesLogResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetLog = < - TData = Common.TaskInstanceServiceGetLogDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskInstanceServiceGetLogKeyFn( - { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getLog({ - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }) as TData, - ...options, - }); -/** - * Get Import Error - * Get an import error. - * @param data The data for the request. 
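A log-fetching sketch; `accept` is the generated literal union, and `"text/plain"` would return raw text instead of the structured response:

```typescript
import { useTaskInstanceServiceGetLog } from "openapi/queries"; // assumed import path

export const useTaskLog = (dagId: string, dagRunId: string, taskId: string, tryNumber: number) =>
  useTaskInstanceServiceGetLog({
    accept: "application/json",
    dagId,
    dagRunId,
    taskId,
    tryNumber, // 1-based attempt number
  });
```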
- * @param data.importErrorId - * @returns ImportErrorResponse Successful Response - * @throws ApiError - */ -export const useImportErrorServiceGetImportError = < - TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - importErrorId, - }: { - importErrorId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), - queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, - ...options, - }); -/** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ -export const useImportErrorServiceGetImportErrors = < - TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), - queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Jobs - * Get all jobs. - * @param data The data for the request. - * @param data.isAlive - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.jobState - * @param data.jobType - * @param data.hostname - * @param data.executorClass - * @returns JobCollectionResponse Successful Response - * @throws ApiError - */ -export const useJobServiceGetJobs = < - TData = Common.JobServiceGetJobsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseJobServiceGetJobsKeyFn( - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }, - queryKey, - ), - queryFn: () => - JobService.getJobs({ - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }) as TData, - ...options, - }); -/** - * Get Plugins - * @param data The data for the request. 
- * @param data.limit
- * @param data.offset
- * @returns PluginCollectionResponse Successful Response
- * @throws ApiError
- */
-export const usePluginServiceGetPlugins = <
-  TData = Common.PluginServiceGetPluginsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    limit,
-    offset,
-  }: {
-    limit?: number;
-    offset?: number;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey),
-    queryFn: () => PluginService.getPlugins({ limit, offset }) as TData,
-    ...options,
-  });
-/**
- * Get Pool
- * Get a pool.
- * @param data The data for the request.
- * @param data.poolName
- * @returns PoolResponse Successful Response
- * @throws ApiError
- */
-export const usePoolServiceGetPool = <
-  TData = Common.PoolServiceGetPoolDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    poolName,
-  }: {
-    poolName: string;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey),
-    queryFn: () => PoolService.getPool({ poolName }) as TData,
-    ...options,
-  });
-/**
- * Get Pools
- * Get all pools entries.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.poolNamePattern
- * @returns PoolCollectionResponse Successful Response
- * @throws ApiError
- */
-export const usePoolServiceGetPools = <
-  TData = Common.PoolServiceGetPoolsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    limit,
-    offset,
-    orderBy,
-    poolNamePattern,
-  }: {
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    poolNamePattern?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey),
-    queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData,
-    ...options,
-  });
-/**
- * Get Providers
- * Get providers.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @returns ProviderCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useProviderServiceGetProviders = <
-  TData = Common.ProviderServiceGetProvidersDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    limit,
-    offset,
-  }: {
-    limit?: number;
-    offset?: number;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey),
-    queryFn: () => ProviderService.getProviders({ limit, offset }) as TData,
-    ...options,
-  });
-/**
- * Get Xcom Entry
- * Get an XCom entry.
- * @param data The data for the request.
- * @param data.dagId - * @param data.taskId - * @param data.dagRunId - * @param data.xcomKey - * @param data.mapIndex - * @param data.deserialize - * @param data.stringify - * @returns unknown Successful Response - * @throws ApiError - */ -export const useXcomServiceGetXcomEntry = < - TData = Common.XcomServiceGetXcomEntryDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseXcomServiceGetXcomEntryKeyFn( - { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, - queryKey, - ), - queryFn: () => - XcomService.getXcomEntry({ - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }) as TData, - ...options, - }); -/** - * Get Xcom Entries - * Get all XCom entries. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.xcomKey - * @param data.mapIndex - * @param data.limit - * @param data.offset - * @returns XComCollectionResponse Successful Response - * @throws ApiError - */ -export const useXcomServiceGetXcomEntries = < - TData = Common.XcomServiceGetXcomEntriesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - limit, - mapIndex, - offset, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn( - { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, - queryKey, - ), - queryFn: () => - XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, - ...options, - }); -/** - * Get Tasks - * Get tasks for DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskServiceGetTasks = < - TData = Common.TaskServiceGetTasksDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - orderBy, - }: { - dagId: string; - orderBy?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), - queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, - ...options, - }); -/** - * Get Task - * Get simplified representation of a task. - * @param data The data for the request. 
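An XCom sketch; `"return_value"` is the conventional key under which a task's returned value is stored, and the flag semantics are inferred from the parameter names:

```typescript
import { useXcomServiceGetXcomEntry } from "openapi/queries"; // assumed import path

export const useTaskReturnValue = (dagId: string, dagRunId: string, taskId: string) =>
  useXcomServiceGetXcomEntry({
    dagId,
    dagRunId,
    stringify: true, // render the stored value as a string for display
    taskId,
    xcomKey: "return_value",
  });
```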
- * @param data.dagId
- * @param data.taskId
- * @returns TaskResponse Successful Response
- * @throws ApiError
- */
-export const useTaskServiceGetTask = <
-  TData = Common.TaskServiceGetTaskDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagId,
-    taskId,
-  }: {
-    dagId: string;
-    taskId: unknown;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey),
-    queryFn: () => TaskService.getTask({ dagId, taskId }) as TData,
-    ...options,
-  });
-/**
- * Get Variable
- * Get a variable entry.
- * @param data The data for the request.
- * @param data.variableKey
- * @returns VariableResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServiceGetVariable = <
-  TData = Common.VariableServiceGetVariableDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    variableKey,
-  }: {
-    variableKey: string;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey),
-    queryFn: () => VariableService.getVariable({ variableKey }) as TData,
-    ...options,
-  });
-/**
- * Get Variables
- * Get all Variables entries.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.variableKeyPattern
- * @returns VariableCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServiceGetVariables = <
-  TData = Common.VariableServiceGetVariablesDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    limit,
-    offset,
-    orderBy,
-    variableKeyPattern,
-  }: {
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    variableKeyPattern?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseVariableServiceGetVariablesKeyFn(
-      { limit, offset, orderBy, variableKeyPattern },
-      queryKey,
-    ),
-    queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData,
-    ...options,
-  });
-/**
- * Get Dag Version
- * Get one Dag Version.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.versionNumber
- * @returns DagVersionResponse Successful Response
- * @throws ApiError
- */
-export const useDagVersionServiceGetDagVersion = <
-  TData = Common.DagVersionServiceGetDagVersionDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagId,
-    versionNumber,
-  }: {
-    dagId: string;
-    versionNumber: number;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useQuery<TData, TError>({
-    queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey),
-    queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData,
-    ...options,
-  });
-/**
- * Get Dag Versions
- * Get all DAG Versions.
- *
- * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs.
- * @param data The data for the request.
- * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.versionNumber - * @param data.bundleName - * @param data.bundleVersion - * @param data.orderBy - * @returns DAGVersionCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagVersionServiceGetDagVersions = < - TData = Common.DagVersionServiceGetDagVersionsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn( - { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, - queryKey, - ), - queryFn: () => - DagVersionService.getDagVersions({ - bundleName, - bundleVersion, - dagId, - limit, - offset, - orderBy, - versionNumber, - }) as TData, - ...options, - }); -/** - * Get Health - * @returns HealthInfoResponse Successful Response - * @throws ApiError - */ -export const useMonitorServiceGetHealth = < - TData = Common.MonitorServiceGetHealthDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), - queryFn: () => MonitorService.getHealth() as TData, - ...options, - }); -/** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ -export const useVersionServiceGetVersion = < - TData = Common.VersionServiceGetVersionDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), - queryFn: () => VersionService.getVersion() as TData, - ...options, - }); -/** - * Login - * Redirect to the login URL depending on the AuthManager configured. - * @param data The data for the request. - * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ -export const useLoginServiceLogin = < - TData = Common.LoginServiceLoginDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - next, - }: { - next?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey), - queryFn: () => LoginService.login({ next }) as TData, - ...options, - }); -/** - * Logout - * Logout the user. - * @param data The data for the request. 
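Parameterless hooks such as `useMonitorServiceGetHealth` still accept the query-key suffix and options arguments, so polling is a one-liner:

```typescript
import { useMonitorServiceGetHealth } from "openapi/queries"; // assumed import path

// Re-check component health every 30 seconds.
export const useHealthPoll = () => useMonitorServiceGetHealth(undefined, { refetchInterval: 30_000 });
```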
- * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ -export const useLoginServiceLogout = < - TData = Common.LoginServiceLogoutDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - next, - }: { - next?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey), - queryFn: () => LoginService.logout({ next }) as TData, - ...options, - }); -/** - * Get Auth Menus - * @returns MenuItemCollectionResponse Successful Response - * @throws ApiError - */ -export const useAuthLinksServiceGetAuthMenus = < - TData = Common.AuthLinksServiceGetAuthMenusDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey), - queryFn: () => AuthLinksService.getAuthMenus() as TData, - ...options, - }); -/** - * Recent Dag Runs - * Get recent DAG runs. - * @param data The data for the request. - * @param data.dagRunsLimit - * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIds - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @returns DAGWithLatestDagRunsCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagsServiceRecentDagRuns = < - TData = Common.DagsServiceRecentDagRunsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDagsServiceRecentDagRunsKeyFn( - { - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }, - queryKey, - ), - queryFn: () => - DagsService.recentDagRuns({ - dagDisplayNamePattern, - dagIdPattern, - dagIds, - dagRunsLimit, - excludeStale, - lastDagRunState, - limit, - offset, - owners, - paused, - tags, - tagsMatchMode, - }) as TData, - ...options, - }); -/** - * Get Dependencies - * Dependencies graph. - * @param data The data for the request. - * @param data.nodeId - * @returns BaseGraphResponse Successful Response - * @throws ApiError - */ -export const useDependenciesServiceGetDependencies = < - TData = Common.DependenciesServiceGetDependenciesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - nodeId, - }: { - nodeId?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey), - queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData, - ...options, - }); -/** - * Historical Metrics - * Return cluster activity historical metrics. 
- * @param data The data for the request. - * @param data.startDate - * @param data.endDate - * @returns HistoricalMetricDataResponse Successful Response - * @throws ApiError - */ -export const useDashboardServiceHistoricalMetrics = < - TData = Common.DashboardServiceHistoricalMetricsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - endDate, - startDate, - }: { - endDate?: string; - startDate: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey), - queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData, - ...options, - }); -/** - * Structure Data - * Get Structure Data. - * @param data The data for the request. - * @param data.dagId - * @param data.includeUpstream - * @param data.includeDownstream - * @param data.root - * @param data.externalDependencies - * @param data.versionNumber - * @returns StructureDataResponse Successful Response - * @throws ApiError - */ -export const useStructureServiceStructureData = < - TData = Common.StructureServiceStructureDataDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - externalDependencies, - includeDownstream, - includeUpstream, - root, - versionNumber, - }: { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string; - versionNumber?: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseStructureServiceStructureDataKeyFn( - { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, - queryKey, - ), - queryFn: () => - StructureService.structureData({ - dagId, - externalDependencies, - includeDownstream, - includeUpstream, - root, - versionNumber, - }) as TData, - ...options, - }); -/** - * Grid Data - * Return grid data. - * @param data The data for the request. 
- * @param data.dagId - * @param data.includeUpstream - * @param data.includeDownstream - * @param data.root - * @param data.offset - * @param data.runType - * @param data.state - * @param data.limit - * @param data.orderBy - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @returns GridResponse Successful Response - * @throws ApiError - */ -export const useGridServiceGridData = < - TData = Common.GridServiceGridDataDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - includeDownstream, - includeUpstream, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - root, - runAfterGte, - runAfterLte, - runType, - state, - }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useQuery({ - queryKey: Common.UseGridServiceGridDataKeyFn( - { - dagId, - includeDownstream, - includeUpstream, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - root, - runAfterGte, - runAfterLte, - runType, - state, - }, - queryKey, - ), - queryFn: () => - GridService.gridData({ - dagId, - includeDownstream, - includeUpstream, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - root, - runAfterGte, - runAfterLte, - runType, - state, - }) as TData, - ...options, - }); -/** - * Create Asset Event - * Create asset events. - * @param data The data for the request. - * @param data.requestBody - * @returns AssetEventResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceCreateAssetEvent = < - TData = Common.AssetServiceCreateAssetEventMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - requestBody: CreateAssetEventsBody; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - requestBody: CreateAssetEventsBody; - }, - TContext - >({ - mutationFn: ({ requestBody }) => - AssetService.createAssetEvent({ requestBody }) as unknown as Promise, - ...options, - }); -/** - * Materialize Asset - * Materialize an asset by triggering a DAG run that produces it. - * @param data The data for the request. - * @param data.assetId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceMaterializeAsset = < - TData = Common.AssetServiceMaterializeAssetMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - assetId: number; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - assetId: number; - }, - TContext - >({ - mutationFn: ({ assetId }) => AssetService.materializeAsset({ assetId }) as unknown as Promise, - ...options, - }); -/** - * Create Backfill - * @param data The data for the request. 
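The mutation hooks take only react-query options up front; the payload arrives at `mutate` time. A sketch for emitting a manual asset event, where the `asset_id`/`extra` field names are assumptions about `CreateAssetEventsBody`:

```typescript
import { useAssetServiceCreateAssetEvent } from "openapi/queries"; // assumed import path

export const useManualAssetEvent = () => {
  const { isPending, mutate } = useAssetServiceCreateAssetEvent({
    onError: (error) => console.error("creating asset event failed", error),
  });

  // Field names assumed from the REST schema, not from this diff.
  const emit = (assetId: number) => mutate({ requestBody: { asset_id: assetId, extra: {} } });

  return { emit, isPending };
};
```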
- * @param data.requestBody - * @returns BackfillResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceCreateBackfill = < - TData = Common.BackfillServiceCreateBackfillMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - requestBody: BackfillPostBody; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - requestBody: BackfillPostBody; - }, - TContext - >({ - mutationFn: ({ requestBody }) => - BackfillService.createBackfill({ requestBody }) as unknown as Promise, - ...options, - }); -/** - * Create Backfill Dry Run - * @param data The data for the request. - * @param data.requestBody - * @returns DryRunBackfillCollectionResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceCreateBackfillDryRun = < - TData = Common.BackfillServiceCreateBackfillDryRunMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - requestBody: BackfillPostBody; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - requestBody: BackfillPostBody; - }, - TContext - >({ - mutationFn: ({ requestBody }) => - BackfillService.createBackfillDryRun({ requestBody }) as unknown as Promise, - ...options, - }); -/** - * Post Connection - * Create connection entry. - * @param data The data for the request. - * @param data.requestBody - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ -export const useConnectionServicePostConnection = < - TData = Common.ConnectionServicePostConnectionMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - requestBody: ConnectionBody; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - requestBody: ConnectionBody; - }, - TContext - >({ - mutationFn: ({ requestBody }) => - ConnectionService.postConnection({ requestBody }) as unknown as Promise, - ...options, - }); -/** - * Test Connection - * Test an API connection. - * - * This method first creates an in-memory transient conn_id & exports that to an env var, - * as some hook classes tries to find out the `conn` from their __init__ method & errors out if not found. - * It also deletes the conn id env connection after the test. - * @param data The data for the request. - * @param data.requestBody - * @returns ConnectionTestResponse Successful Response - * @throws ApiError - */ -export const useConnectionServiceTestConnection = < - TData = Common.ConnectionServiceTestConnectionMutationResult, - TError = unknown, - TContext = unknown, ->( - options?: Omit< - UseMutationOptions< - TData, - TError, - { - requestBody: ConnectionBody; - }, - TContext - >, - "mutationFn" - >, -) => - useMutation< - TData, - TError, - { - requestBody: ConnectionBody; - }, - TContext - >({ - mutationFn: ({ requestBody }) => - ConnectionService.testConnection({ requestBody }) as unknown as Promise, - ...options, - }); -/** - * Create Default Connections - * Create default connections. 
- * @returns void Successful Response
- * @throws ApiError
- */
-export const useConnectionServiceCreateDefaultConnections = <
-  TData = Common.ConnectionServiceCreateDefaultConnectionsMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<UseMutationOptions<TData, TError, void, TContext>, "mutationFn">,
-) =>
-  useMutation<TData, TError, void, TContext>({
-    mutationFn: () => ConnectionService.createDefaultConnections() as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Clear Dag Run
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.requestBody
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const useDagRunServiceClearDagRun = <
-  TData = Common.DagRunServiceClearDagRunMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        requestBody: DAGRunClearBody;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      requestBody: DAGRunClearBody;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, requestBody }) =>
-      DagRunService.clearDagRun({ dagId, dagRunId, requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Trigger Dag Run
- * Trigger a DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.requestBody
- * @returns DAGRunResponse Successful Response
- * @throws ApiError
- */
-export const useDagRunServiceTriggerDagRun = <
-  TData = Common.DagRunServiceTriggerDagRunMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: unknown;
-        requestBody: TriggerDAGRunPostBody;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: unknown;
-      requestBody: TriggerDAGRunPostBody;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, requestBody }) =>
-      DagRunService.triggerDagRun({ dagId, requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Get List Dag Runs Batch
- * Get a list of DAG Runs.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.requestBody
- * @returns DAGRunCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useDagRunServiceGetListDagRunsBatch = <
-  TData = Common.DagRunServiceGetListDagRunsBatchMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: "~";
-        requestBody: DAGRunsBatchBody;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: "~";
-      requestBody: DAGRunsBatchBody;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, requestBody }) =>
-      DagRunService.getListDagRunsBatch({ dagId, requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Get Task Instances Batch
- * Get list of task instances.
- * @param data The data for the request.
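Because the `Common.*KeyFn` factories shown throughout this file build the query keys, a mutation can invalidate the matching cached queries once it succeeds. A sketch around `useDagRunServiceTriggerDagRun` (the import paths and the `logical_date` body field are assumptions):

```typescript
import { useQueryClient } from "@tanstack/react-query";
import { useDagRunServiceTriggerDagRun } from "openapi/queries"; // assumed import path
import * as Common from "openapi/queries/common"; // assumed path to the key factories

export const useTriggerDagRunWithRefresh = (dagId: string) => {
  const queryClient = useQueryClient();

  return useDagRunServiceTriggerDagRun({
    // Drop the cached run list for this DAG so it refetches with the new run.
    onSuccess: () =>
      queryClient.invalidateQueries({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId }) }),
  });
};

// Later: mutate({ dagId, requestBody: { logical_date: null } });
```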
- * @param data.dagId
- * @param data.dagRunId
- * @param data.requestBody
- * @returns TaskInstanceCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useTaskInstanceServiceGetTaskInstancesBatch = <
-  TData = Common.TaskInstanceServiceGetTaskInstancesBatchMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: "~";
-        dagRunId: "~";
-        requestBody: TaskInstancesBatchBody;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: "~";
-      dagRunId: "~";
-      requestBody: TaskInstancesBatchBody;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, requestBody }) =>
-      TaskInstanceService.getTaskInstancesBatch({
-        dagId,
-        dagRunId,
-        requestBody,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Post Clear Task Instances
- * Clear task instances.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.requestBody
- * @returns TaskInstanceCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useTaskInstanceServicePostClearTaskInstances = <
-  TData = Common.TaskInstanceServicePostClearTaskInstancesMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        requestBody: ClearTaskInstancesBody;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      requestBody: ClearTaskInstancesBody;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, requestBody }) =>
-      TaskInstanceService.postClearTaskInstances({ dagId, requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Post Pool
- * Create a Pool.
- * @param data The data for the request.
- * @param data.requestBody
- * @returns PoolResponse Successful Response
- * @throws ApiError
- */
-export const usePoolServicePostPool = <
-  TData = Common.PoolServicePostPoolMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        requestBody: PoolBody;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      requestBody: PoolBody;
-    },
-    TContext
-  >({
-    mutationFn: ({ requestBody }) => PoolService.postPool({ requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Create Xcom Entry
- * Create an XCom entry.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.taskId
- * @param data.dagRunId
- * @param data.requestBody
- * @returns XComResponseNative Successful Response
- * @throws ApiError
- */
-export const useXcomServiceCreateXcomEntry = <
-  TData = Common.XcomServiceCreateXcomEntryMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        requestBody: XComCreateBody;
-        taskId: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      requestBody: XComCreateBody;
-      taskId: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, requestBody, taskId }) =>
-      XcomService.createXcomEntry({ dagId, dagRunId, requestBody, taskId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Post Variable
- * Create a variable.
- * @param data The data for the request.
- * @param data.requestBody
- * @returns VariableResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServicePostVariable = <
-  TData = Common.VariableServicePostVariableMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        requestBody: VariableBody;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      requestBody: VariableBody;
-    },
-    TContext
-  >({
-    mutationFn: ({ requestBody }) =>
-      VariableService.postVariable({ requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Pause Backfill
- * @param data The data for the request.
- * @param data.backfillId
- * @returns BackfillResponse Successful Response
- * @throws ApiError
- */
-export const useBackfillServicePauseBackfill = <
-  TData = Common.BackfillServicePauseBackfillMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        backfillId: unknown;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      backfillId: unknown;
-    },
-    TContext
-  >({
-    mutationFn: ({ backfillId }) =>
-      BackfillService.pauseBackfill({ backfillId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Unpause Backfill
- * @param data The data for the request.
- * @param data.backfillId
- * @returns BackfillResponse Successful Response
- * @throws ApiError
- */
-export const useBackfillServiceUnpauseBackfill = <
-  TData = Common.BackfillServiceUnpauseBackfillMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        backfillId: unknown;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      backfillId: unknown;
-    },
-    TContext
-  >({
-    mutationFn: ({ backfillId }) =>
-      BackfillService.unpauseBackfill({ backfillId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Cancel Backfill
- * @param data The data for the request.
- * @param data.backfillId
- * @returns BackfillResponse Successful Response
- * @throws ApiError
- */
-export const useBackfillServiceCancelBackfill = <
-  TData = Common.BackfillServiceCancelBackfillMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        backfillId: unknown;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      backfillId: unknown;
-    },
-    TContext
-  >({
-    mutationFn: ({ backfillId }) =>
-      BackfillService.cancelBackfill({ backfillId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Reparse Dag File
- * Request re-parsing a DAG file.
- * @param data The data for the request.
- * @param data.fileToken
- * @returns null Successful Response
- * @throws ApiError
- */
-export const useDagParsingServiceReparseDagFile = <
-  TData = Common.DagParsingServiceReparseDagFileMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        fileToken: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      fileToken: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ fileToken }) =>
-      DagParsingService.reparseDagFile({ fileToken }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Connection
- * Update a connection entry.
- * @param data The data for the request.
- * @param data.connectionId
- * @param data.requestBody
- * @param data.updateMask
- * @returns ConnectionResponse Successful Response
- * @throws ApiError
- */
-export const useConnectionServicePatchConnection = <
-  TData = Common.ConnectionServicePatchConnectionMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        connectionId: string;
-        requestBody: ConnectionBody;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      connectionId: string;
-      requestBody: ConnectionBody;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ connectionId, requestBody, updateMask }) =>
-      ConnectionService.patchConnection({
-        connectionId,
-        requestBody,
-        updateMask,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Bulk Connections
- * Bulk create, update, and delete connections.
- * @param data The data for the request.
- * @param data.requestBody
- * @returns BulkResponse Successful Response
- * @throws ApiError
- */
-export const useConnectionServiceBulkConnections = <
-  TData = Common.ConnectionServiceBulkConnectionsMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        requestBody: BulkBody_ConnectionBody_;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      requestBody: BulkBody_ConnectionBody_;
-    },
-    TContext
-  >({
-    mutationFn: ({ requestBody }) =>
-      ConnectionService.bulkConnections({ requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Dag Run
- * Modify a DAG Run.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.requestBody
- * @param data.updateMask
- * @returns DAGRunResponse Successful Response
- * @throws ApiError
- */
-export const useDagRunServicePatchDagRun = <
-  TData = Common.DagRunServicePatchDagRunMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        requestBody: DAGRunPatchBody;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      requestBody: DAGRunPatchBody;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, requestBody, updateMask }) =>
-      DagRunService.patchDagRun({ dagId, dagRunId, requestBody, updateMask }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Dags
- * Patch multiple DAGs.
- * @param data The data for the request.
- * @param data.requestBody
- * @param data.updateMask
- * @param data.limit
- * @param data.offset
- * @param data.tags
- * @param data.tagsMatchMode
- * @param data.owners
- * @param data.dagIdPattern
- * @param data.excludeStale
- * @param data.paused
- * @param data.lastDagRunState
- * @returns DAGCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useDagServicePatchDags = <
-  TData = Common.DagServicePatchDagsMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagIdPattern?: string;
-        excludeStale?: boolean;
-        lastDagRunState?: DagRunState;
-        limit?: number;
-        offset?: number;
-        owners?: string[];
-        paused?: boolean;
-        requestBody: DAGPatchBody;
-        tags?: string[];
-        tagsMatchMode?: "any" | "all";
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagIdPattern?: string;
-      excludeStale?: boolean;
-      lastDagRunState?: DagRunState;
-      limit?: number;
-      offset?: number;
-      owners?: string[];
-      paused?: boolean;
-      requestBody: DAGPatchBody;
-      tags?: string[];
-      tagsMatchMode?: "any" | "all";
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({
-      dagIdPattern,
-      excludeStale,
-      lastDagRunState,
-      limit,
-      offset,
-      owners,
-      paused,
-      requestBody,
-      tags,
-      tagsMatchMode,
-      updateMask,
-    }) =>
-      DagService.patchDags({
-        dagIdPattern,
-        excludeStale,
-        lastDagRunState,
-        limit,
-        offset,
-        owners,
-        paused,
-        requestBody,
-        tags,
-        tagsMatchMode,
-        updateMask,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Dag
- * Patch the specific DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.requestBody
- * @param data.updateMask
- * @returns DAGResponse Successful Response
- * @throws ApiError
- */
-export const useDagServicePatchDag = <
-  TData = Common.DagServicePatchDagMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        requestBody: DAGPatchBody;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      requestBody: DAGPatchBody;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, requestBody, updateMask }) =>
-      DagService.patchDag({ dagId, requestBody, updateMask }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Task Instance
- * Update a task instance.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.requestBody
- * @param data.mapIndex
- * @param data.updateMask
- * @returns TaskInstanceResponse Successful Response
- * @throws ApiError
- */
-export const useTaskInstanceServicePatchTaskInstance = <
-  TData = Common.TaskInstanceServicePatchTaskInstanceMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        mapIndex?: number;
-        requestBody: PatchTaskInstanceBody;
-        taskId: string;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      mapIndex?: number;
-      requestBody: PatchTaskInstanceBody;
-      taskId: string;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) =>
-      TaskInstanceService.patchTaskInstance({
-        dagId,
-        dagRunId,
-        mapIndex,
-        requestBody,
-        taskId,
-        updateMask,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Task Instance
- * Update a task instance.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @param data.requestBody
- * @param data.updateMask
- * @returns TaskInstanceResponse Successful Response
- * @throws ApiError
- */
-export const useTaskInstanceServicePatchTaskInstance1 = <
-  TData = Common.TaskInstanceServicePatchTaskInstance1MutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        mapIndex: number;
-        requestBody: PatchTaskInstanceBody;
-        taskId: string;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      mapIndex: number;
-      requestBody: PatchTaskInstanceBody;
-      taskId: string;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) =>
-      TaskInstanceService.patchTaskInstance1({
-        dagId,
-        dagRunId,
-        mapIndex,
-        requestBody,
-        taskId,
-        updateMask,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Task Instance Dry Run
- * Update a task instance dry_run mode.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.mapIndex
- * @param data.requestBody
- * @param data.updateMask
- * @returns TaskInstanceCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useTaskInstanceServicePatchTaskInstanceDryRun = <
-  TData = Common.TaskInstanceServicePatchTaskInstanceDryRunMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        mapIndex: number;
-        requestBody: PatchTaskInstanceBody;
-        taskId: string;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      mapIndex: number;
-      requestBody: PatchTaskInstanceBody;
-      taskId: string;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) =>
-      TaskInstanceService.patchTaskInstanceDryRun({
-        dagId,
-        dagRunId,
-        mapIndex,
-        requestBody,
-        taskId,
-        updateMask,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Task Instance Dry Run
- * Update a task instance dry_run mode.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @param data.taskId
- * @param data.requestBody
- * @param data.mapIndex
- * @param data.updateMask
- * @returns TaskInstanceCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useTaskInstanceServicePatchTaskInstanceDryRun1 = <
-  TData = Common.TaskInstanceServicePatchTaskInstanceDryRun1MutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        mapIndex?: number;
-        requestBody: PatchTaskInstanceBody;
-        taskId: string;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      mapIndex?: number;
-      requestBody: PatchTaskInstanceBody;
-      taskId: string;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) =>
-      TaskInstanceService.patchTaskInstanceDryRun1({
-        dagId,
-        dagRunId,
-        mapIndex,
-        requestBody,
-        taskId,
-        updateMask,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Pool
- * Update a Pool.
- * @param data The data for the request.
- * @param data.poolName
- * @param data.requestBody
- * @param data.updateMask
- * @returns PoolResponse Successful Response
- * @throws ApiError
- */
-export const usePoolServicePatchPool = <
-  TData = Common.PoolServicePatchPoolMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        poolName: string;
-        requestBody: PoolPatchBody;
-        updateMask?: string[];
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      poolName: string;
-      requestBody: PoolPatchBody;
-      updateMask?: string[];
-    },
-    TContext
-  >({
-    mutationFn: ({ poolName, requestBody, updateMask }) =>
-      PoolService.patchPool({ poolName, requestBody, updateMask }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Bulk Pools
- * Bulk create, update, and delete pools.
- * @param data The data for the request.
- * @param data.requestBody
- * @returns BulkResponse Successful Response
- * @throws ApiError
- */
-export const usePoolServiceBulkPools = <
-  TData = Common.PoolServiceBulkPoolsMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        requestBody: BulkBody_PoolBody_;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      requestBody: BulkBody_PoolBody_;
-    },
-    TContext
-  >({
-    mutationFn: ({ requestBody }) => PoolService.bulkPools({ requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Update Xcom Entry
- * Update an existing XCom entry.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.taskId
- * @param data.dagRunId
- * @param data.xcomKey
- * @param data.requestBody
- * @returns XComResponseNative Successful Response
- * @throws ApiError
- */
-export const useXcomServiceUpdateXcomEntry = <
-  TData = Common.XcomServiceUpdateXcomEntryMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-        requestBody: XComUpdateBody;
-        taskId: string;
-        xcomKey: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-      requestBody: XComUpdateBody;
-      taskId: string;
-      xcomKey: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId, requestBody, taskId, xcomKey }) =>
-      XcomService.updateXcomEntry({
-        dagId,
-        dagRunId,
-        requestBody,
-        taskId,
-        xcomKey,
-      }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Patch Variable
- * Update a variable by key.
- * @param data The data for the request.
- * @param data.variableKey
- * @param data.requestBody
- * @param data.updateMask
- * @returns VariableResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServicePatchVariable = <
-  TData = Common.VariableServicePatchVariableMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        requestBody: VariableBody;
-        updateMask?: string[];
-        variableKey: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      requestBody: VariableBody;
-      updateMask?: string[];
-      variableKey: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ requestBody, updateMask, variableKey }) =>
-      VariableService.patchVariable({ requestBody, updateMask, variableKey }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Bulk Variables
- * Bulk create, update, and delete variables.
- * @param data The data for the request.
- * @param data.requestBody
- * @returns BulkResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServiceBulkVariables = <
-  TData = Common.VariableServiceBulkVariablesMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        requestBody: BulkBody_VariableBody_;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      requestBody: BulkBody_VariableBody_;
-    },
-    TContext
-  >({
-    mutationFn: ({ requestBody }) =>
-      VariableService.bulkVariables({ requestBody }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Delete Asset Queued Events
- * Delete queued asset events for an asset.
- * @param data The data for the request.
- * @param data.assetId
- * @param data.before
- * @returns void Successful Response
- * @throws ApiError
- */
-export const useAssetServiceDeleteAssetQueuedEvents = <
-  TData = Common.AssetServiceDeleteAssetQueuedEventsMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        assetId: number;
-        before?: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      assetId: number;
-      before?: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ assetId, before }) =>
-      AssetService.deleteAssetQueuedEvents({ assetId, before }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Delete Dag Asset Queued Events
- * @param data The data for the request.
- * @param data.dagId
- * @param data.before
- * @returns void Successful Response
- * @throws ApiError
- */
-export const useAssetServiceDeleteDagAssetQueuedEvents = <
-  TData = Common.AssetServiceDeleteDagAssetQueuedEventsMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        before?: string;
-        dagId: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      before?: string;
-      dagId: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ before, dagId }) =>
-      AssetService.deleteDagAssetQueuedEvents({ before, dagId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Delete Dag Asset Queued Event
- * Delete a queued asset event for a DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.assetId
- * @param data.before
- * @returns void Successful Response
- * @throws ApiError
- */
-export const useAssetServiceDeleteDagAssetQueuedEvent = <
-  TData = Common.AssetServiceDeleteDagAssetQueuedEventMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        assetId: number;
-        before?: string;
-        dagId: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      assetId: number;
-      before?: string;
-      dagId: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ assetId, before, dagId }) =>
-      AssetService.deleteDagAssetQueuedEvent({ assetId, before, dagId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Delete Connection
- * Delete a connection entry.
- * @param data The data for the request.
- * @param data.connectionId
- * @returns void Successful Response
- * @throws ApiError
- */
-export const useConnectionServiceDeleteConnection = <
-  TData = Common.ConnectionServiceDeleteConnectionMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        connectionId: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      connectionId: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ connectionId }) =>
-      ConnectionService.deleteConnection({ connectionId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Delete Dag Run
- * Delete a DAG Run entry.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.dagRunId
- * @returns void Successful Response
- * @throws ApiError
- */
-export const useDagRunServiceDeleteDagRun = <
-  TData = Common.DagRunServiceDeleteDagRunMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-        dagRunId: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-      dagRunId: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ dagId, dagRunId }) =>
-      DagRunService.deleteDagRun({ dagId, dagRunId }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Delete Dag
- * Delete the specific DAG.
- * @param data The data for the request.
- * @param data.dagId
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const useDagServiceDeleteDag = <
-  TData = Common.DagServiceDeleteDagMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        dagId: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      dagId: string;
-    },
-    TContext
-  >({ mutationFn: ({ dagId }) => DagService.deleteDag({ dagId }) as unknown as Promise<TData>, ...options });
-/**
- * Delete Pool
- * Delete a pool entry.
- * @param data The data for the request.
- * @param data.poolName
- * @returns void Successful Response
- * @throws ApiError
- */
-export const usePoolServiceDeletePool = <
-  TData = Common.PoolServiceDeletePoolMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        poolName: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      poolName: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ poolName }) => PoolService.deletePool({ poolName }) as unknown as Promise<TData>,
-    ...options,
-  });
-/**
- * Delete Variable
- * Delete a variable entry.
- * @param data The data for the request.
- * @param data.variableKey
- * @returns void Successful Response
- * @throws ApiError
- */
-export const useVariableServiceDeleteVariable = <
-  TData = Common.VariableServiceDeleteVariableMutationResult,
-  TError = unknown,
-  TContext = unknown,
->(
-  options?: Omit<
-    UseMutationOptions<
-      TData,
-      TError,
-      {
-        variableKey: string;
-      },
-      TContext
-    >,
-    "mutationFn"
-  >,
-) =>
-  useMutation<
-    TData,
-    TError,
-    {
-      variableKey: string;
-    },
-    TContext
-  >({
-    mutationFn: ({ variableKey }) =>
-      VariableService.deleteVariable({ variableKey }) as unknown as Promise<TData>,
-    ...options,
-  });
+/**
+* Get Assets
+* Get assets.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.uriPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagIds
+* @param data.onlyActive
+* @param data.orderBy
+* @returns AssetCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssets = <TData = Common.AssetServiceGetAssetsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: {
+  dagIds?: string[];
+  limit?: number;
+  namePattern?: string;
+  offset?: number;
+  onlyActive?: boolean;
+  orderBy?: string[];
+  uriPattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }, queryKey), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) as TData, ...options });
+/**
+* Get Asset Aliases
+* Get asset aliases.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.orderBy
+* @returns AssetAliasCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetAliases = <TData = Common.AssetServiceGetAssetAliasesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, namePattern, offset, orderBy }: {
+  limit?: number;
+  namePattern?: string;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, ...options });
+/**
+* Get Asset Alias
+* Get an asset alias.
+* @param data The data for the request.
+* @param data.assetAliasId
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetAlias = <TData = Common.AssetServiceGetAssetAliasDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetAliasId }: {
+  assetAliasId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, ...options });
+/**
+* Get Asset Events
+* Get asset events.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.assetId
+* @param data.sourceDagId
+* @param data.sourceTaskId
+* @param data.sourceRunId
+* @param data.sourceMapIndex
+* @param data.timestampGte
+* @param data.timestampLte
+* @returns AssetEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetEvents = <TData = Common.AssetServiceGetAssetEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: {
+  assetId?: number;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  sourceDagId?: string;
+  sourceMapIndex?: number;
+  sourceRunId?: string;
+  sourceTaskId?: string;
+  timestampGte?: string;
+  timestampLte?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }, queryKey), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) as TData, ...options });
+/**
+* Get Asset Queued Events
+* Get queued asset events for an asset.
+* @param data The data for the request.
+* @param data.assetId
+* @param data.before
+* @returns QueuedEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetQueuedEvents = <TData = Common.AssetServiceGetAssetQueuedEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId, before }: {
+  assetId: number;
+  before?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, ...options });
+/**
+* Get Asset
+* Get an asset.
+* @param data The data for the request.
+* @param data.assetId
+* @returns AssetResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAsset = <TData = Common.AssetServiceGetAssetDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId }: {
+  assetId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), queryFn: () => AssetService.getAsset({ assetId }) as TData, ...options });
+/**
+* Get Dag Asset Queued Events
+* Get queued asset events for a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.before
+* @returns QueuedEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetDagAssetQueuedEvents = <TData = Common.AssetServiceGetDagAssetQueuedEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ before, dagId }: {
+  before?: string;
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, ...options });
+/**
+* Get Dag Asset Queued Event
+* Get a queued asset event for a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.assetId
+* @param data.before
+* @returns QueuedEventResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetDagAssetQueuedEvent = <TData = Common.AssetServiceGetDagAssetQueuedEventDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId, before, dagId }: {
+  assetId: number;
+  before?: string;
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, ...options });
+/**
+* Next Run Assets
+* @param data The data for the request.
+* @param data.dagId
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceNextRunAssets = <TData = Common.AssetServiceNextRunAssetsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, ...options });
+/**
+* List Backfills
+* @param data The data for the request.
+* @param data.dagId
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns BackfillCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceListBackfills = <TData = Common.BackfillServiceListBackfillsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, ...options });
+/**
+* Get Backfill
+* @param data The data for the request.
+* @param data.backfillId
+* @returns BackfillResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceGetBackfill = <TData = Common.BackfillServiceGetBackfillDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ backfillId }: {
+  backfillId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, ...options });
+/**
+* List Backfills Ui
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.dagId
+* @param data.active
+* @returns BackfillCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceListBackfillsUi = <TData = Common.BackfillServiceListBackfillsUiDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ active, dagId, limit, offset, orderBy }: {
+  active?: boolean;
+  dagId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseBackfillServiceListBackfillsUiKeyFn({ active, dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfillsUi({ active, dagId, limit, offset, orderBy }) as TData, ...options });
+/**
+* Get Connection
+* Get a connection entry.
+* @param data The data for the request.
+* @param data.connectionId
+* @returns ConnectionResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceGetConnection = <TData = Common.ConnectionServiceGetConnectionDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ connectionId }: {
+  connectionId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, ...options });
+/**
+* Get Connections
+* Get all connection entries.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.connectionIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns ConnectionCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceGetConnections = <TData = Common.ConnectionServiceGetConnectionsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ connectionIdPattern, limit, offset, orderBy }: {
+  connectionIdPattern?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }, queryKey), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, ...options });
+/**
+* Hook Meta Data
+* Retrieve information about available connection types (hook classes) and their parameters.
+* @returns ConnectionHookMetaData Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceHookMetaData = <TData = Common.ConnectionServiceHookMetaDataDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), queryFn: () => ConnectionService.hookMetaData() as TData, ...options });
+/**
+* Get Dag Run
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @returns DAGRunResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceGetDagRun = <TData = Common.DagRunServiceGetDagRunDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId }: {
+  dagId: string;
+  dagRunId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, ...options });
+/**
+* Get Upstream Asset Events
+* If dag run is asset-triggered, return the asset events that triggered it.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @returns AssetEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceGetUpstreamAssetEvents = <TData = Common.DagRunServiceGetUpstreamAssetEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId }: {
+  dagId: string;
+  dagRunId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, ...options });
+/**
+* Get Dag Runs
+* Get all DAG Runs.
+*
+* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.limit
+* @param data.offset
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.updatedAtGte
+* @param data.updatedAtLte
+* @param data.runType
+* @param data.state
+* @param data.orderBy
+* @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.triggeringUserNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns DAGRunCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceGetDagRuns = <TData = Common.DagRunServiceGetDagRunsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }: {
+  dagId: string;
+  endDateGte?: string;
+  endDateLte?: string;
+  limit?: number;
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+  runIdPattern?: string;
+  runType?: string[];
+  startDateGte?: string;
+  startDateLte?: string;
+  state?: string[];
+  triggeringUserNamePattern?: string;
+  updatedAtGte?: string;
+  updatedAtLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }) as TData, ...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceWaitDagRunUntilFinished = <TData = Common.DagRunServiceWaitDagRunUntilFinishedDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, interval, result }: {
+  dagId: string;
+  dagRunId: string;
+  interval: number;
+  result?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useExperimentalServiceWaitDagRunUntilFinished = <TData = Common.ExperimentalServiceWaitDagRunUntilFinishedDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, interval, result }: {
+  dagId: string;
+  dagRunId: string;
+  interval: number;
+  result?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options });
+/**
+* Get Dag Source
+* Get source code using file token.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.versionNumber
+* @param data.accept
+* @returns DAGSourceResponse Successful Response
+* @throws ApiError
+*/
+export const useDagSourceServiceGetDagSource = <TData = Common.DagSourceServiceGetDagSourceDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, dagId, versionNumber }: {
+  accept?: "application/json" | "text/plain" | "*/*";
+  dagId: string;
+  versionNumber?: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, ...options });
+/**
+* Get Dag Stats
+* Get Dag statistics.
+* @param data The data for the request.
+* @param data.dagIds
+* @returns DagStatsCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagStatsServiceGetDagStats = <TData = Common.DagStatsServiceGetDagStatsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagIds }: {
+  dagIds?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, ...options });
+/**
+* Get Dag Reports
+* Get DAG report.
+* @param data The data for the request.
+* @param data.subdir
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagReportServiceGetDagReports = <TData = Common.DagReportServiceGetDagReportsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ subdir }: {
+  subdir: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey), queryFn: () => DagReportService.getDagReports({ subdir }) as TData, ...options });
+/**
+* Get Config
+* @param data The data for the request.
+* @param data.section
+* @param data.accept
+* @returns Config Successful Response
+* @throws ApiError
+*/
+export const useConfigServiceGetConfig = <TData = Common.ConfigServiceGetConfigDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, section }: {
+  accept?: "application/json" | "text/plain" | "*/*";
+  section?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey), queryFn: () => ConfigService.getConfig({ accept, section }) as TData, ...options });
+/**
+* Get Config Value
+* @param data The data for the request.
+* @param data.section
+* @param data.option
+* @param data.accept
+* @returns Config Successful Response
+* @throws ApiError
+*/
+export const useConfigServiceGetConfigValue = <TData = Common.ConfigServiceGetConfigValueDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, option, section }: {
+  accept?: "application/json" | "text/plain" | "*/*";
+  option: string;
+  section: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData, ...options });
+/**
+* Get Configs
+* Get configs for UI.
+* @returns ConfigResponse Successful Response
+* @throws ApiError
+*/
+export const useConfigServiceGetConfigs = <TData = Common.ConfigServiceGetConfigsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey), queryFn: () => ConfigService.getConfigs() as TData, ...options });
+/**
+* List Dag Warnings
+* Get a list of DAG warnings.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.warningType
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns DAGWarningCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagWarningServiceListDagWarnings = <TData = Common.DagWarningServiceListDagWarningsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy, warningType }: {
+  dagId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  warningType?: DagWarningType;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }, queryKey), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, ...options });
+/**
+* Get Dags
+* Get all DAGs.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.tags
+* @param data.tagsMatchMode
+* @param data.owners
+* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.excludeStale
+* @param data.paused
+* @param data.lastDagRunState
+* @param data.bundleName
+* @param data.bundleVersion
+* @param data.dagRunStartDateGte
+* @param data.dagRunStartDateLte
+* @param data.dagRunEndDateGte
+* @param data.dagRunEndDateLte
+* @param data.dagRunState
+* @param data.orderBy
+* @param data.isFavorite
+* @returns DAGCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDags = <TData = Common.DagServiceGetDagsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: {
+  bundleName?: string;
+  bundleVersion?: string;
+  dagDisplayNamePattern?: string;
+  dagIdPattern?: string;
+  dagRunEndDateGte?: string;
+  dagRunEndDateLte?: string;
+  dagRunStartDateGte?: string;
+  dagRunStartDateLte?: string;
+  dagRunState?: string[];
+  excludeStale?: boolean;
+  isFavorite?: boolean;
+  lastDagRunState?: DagRunState;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  owners?: string[];
+  paused?: boolean;
+  tags?: string[];
+  tagsMatchMode?: "any" | "all";
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagsKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options });
+/**
+* Get Dag
+* Get basic information about a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @returns DAGResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDag = <TData = Common.DagServiceGetDagDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDag({ dagId }) as TData, ...options });
+/**
+* Get Dag Details
+* Get details of DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @returns DAGDetailsResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagDetails = <TData = Common.DagServiceGetDagDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDagDetails({ dagId }) as TData, ...options });
+/**
+* Get Dag Tags
+* Get all DAG tags.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.tagNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns DAGTagCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagTags = <TData = Common.DagServiceGetDagTagsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy, tagNamePattern }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  tagNamePattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, ...options });
+/**
+* Get Dags
+* Get DAGs with recent DagRun.
+* @param data The data for the request.
+* @param data.dagRunsLimit
+* @param data.limit
+* @param data.offset
+* @param data.tags
+* @param data.tagsMatchMode
+* @param data.owners
+* @param data.dagIds
+* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.excludeStale
+* @param data.paused
+* @param data.lastDagRunState
+* @param data.bundleName
+* @param data.bundleVersion
+* @param data.orderBy
+* @param data.isFavorite
+* @returns DAGWithLatestDagRunsCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagsUi = <TData = Common.DagServiceGetDagsUiDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: {
+  bundleName?: string;
+  bundleVersion?: string;
+  dagDisplayNamePattern?: string;
+  dagIdPattern?: string;
+  dagIds?: string[];
+  dagRunsLimit?: number;
+  excludeStale?: boolean;
+  isFavorite?: boolean;
+  lastDagRunState?: DagRunState;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  owners?: string[];
+  paused?: boolean;
+  tags?: string[];
+  tagsMatchMode?: "any" | "all";
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDagsUi({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options });
+/**
+* Get Latest Run Info
+* Get latest run.
+* @param data The data for the request.
+* @param data.dagId
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetLatestRunInfo = <TData = Common.DagServiceGetLatestRunInfoDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagServiceGetLatestRunInfoKeyFn({ dagId }, queryKey), queryFn: () => DagService.getLatestRunInfo({ dagId }) as TData, ...options });
+/**
+* Get Event Log
+* @param data The data for the request.
+* @param data.eventLogId
+* @returns EventLogResponse Successful Response
+* @throws ApiError
+*/
+export const useEventLogServiceGetEventLog = <TData = Common.EventLogServiceGetEventLogDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ eventLogId }: {
+  eventLogId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, ...options });
+/**
+* Get Event Logs
+* Get all Event Logs.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.dagId
+* @param data.taskId
+* @param data.runId
+* @param data.mapIndex
+* @param data.tryNumber
+* @param data.owner
+* @param data.event
+* @param data.excludedEvents
+* @param data.includedEvents
+* @param data.before
+* @param data.after
+* @returns EventLogCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useEventLogServiceGetEventLogs = <TData = Common.EventLogServiceGetEventLogsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: {
+  after?: string;
+  before?: string;
+  dagId?: string;
+  event?: string;
+  excludedEvents?: string[];
+  includedEvents?: string[];
+  limit?: number;
+  mapIndex?: number;
+  offset?: number;
+  orderBy?: string[];
+  owner?: string;
+  runId?: string;
+  taskId?: string;
+  tryNumber?: number;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }, queryKey), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) as TData, ...options });
+/**
+* Get Extra Links
+* Get extra links for task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns ExtraLinkCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useExtraLinksServiceGetExtraLinks = <TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Extra Links
+* Get extra links for task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns ExtraLinkCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetExtraLinks = <TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instance
+* Get task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @returns TaskInstanceResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstance = <TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, ...options });
+/**
+* Get Mapped Task Instances
+* Get list of mapped task instances.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.updatedAtGte
+* @param data.updatedAtLte
+* @param data.durationGte
+* @param data.durationLte
+* @param data.state
+* @param data.pool
+* @param data.queue
+* @param data.executor
+* @param data.versionNumber
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstances = <TData = Common.TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: {
+  dagId: string;
+  dagRunId: string;
+  durationGte?: number;
+  durationLte?: number;
+  endDateGte?: string;
+  endDateLte?: string;
+  executor?: string[];
+  limit?: number;
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+  offset?: number;
+  orderBy?: string[];
+  pool?: string[];
+  queue?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+  startDateGte?: string;
+  startDateLte?: string;
+  state?: string[];
+  taskId: string;
+  updatedAtGte?: string;
+  updatedAtLte?: string;
+  versionNumber?: number[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options });
+/**
+* Get Task Instance Dependencies
+* Get dependencies blocking task from getting scheduled.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskDependencyCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndex = <TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instance Dependencies
+* Get dependencies blocking task from getting scheduled.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskDependencyCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceDependencies = <TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instance Tries
+* Get list of task instances history.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskInstanceHistoryCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceTries = <TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Mapped Task Instance Tries
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskInstanceHistoryCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstanceTries = <TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Mapped Task Instance
+* Get task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskInstanceResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstance = <TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instances
+* Get list of task instances.
+*
+* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs
+* and DAG runs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.updatedAtGte
+* @param data.updatedAtLte
+* @param data.durationGte
+* @param data.durationLte
+* @param data.taskDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.state
+* @param data.pool
+* @param data.queue
+* @param data.executor
+* @param data.versionNumber
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstances = <TData = Common.TaskInstanceServiceGetTaskInstancesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: {
+  dagId: string;
+  dagRunId: string;
+  durationGte?: number;
+  durationLte?: number;
+  endDateGte?: string;
+  endDateLte?: string;
+  executor?: string[];
+  limit?: number;
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+  offset?: number;
+  orderBy?: string[];
+  pool?: string[];
+  queue?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+  startDateGte?: string;
+  startDateLte?: string;
+  state?: string[];
+  taskDisplayNamePattern?: string;
+  taskId?: string;
+  updatedAtGte?: string;
+  updatedAtLte?: string;
+  versionNumber?: number[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options });
+/**
+* Get Task Instance Try Details
+* Get task instance details by try number.
+* @param data The data for the request.
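+* @example
+* // Hand-written usage sketch, not codegen output; the ids and try number are hypothetical.
+* const { data: secondTry } = useTaskInstanceServiceGetTaskInstanceTryDetails({ dagId: "example_dag", dagRunId: "manual__2025-01-01T00:00:00", taskId: "load", taskTryNumber: 2 });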
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.taskTryNumber
+* @param data.mapIndex
+* @returns TaskInstanceHistoryResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceTryDetails = <TData = Common.TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+  taskTryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options });
+/**
+* Get Mapped Task Instance Try Details
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.taskTryNumber
+* @param data.mapIndex
+* @returns TaskInstanceHistoryResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstanceTryDetails = <TData = Common.TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+  taskTryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options });
+/**
+* Get Log
+* Get logs for a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.tryNumber
+* @param data.fullContent
+* @param data.mapIndex
+* @param data.token
+* @param data.accept
+* @returns TaskInstancesLogResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetLog = <TData = Common.TaskInstanceServiceGetLogDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: {
+  accept?: "application/json" | "*/*" | "application/x-ndjson";
+  dagId: string;
+  dagRunId: string;
+  fullContent?: boolean;
+  mapIndex?: number;
+  taskId: string;
+  token?: string;
+  tryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, queryKey), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) as TData, ...options });
+/**
+* Get External Log Url
+* Get external log URL for a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.tryNumber
+* @param data.mapIndex
+* @returns ExternalLogUrlResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetExternalLogUrl = <TData = Common.TaskInstanceServiceGetExternalLogUrlDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId, tryNumber }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+  tryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetExternalLogUrlKeyFn({ dagId, dagRunId, mapIndex, taskId, tryNumber }, queryKey), queryFn: () => TaskInstanceService.getExternalLogUrl({ dagId, dagRunId, mapIndex, taskId, tryNumber }) as TData, ...options });
+/**
+* Get Import Error
+* Get an import error.
+* @param data The data for the request.
+* @param data.importErrorId
+* @returns ImportErrorResponse Successful Response
+* @throws ApiError
+*/
+export const useImportErrorServiceGetImportError = <TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ importErrorId }: {
+  importErrorId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, ...options });
+/**
+* Get Import Errors
+* Get all import errors.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns ImportErrorCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useImportErrorServiceGetImportErrors = <TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options });
+/**
+* Get Jobs
+* Get all jobs.
+* @param data The data for the request.
+* @param data.isAlive
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.jobState
+* @param data.jobType
+* @param data.hostname
+* @param data.executorClass
+* @returns JobCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useJobServiceGetJobs = <TData = Common.JobServiceGetJobsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: {
+  endDateGte?: string;
+  endDateLte?: string;
+  executorClass?: string;
+  hostname?: string;
+  isAlive?: boolean;
+  jobState?: string;
+  jobType?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  startDateGte?: string;
+  startDateLte?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }, queryKey), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) as TData, ...options });
+/**
+* Get Plugins
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @returns PluginCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const usePluginServiceGetPlugins = <TData = Common.PluginServiceGetPluginsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset }: {
+  limit?: number;
+  offset?: number;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey), queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, ...options });
+/**
+* Import Errors
+* @returns PluginImportErrorCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const usePluginServiceImportErrors = <TData = Common.PluginServiceImportErrorsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UsePluginServiceImportErrorsKeyFn(queryKey), queryFn: () => PluginService.importErrors() as TData, ...options });
+/**
+* Get Pool
+* Get a pool.
+* @param data The data for the request.
+* @param data.poolName
+* @returns PoolResponse Successful Response
+* @throws ApiError
+*/
+export const usePoolServiceGetPool = <TData = Common.PoolServiceGetPoolDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ poolName }: {
+  poolName: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), queryFn: () => PoolService.getPool({ poolName }) as TData, ...options });
+/**
+* Get Pools
+* Get all pool entries.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.poolNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns PoolCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const usePoolServiceGetPools = <TData = Common.PoolServiceGetPoolsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy, poolNamePattern }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  poolNamePattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData, ...options });
+/**
+* Get Providers
+* Get providers.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @returns ProviderCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useProviderServiceGetProviders = <TData = Common.ProviderServiceGetProvidersDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset }: {
+  limit?: number;
+  offset?: number;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey), queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, ...options });
+/**
+* Get Xcom Entry
+* Get an XCom entry.
+* @param data The data for the request.
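+* @example
+* // Hand-written usage sketch, not codegen output; ids are hypothetical. `deserialize` asks the
+* // server to return the deserialized XCom value rather than its stored representation.
+* const { data: xcom } = useXcomServiceGetXcomEntry({ dagId: "example_dag", dagRunId: "manual__2025-01-01T00:00:00", taskId: "extract", xcomKey: "return_value", deserialize: true });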
+* @param data.dagId
+* @param data.taskId
+* @param data.dagRunId
+* @param data.xcomKey
+* @param data.mapIndex
+* @param data.deserialize
+* @param data.stringify
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useXcomServiceGetXcomEntry = <TData = Common.XcomServiceGetXcomEntryDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: {
+  dagId: string;
+  dagRunId: string;
+  deserialize?: boolean;
+  mapIndex?: number;
+  stringify?: boolean;
+  taskId: string;
+  xcomKey: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) as TData, ...options });
+/**
+* Get Xcom Entries
+* Get all XCom entries.
+*
+* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.xcomKey
+* @param data.mapIndex
+* @param data.limit
+* @param data.offset
+* @returns XComCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useXcomServiceGetXcomEntries = <TData = Common.XcomServiceGetXcomEntriesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: {
+  dagId: string;
+  dagRunId: string;
+  limit?: number;
+  mapIndex?: number;
+  offset?: number;
+  taskId: string;
+  xcomKey?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, ...options });
+/**
+* Get Tasks
+* Get tasks for DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.orderBy
+* @returns TaskCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskServiceGetTasks = <TData = Common.TaskServiceGetTasksDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, orderBy }: {
+  dagId: string;
+  orderBy?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, ...options });
+/**
+* Get Task
+* Get simplified representation of a task.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.taskId
+* @returns TaskResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskServiceGetTask = <TData = Common.TaskServiceGetTaskDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, taskId }: {
+  dagId: string;
+  taskId: unknown;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, ...options });
+/**
+* Get Variable
+* Get a variable entry.
+* @param data The data for the request.
+* @param data.variableKey
+* @returns VariableResponse Successful Response
+* @throws ApiError
+*/
+export const useVariableServiceGetVariable = <TData = Common.VariableServiceGetVariableDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ variableKey }: {
+  variableKey: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey), queryFn: () => VariableService.getVariable({ variableKey }) as TData, ...options });
+/**
+* Get Variables
+* Get all variable entries.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.variableKeyPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns VariableCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useVariableServiceGetVariables = <TData = Common.VariableServiceGetVariablesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy, variableKeyPattern }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  variableKeyPattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }, queryKey), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData, ...options });
+/**
+* Get Dag Version
+* Get one Dag Version.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.versionNumber
+* @returns DagVersionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagVersionServiceGetDagVersion = <TData = Common.DagVersionServiceGetDagVersionDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, versionNumber }: {
+  dagId: string;
+  versionNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData, ...options });
+/**
+* Get Dag Versions
+* Get all DAG Versions.
+*
+* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.limit
+* @param data.offset
+* @param data.versionNumber
+* @param data.bundleName
+* @param data.bundleVersion
+* @param data.orderBy
+* @returns DAGVersionCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagVersionServiceGetDagVersions = <TData = Common.DagVersionServiceGetDagVersionsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: {
+  bundleName?: string;
+  bundleVersion?: string;
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  versionNumber?: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options });
+/**
+* Get Hitl Detail
+* Get a Human-in-the-loop detail of a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @returns HITLDetail Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceGetHitlDetail = <TData = Common.HumanInTheLoopServiceGetHitlDetailDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) as TData, ...options });
+/**
+* Get Mapped Ti Hitl Detail
+* Get a Human-in-the-loop detail of a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns HITLDetail Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceGetMappedTiHitlDetail = <TData = Common.HumanInTheLoopServiceGetMappedTiHitlDetailDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Hitl Details
+* Get Human-in-the-loop details.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.dagId
+* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagRunId
+* @param data.taskId
+* @param data.taskIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.state
+* @param data.responseReceived
+* @param data.userId
+* @param data.subjectSearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.bodySearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
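+* @example
+* // Hand-written usage sketch, not codegen output, showing the SQL LIKE patterns described above;
+* // the pattern and filter values are hypothetical.
+* const { data: pending } = useHumanInTheLoopServiceGetHitlDetails({ dagIdPattern: "%customer_%", responseReceived: false, limit: 20 });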
+* @returns HITLDetailCollection Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceGetHitlDetails = <TData = Common.HumanInTheLoopServiceGetHitlDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }: {
+  bodySearch?: string;
+  dagId?: string;
+  dagIdPattern?: string;
+  dagRunId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  responseReceived?: boolean;
+  state?: string[];
+  subjectSearch?: string;
+  taskId?: string;
+  taskIdPattern?: string;
+  userId?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetails({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }) as TData, ...options });
+/**
+* Get Health
+* @returns HealthInfoResponse Successful Response
+* @throws ApiError
+*/
+export const useMonitorServiceGetHealth = <TData = Common.MonitorServiceGetHealthDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), queryFn: () => MonitorService.getHealth() as TData, ...options });
+/**
+* Get Version
+* Get version information.
+* @returns VersionInfo Successful Response
+* @throws ApiError
+*/
+export const useVersionServiceGetVersion = <TData = Common.VersionServiceGetVersionDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), queryFn: () => VersionService.getVersion() as TData, ...options });
+/**
+* Login
+* Redirect to the login URL depending on the AuthManager configured.
+* @param data The data for the request.
+* @param data.next
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useLoginServiceLogin = <TData = Common.LoginServiceLoginDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ next }: {
+  next?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey), queryFn: () => LoginService.login({ next }) as TData, ...options });
+/**
+* Logout
+* Logout the user.
+* @param data The data for the request.
+* @param data.next
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useLoginServiceLogout = <TData = Common.LoginServiceLogoutDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ next }: {
+  next?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey), queryFn: () => LoginService.logout({ next }) as TData, ...options });
+/**
+* Refresh
+* Refresh the authentication token.
+* @param data The data for the request.
+* @param data.next
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useLoginServiceRefresh = <TData = Common.LoginServiceRefreshDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ next }: {
+  next?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseLoginServiceRefreshKeyFn({ next }, queryKey), queryFn: () => LoginService.refresh({ next }) as TData, ...options });
+/**
+* Get Auth Menus
+* @returns MenuItemCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAuthLinksServiceGetAuthMenus = <TData = Common.AuthLinksServiceGetAuthMenusDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey), queryFn: () => AuthLinksService.getAuthMenus() as TData, ...options });
+/**
+* Get Dependencies
+* Dependencies graph.
+* @param data The data for the request.
+* @param data.nodeId
+* @returns BaseGraphResponse Successful Response
+* @throws ApiError
+*/
+export const useDependenciesServiceGetDependencies = <TData = Common.DependenciesServiceGetDependenciesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ nodeId }: {
+  nodeId?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey), queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData, ...options });
+/**
+* Historical Metrics
+* Return cluster activity historical metrics.
+* @param data The data for the request.
+* @param data.startDate
+* @param data.endDate
+* @returns HistoricalMetricDataResponse Successful Response
+* @throws ApiError
+*/
+export const useDashboardServiceHistoricalMetrics = <TData = Common.DashboardServiceHistoricalMetricsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ endDate, startDate }: {
+  endDate?: string;
+  startDate: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData, ...options });
+/**
+* Dag Stats
+* Return basic DAG stats with counts of DAGs in various states.
+* @returns DashboardDagStatsResponse Successful Response
+* @throws ApiError
+*/
+export const useDashboardServiceDagStats = <TData = Common.DashboardServiceDagStatsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(queryKey), queryFn: () => DashboardService.dagStats() as TData, ...options });
+/**
+* Structure Data
+* Get Structure Data.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.includeUpstream
+* @param data.includeDownstream
+* @param data.root
+* @param data.externalDependencies
+* @param data.versionNumber
+* @returns StructureDataResponse Successful Response
+* @throws ApiError
+*/
+export const useStructureServiceStructureData = <TData = Common.StructureServiceStructureDataDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: {
+  dagId: string;
+  externalDependencies?: boolean;
+  includeDownstream?: boolean;
+  includeUpstream?: boolean;
+  root?: string;
+  versionNumber?: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, queryKey), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) as TData, ...options });
+/**
+* Get Dag Structure
+* Return dag structure for grid view.
+* @param data The data for the request.
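+* @example
+* // Hand-written usage sketch, not codegen output; the dag id and window bound are hypothetical.
+* const { data: gridStructure } = useGridServiceGetDagStructure({ dagId: "example_dag", limit: 25, runAfterGte: "2025-01-01T00:00:00Z" });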
+* @param data.dagId
+* @param data.offset
+* @param data.limit
+* @param data.orderBy
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @returns GridNodeResponse Successful Response
+* @throws ApiError
+*/
+export const useGridServiceGetDagStructure = <TData = Common.GridServiceGetDagStructureDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseGridServiceGetDagStructureKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }, queryKey), queryFn: () => GridService.getDagStructure({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) as TData, ...options });
+/**
+* Get Grid Runs
+* Get info about a run for the grid.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.offset
+* @param data.limit
+* @param data.orderBy
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @returns GridRunsResponse Successful Response
+* @throws ApiError
+*/
+export const useGridServiceGetGridRuns = <TData = Common.GridServiceGetGridRunsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseGridServiceGetGridRunsKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }, queryKey), queryFn: () => GridService.getGridRuns({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) as TData, ...options });
+/**
+* Get Grid Ti Summaries
+* Get states for TIs / "groups" of TIs.
+*
+* Essentially this is to know what color to put in the squares in the grid.
+*
+* The tricky part here is that we aggregate the state for groups and mapped tasks.
+*
+* We don't add all the TIs for mapped TIs -- we only add one entry for the mapped task and
+* its state is an aggregate of its TI states.
+*
+* And for task groups, we add a "task" for that which is not really a task but is just
+* an entry that represents the group (so that we can show a filled in box when the group
+* is not expanded) and its state is an aggregate of those within it.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.runId
+* @returns GridTISummaries Successful Response
+* @throws ApiError
+*/
+export const useGridServiceGetGridTiSummaries = <TData = Common.GridServiceGetGridTiSummariesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, runId }: {
+  dagId: string;
+  runId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseGridServiceGetGridTiSummariesKeyFn({ dagId, runId }, queryKey), queryFn: () => GridService.getGridTiSummaries({ dagId, runId }) as TData, ...options });
+/**
+* Get Calendar
+* Get calendar data for a DAG including historical and planned DAG runs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.granularity
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @returns CalendarTimeRangeCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useCalendarServiceGetCalendar = <TData = Common.CalendarServiceGetCalendarDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, granularity, logicalDateGte, logicalDateLte }: {
+  dagId: string;
+  granularity?: "hourly" | "daily";
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }, queryKey), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) as TData, ...options });
+/**
+* Create Asset Event
+* Create asset events.
+* @param data The data for the request.
+* @param data.requestBody
+* @returns AssetEventResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceCreateAssetEvent = <TData = Common.AssetServiceCreateAssetEventMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => AssetService.createAssetEvent({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Materialize Asset
+* Materialize an asset by triggering a DAG run that produces it.
+* @param data The data for the request.
+* @param data.assetId
+* @returns DAGRunResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceMaterializeAsset = <TData = Common.AssetServiceMaterializeAssetMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { assetId: number; }, TContext>, "mutationFn">) => useMutation<TData, TError, { assetId: number; }, TContext>({ mutationFn: ({ assetId }) => AssetService.materializeAsset({ assetId }) as unknown as Promise<TData>, ...options });
+/**
+* Create Backfill
+* @param data The data for the request.
+* @param data.requestBody
+* @returns BackfillResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceCreateBackfill = <TData = Common.BackfillServiceCreateBackfillMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => BackfillService.createBackfill({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Create Backfill Dry Run
+* @param data The data for the request.
+* @param data.requestBody
+* @returns DryRunBackfillCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceCreateBackfillDryRun = <TData = Common.BackfillServiceCreateBackfillDryRunMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => BackfillService.createBackfillDryRun({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Post Connection
+* Create connection entry.
+* @param data The data for the request.
+* @param data.requestBody
+* @returns ConnectionResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServicePostConnection = <TData = Common.ConnectionServicePostConnectionMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => ConnectionService.postConnection({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Test Connection
+* Test an API connection.
+*
+* This method first creates an in-memory transient conn_id & exports that to an env var,
+* as some hook classes try to find out the `conn` from their __init__ method & error out if not found.
+* It also deletes the conn id env connection after the test.
+* @param data The data for the request.
+* @param data.requestBody
+* @returns ConnectionTestResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceTestConnection = <TData = Common.ConnectionServiceTestConnectionMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => ConnectionService.testConnection({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Create Default Connections
+* Create default connections.
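+* @example
+* // Hand-written usage sketch, not codegen output: mutation hooks return a TanStack Query
+* // mutation object, so the call is issued via `mutate` (here with no variables).
+* const { mutate: seedConnections } = useConnectionServiceCreateDefaultConnections();
+* seedConnections();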
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceCreateDefaultConnections = <TData = Common.ConnectionServiceCreateDefaultConnectionsMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, void, TContext>, "mutationFn">) => useMutation<TData, TError, void, TContext>({ mutationFn: () => ConnectionService.createDefaultConnections() as unknown as Promise<TData>, ...options });
+/**
+* Clear Dag Run
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.requestBody
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceClearDagRun = <TData = Common.DagRunServiceClearDagRunMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; }, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody }) => DagRunService.clearDagRun({ dagId, dagRunId, requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Trigger Dag Run
+* Trigger a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.requestBody
+* @returns DAGRunResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceTriggerDagRun = <TData = Common.DagRunServiceTriggerDagRunMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; requestBody: unknown; }, TContext>({ mutationFn: ({ dagId, requestBody }) => DagRunService.triggerDagRun({ dagId, requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Get List Dag Runs Batch
+* Get a list of DAG Runs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.requestBody
+* @returns DAGRunCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceGetListDagRunsBatch = <TData = Common.DagRunServiceGetListDagRunsBatchMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; requestBody: unknown; }, TContext>({ mutationFn: ({ dagId, requestBody }) => DagRunService.getListDagRunsBatch({ dagId, requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Favorite Dag
+* Mark the DAG as favorite.
+* @param data The data for the request.
+* @param data.dagId
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useDagServiceFavoriteDag = <TData = Common.DagServiceFavoriteDagMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; }, TContext>({ mutationFn: ({ dagId }) => DagService.favoriteDag({ dagId }) as unknown as Promise<TData>, ...options });
+/**
+* Unfavorite Dag
+* Unmark the DAG as favorite.
+* @param data The data for the request.
+* @param data.dagId
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useDagServiceUnfavoriteDag = <TData = Common.DagServiceUnfavoriteDagMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; }, TContext>({ mutationFn: ({ dagId }) => DagService.unfavoriteDag({ dagId }) as unknown as Promise<TData>, ...options });
+/**
+* Get Task Instances Batch
+* Get list of task instances.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.requestBody
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstancesBatch = <TData = Common.TaskInstanceServiceGetTaskInstancesBatchMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; }, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody }) => TaskInstanceService.getTaskInstancesBatch({ dagId, dagRunId, requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Post Clear Task Instances
+* Clear task instances.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.requestBody
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServicePostClearTaskInstances = <TData = Common.TaskInstanceServicePostClearTaskInstancesMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; requestBody: unknown; }, TContext>({ mutationFn: ({ dagId, requestBody }) => TaskInstanceService.postClearTaskInstances({ dagId, requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Post Pool
+* Create a Pool.
+* @param data The data for the request.
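+* @example
+* // Hand-written usage sketch, not codegen output; the payload fields shown are illustrative,
+* // the exact request-body shape comes from the generated request types.
+* const { mutate: createPool } = usePoolServicePostPool();
+* createPool({ requestBody: { name: "etl_pool", slots: 8 } });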
+* @param data.requestBody
+* @returns PoolResponse Successful Response
+* @throws ApiError
+*/
+export const usePoolServicePostPool = <TData = Common.PoolServicePostPoolMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => PoolService.postPool({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Create Xcom Entry
+* Create an XCom entry.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.taskId
+* @param data.dagRunId
+* @param data.requestBody
+* @returns XComResponseNative Successful Response
+* @throws ApiError
+*/
+export const useXcomServiceCreateXcomEntry = <TData = Common.XcomServiceCreateXcomEntryMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; taskId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; taskId: string; }, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody, taskId }) => XcomService.createXcomEntry({ dagId, dagRunId, requestBody, taskId }) as unknown as Promise<TData>, ...options });
+/**
+* Post Variable
+* Create a variable.
+* @param data The data for the request.
+* @param data.requestBody
+* @returns VariableResponse Successful Response
+* @throws ApiError
+*/
+export const useVariableServicePostVariable = <TData = Common.VariableServicePostVariableMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => VariableService.postVariable({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Pause Backfill
+* @param data The data for the request.
+* @param data.backfillId
+* @returns BackfillResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServicePauseBackfill = <TData = Common.BackfillServicePauseBackfillMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { backfillId: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { backfillId: unknown; }, TContext>({ mutationFn: ({ backfillId }) => BackfillService.pauseBackfill({ backfillId }) as unknown as Promise<TData>, ...options });
+/**
+* Unpause Backfill
+* @param data The data for the request.
+* @param data.backfillId
+* @returns BackfillResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceUnpauseBackfill = <TData = Common.BackfillServiceUnpauseBackfillMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { backfillId: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { backfillId: unknown; }, TContext>({ mutationFn: ({ backfillId }) => BackfillService.unpauseBackfill({ backfillId }) as unknown as Promise<TData>, ...options });
+/**
+* Cancel Backfill
+* @param data The data for the request.
+* @param data.backfillId
+* @returns BackfillResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceCancelBackfill = <TData = Common.BackfillServiceCancelBackfillMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { backfillId: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { backfillId: unknown; }, TContext>({ mutationFn: ({ backfillId }) => BackfillService.cancelBackfill({ backfillId }) as unknown as Promise<TData>, ...options });
+/**
+* Reparse Dag File
+* Request re-parsing a DAG file.
+* @param data The data for the request.
+* @param data.fileToken
+* @returns null Successful Response
+* @throws ApiError
+*/
+export const useDagParsingServiceReparseDagFile = <TData = Common.DagParsingServiceReparseDagFileMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { fileToken: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { fileToken: string; }, TContext>({ mutationFn: ({ fileToken }) => DagParsingService.reparseDagFile({ fileToken }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Connection
+* Update a connection entry.
+* @param data The data for the request.
+* @param data.connectionId
+* @param data.requestBody
+* @param data.updateMask
+* @returns ConnectionResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServicePatchConnection = <TData = Common.ConnectionServicePatchConnectionMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { connectionId: string; requestBody: unknown; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { connectionId: string; requestBody: unknown; updateMask?: string[]; }, TContext>({ mutationFn: ({ connectionId, requestBody, updateMask }) => ConnectionService.patchConnection({ connectionId, requestBody, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Bulk Connections
+* Bulk create, update, and delete connections.
+* @param data The data for the request.
+* @param data.requestBody
+* @returns BulkResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceBulkConnections = <TData = Common.ConnectionServiceBulkConnectionsMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => ConnectionService.bulkConnections({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Dag Run
+* Modify a DAG Run.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.requestBody
+* @param data.updateMask
+* @returns DAGRunResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServicePatchDagRun = <TData = Common.DagRunServicePatchDagRunMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; updateMask?: string[]; }, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody, updateMask }) => DagRunService.patchDagRun({ dagId, dagRunId, requestBody, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Dags
+* Patch multiple DAGs.
+* @param data The data for the request.
+* @param data.requestBody
+* @param data.updateMask
+* @param data.limit
+* @param data.offset
+* @param data.tags
+* @param data.tagsMatchMode
+* @param data.owners
+* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.excludeStale
+* @param data.paused
+* @returns DAGCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServicePatchDags = <TData = Common.DagServicePatchDagsMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagIdPattern?: string; excludeStale?: boolean; limit?: number; offset?: number; owners?: string[]; paused?: boolean; requestBody: unknown; tags?: string[]; tagsMatchMode?: "any" | "all"; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagIdPattern?: string; excludeStale?: boolean; limit?: number; offset?: number; owners?: string[]; paused?: boolean; requestBody: unknown; tags?: string[]; tagsMatchMode?: "any" | "all"; updateMask?: string[]; }, TContext>({ mutationFn: ({ dagIdPattern, excludeStale, limit, offset, owners, paused, requestBody, tags, tagsMatchMode, updateMask }) => DagService.patchDags({ dagIdPattern, excludeStale, limit, offset, owners, paused, requestBody, tags, tagsMatchMode, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Dag
+* Patch the specific DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.requestBody
+* @param data.updateMask
+* @returns DAGResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServicePatchDag = <TData = Common.DagServicePatchDagMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; requestBody: unknown; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; requestBody: unknown; updateMask?: string[]; }, TContext>({ mutationFn: ({ dagId, requestBody, updateMask }) => DagService.patchDag({ dagId, requestBody, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Task Instance
+* Update a task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.requestBody
+* @param data.mapIndex
+* @param data.updateMask
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServicePatchTaskInstance = <TData = Common.TaskInstanceServicePatchTaskInstanceMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; mapIndex?: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; mapIndex?: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstance({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Task Instance
+* Update a task instance.
+* @param data The data for the request.
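+* @example
+* // Hand-written usage sketch, not codegen output; the ids, map index, and payload are hypothetical.
+* const { mutate: patchTi } = useTaskInstanceServicePatchTaskInstanceByMapIndex();
+* patchTi({ dagId: "example_dag", dagRunId: "manual__2025-01-01T00:00:00", taskId: "transform", mapIndex: 0, requestBody: { state: "success" } });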
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @param data.requestBody
+* @param data.updateMask
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServicePatchTaskInstanceByMapIndex = <TData = Common.TaskInstanceServicePatchTaskInstanceByMapIndexMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; mapIndex: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; mapIndex: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstanceByMapIndex({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Bulk Task Instances
+* Bulk update and delete task instances.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.requestBody
+* @returns BulkResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceBulkTaskInstances = <TData = Common.TaskInstanceServiceBulkTaskInstancesMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; }, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody }) => TaskInstanceService.bulkTaskInstances({ dagId, dagRunId, requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Task Instance Dry Run
+* Update a task instance dry_run mode.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @param data.requestBody
+* @param data.updateMask
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServicePatchTaskInstanceDryRunByMapIndex = <TData = Common.TaskInstanceServicePatchTaskInstanceDryRunByMapIndexMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; mapIndex: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; mapIndex: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstanceDryRunByMapIndex({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Task Instance Dry Run
+* Update a task instance dry_run mode.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.requestBody
+* @param data.mapIndex
+* @param data.updateMask
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServicePatchTaskInstanceDryRun = <TData = Common.TaskInstanceServicePatchTaskInstanceDryRunMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; mapIndex?: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; mapIndex?: number; requestBody: unknown; taskId: string; updateMask?: string[]; }, TContext>({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstanceDryRun({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Pool
+* Update a Pool.
+* @param data The data for the request.
+* @param data.poolName
+* @param data.requestBody
+* @param data.updateMask
+* @returns PoolResponse Successful Response
+* @throws ApiError
+*/
+export const usePoolServicePatchPool = <TData = Common.PoolServicePatchPoolMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { poolName: string; requestBody: unknown; updateMask?: string[]; }, TContext>, "mutationFn">) => useMutation<TData, TError, { poolName: string; requestBody: unknown; updateMask?: string[]; }, TContext>({ mutationFn: ({ poolName, requestBody, updateMask }) => PoolService.patchPool({ poolName, requestBody, updateMask }) as unknown as Promise<TData>, ...options });
+/**
+* Bulk Pools
+* Bulk create, update, and delete pools.
+* @param data The data for the request.
+* @param data.requestBody
+* @returns BulkResponse Successful Response
+* @throws ApiError
+*/
+export const usePoolServiceBulkPools = <TData = Common.PoolServiceBulkPoolsMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => PoolService.bulkPools({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Update Xcom Entry
+* Update an existing XCom entry.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.taskId
+* @param data.dagRunId
+* @param data.xcomKey
+* @param data.requestBody
+* @returns XComResponseNative Successful Response
+* @throws ApiError
+*/
+export const useXcomServiceUpdateXcomEntry = <TData = Common.XcomServiceUpdateXcomEntryMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; taskId: string; xcomKey: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; taskId: string; xcomKey: string; }, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody, taskId, xcomKey }) => XcomService.updateXcomEntry({ dagId, dagRunId, requestBody, taskId, xcomKey }) as unknown as Promise<TData>, ...options });
+/**
+* Patch Variable
+* Update a variable by key.
+* @param data The data for the request.
+* @param data.variableKey
+* @param data.requestBody
+* @param data.updateMask
+* @returns VariableResponse Successful Response
+* @throws ApiError
+*/
+export const useVariableServicePatchVariable = <TData = Common.VariableServicePatchVariableMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; updateMask?: string[]; variableKey: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; updateMask?: string[]; variableKey: string; }, TContext>({ mutationFn: ({ requestBody, updateMask, variableKey }) => VariableService.patchVariable({ requestBody, updateMask, variableKey }) as unknown as Promise<TData>, ...options });
+/**
+* Bulk Variables
+* Bulk create, update, and delete variables.
+* @param data The data for the request.
+* @param data.requestBody
+* @returns BulkResponse Successful Response
+* @throws ApiError
+*/
+export const useVariableServiceBulkVariables = <TData = Common.VariableServiceBulkVariablesMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { requestBody: unknown; }, TContext>, "mutationFn">) => useMutation<TData, TError, { requestBody: unknown; }, TContext>({ mutationFn: ({ requestBody }) => VariableService.bulkVariables({ requestBody }) as unknown as Promise<TData>, ...options });
+/**
+* Update Hitl Detail
+* Update a Human-in-the-loop detail.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.requestBody
+* @returns HITLDetailResponse Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceUpdateHitlDetail = <TData = Common.HumanInTheLoopServiceUpdateHitlDetailMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; taskId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; requestBody: unknown; taskId: string; }, TContext>({ mutationFn: ({ dagId, dagRunId, requestBody, taskId }) => HumanInTheLoopService.updateHitlDetail({ dagId, dagRunId, requestBody, taskId }) as unknown as Promise<TData>, ...options });
+/**
+* Update Mapped Ti Hitl Detail
+* Update a Human-in-the-loop detail.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @param data.requestBody
+* @returns HITLDetailResponse Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceUpdateMappedTiHitlDetail = <TData = Common.HumanInTheLoopServiceUpdateMappedTiHitlDetailMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; mapIndex: number; requestBody: unknown; taskId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; mapIndex: number; requestBody: unknown; taskId: string; }, TContext>({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId }) => HumanInTheLoopService.updateMappedTiHitlDetail({ dagId, dagRunId, mapIndex, requestBody, taskId }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Asset Queued Events
+* Delete queued asset events for an asset.
+* @param data The data for the request.
+* @param data.assetId
+* @param data.before
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceDeleteAssetQueuedEvents = <TData = Common.AssetServiceDeleteAssetQueuedEventsMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { assetId: number; before?: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { assetId: number; before?: string; }, TContext>({ mutationFn: ({ assetId, before }) => AssetService.deleteAssetQueuedEvents({ assetId, before }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Dag Asset Queued Events
+* @param data The data for the request.
+* @param data.dagId
+* @param data.before
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceDeleteDagAssetQueuedEvents = <TData = Common.AssetServiceDeleteDagAssetQueuedEventsMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { before?: string; dagId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { before?: string; dagId: string; }, TContext>({ mutationFn: ({ before, dagId }) => AssetService.deleteDagAssetQueuedEvents({ before, dagId }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Dag Asset Queued Event
+* Delete a queued asset event for a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.assetId
+* @param data.before
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceDeleteDagAssetQueuedEvent = <TData = Common.AssetServiceDeleteDagAssetQueuedEventMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { assetId: number; before?: string; dagId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { assetId: number; before?: string; dagId: string; }, TContext>({ mutationFn: ({ assetId, before, dagId }) => AssetService.deleteDagAssetQueuedEvent({ assetId, before, dagId }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Connection
+* Delete a connection entry.
+* @param data The data for the request.
+* @param data.connectionId
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceDeleteConnection = <TData = Common.ConnectionServiceDeleteConnectionMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { connectionId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { connectionId: string; }, TContext>({ mutationFn: ({ connectionId }) => ConnectionService.deleteConnection({ connectionId }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Dag Run
+* Delete a DAG Run entry.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceDeleteDagRun = <TData = Common.DagRunServiceDeleteDagRunMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; }, TContext>({ mutationFn: ({ dagId, dagRunId }) => DagRunService.deleteDagRun({ dagId, dagRunId }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Dag
+* Delete the specific DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagServiceDeleteDag = <TData = Common.DagServiceDeleteDagMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; }, TContext>({ mutationFn: ({ dagId }) => DagService.deleteDag({ dagId }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Task Instance
+* Delete a task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns null Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceDeleteTaskInstance = <TData = Common.TaskInstanceServiceDeleteTaskInstanceMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { dagId: string; dagRunId: string; mapIndex?: number; taskId: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { dagId: string; dagRunId: string; mapIndex?: number; taskId: string; }, TContext>({ mutationFn: ({ dagId, dagRunId, mapIndex, taskId }) => TaskInstanceService.deleteTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Pool
+* Delete a pool entry.
+* @param data The data for the request.
+* @param data.poolName
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const usePoolServiceDeletePool = <TData = Common.PoolServiceDeletePoolMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { poolName: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { poolName: string; }, TContext>({ mutationFn: ({ poolName }) => PoolService.deletePool({ poolName }) as unknown as Promise<TData>, ...options });
+/**
+* Delete Variable
+* Delete a variable entry.
+* @param data The data for the request.
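A consumption note, since everything above is pure codegen: each mutation hook wraps TanStack Query's useMutation and accepts the standard mutation options minus "mutationFn", so cache invalidation stays with the caller. A minimal sketch, assuming the codegen's usual per-operation key exports in common.ts; the wrapper name, import paths, and variable body values are illustrative:

import { useQueryClient } from "@tanstack/react-query";

import { useVariableServicePatchVariable } from "../openapi-gen/queries/queries";
import { useVariableServiceGetVariablesKey } from "../openapi-gen/queries/common";

// Hypothetical wrapper: patch a variable, then refetch any cached variable lists.
export const usePatchVariableAndRefresh = () => {
  const queryClient = useQueryClient();

  return useVariableServicePatchVariable({
    // Invalidate by the base key so every filtered/paginated list refetches.
    onSuccess: () => queryClient.invalidateQueries({ queryKey: [useVariableServiceGetVariablesKey] }),
  });
};

// Inside a component (values illustrative):
//   const { mutate, isPending } = usePatchVariableAndRefresh();
//   mutate({ variableKey: "my_var", requestBody: { key: "my_var", value: "updated" } });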
+* @param data.variableKey
+* @returns void Successful Response
+* @throws ApiError
+*/
+export const useVariableServiceDeleteVariable = <TData = Common.VariableServiceDeleteVariableMutationResult, TError = unknown, TContext = unknown>(options?: Omit<UseMutationOptions<TData, TError, { variableKey: string; }, TContext>, "mutationFn">) => useMutation<TData, TError, { variableKey: string; }, TContext>({ mutationFn: ({ variableKey }) => VariableService.deleteVariable({ variableKey }) as unknown as Promise<TData>, ...options });
diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts
index 0bb328cbb241f..d2b2ee9fcd6bd 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts
@@ -1,2974 +1,1448 @@
-// generated with @7nohe/openapi-react-query-codegen@1.6.2
-import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query";
+// generated with @7nohe/openapi-react-query-codegen@1.6.2
-import {
-  AssetService,
-  AuthLinksService,
-  BackfillService,
-  ConfigService,
-  ConnectionService,
-  DagReportService,
-  DagRunService,
-  DagService,
-  DagSourceService,
-  DagStatsService,
-  DagVersionService,
-  DagWarningService,
-  DagsService,
-  DashboardService,
-  DependenciesService,
-  EventLogService,
-  ExtraLinksService,
-  GridService,
-  ImportErrorService,
-  JobService,
-  LoginService,
-  MonitorService,
-  PluginService,
-  PoolService,
-  ProviderService,
-  StructureService,
-  TaskInstanceService,
-  TaskService,
-  VariableService,
-  VersionService,
-  XcomService,
-} from "../requests/services.gen";
+import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query";
+import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen";
 import { DagRunState, DagWarningType } from "../requests/types.gen";
 import * as Common from "./common";
-
-/**
- * Get Assets
- * Get assets.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.namePattern
- * @param data.uriPattern
- * @param data.dagIds
- * @param data.onlyActive
- * @param data.orderBy
- * @returns AssetCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useAssetServiceGetAssetsSuspense = <
-  TData = Common.AssetServiceGetAssetsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagIds,
-    limit,
-    namePattern,
-    offset,
-    onlyActive,
-    orderBy,
-    uriPattern,
-  }: {
-    dagIds?: string[];
-    limit?: number;
-    namePattern?: string;
-    offset?: number;
-    onlyActive?: boolean;
-    orderBy?: string;
-    uriPattern?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery({
-    queryKey: Common.UseAssetServiceGetAssetsKeyFn(
-      { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern },
-      queryKey,
-    ),
-    queryFn: () =>
-      AssetService.getAssets({
-        dagIds,
-        limit,
-        namePattern,
-        offset,
-        onlyActive,
-        orderBy,
-        uriPattern,
-      }) as TData,
-    ...options,
-  });
-/**
- * Get Asset Aliases
- * Get asset aliases.
- * @param data The data for the request.
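The suspense.ts rewrite below is, like queries.ts above, largely a reflow of the same generated content plus hooks for the newly imported services (CalendarService, HumanInTheLoopService, and so on). Behaviourally, the *Suspense variants differ from the plain hooks only in resolution: data is available on first render, with loading delegated to the nearest <Suspense> boundary. A minimal sketch; component names and the fallback are hypothetical:

import { Suspense } from "react";

import { useDagRunServiceGetDagRunSuspense } from "../openapi-gen/queries/suspense";

// Suspends until the DAG run is fetched; no isLoading branch is needed.
const DagRunStateLabel = ({ dagId, dagRunId }: { dagId: string; dagRunId: string }) => {
  const { data: dagRun } = useDagRunServiceGetDagRunSuspense({ dagId, dagRunId });

  return <span>{dagRun.state}</span>;
};

export const DagRunPanel = (props: { dagId: string; dagRunId: string }) => (
  <Suspense fallback={<span>Loading run…</span>}>
    <DagRunStateLabel {...props} />
  </Suspense>
);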
- * @param data.limit - * @param data.offset - * @param data.namePattern - * @param data.orderBy - * @returns AssetAliasCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetAliasesSuspense = < - TData = Common.AssetServiceGetAssetAliasesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - namePattern, - offset, - orderBy, - }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), - queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Asset Alias - * Get an asset alias. - * @param data The data for the request. - * @param data.assetAliasId - * @returns unknown Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetAliasSuspense = < - TData = Common.AssetServiceGetAssetAliasDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetAliasId, - }: { - assetAliasId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), - queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, - ...options, - }); -/** - * Get Asset Events - * Get asset events. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.assetId - * @param data.sourceDagId - * @param data.sourceTaskId - * @param data.sourceRunId - * @param data.sourceMapIndex - * @param data.timestampGte - * @param data.timestampLte - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetEventsSuspense = < - TData = Common.AssetServiceGetAssetEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceGetAssetEventsKeyFn( - { - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }, - queryKey, - ), - queryFn: () => - AssetService.getAssetEvents({ - assetId, - limit, - offset, - orderBy, - sourceDagId, - sourceMapIndex, - sourceRunId, - sourceTaskId, - timestampGte, - timestampLte, - }) as TData, - ...options, - }); -/** - * Get Asset Queued Events - * Get queued asset events for an asset. - * @param data The data for the request. 
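One thing the getAssetEvents signature above makes easy to miss: the source* parameters filter by the producing task instance, not the asset itself. An illustrative suspense call; all values are hypothetical, timestamps are ISO 8601, and total_entries follows the collection-response shape assumed here:

import { useAssetServiceGetAssetEventsSuspense } from "../openapi-gen/queries/suspense";

// Events emitted by one task during January 2025, newest first.
const JanuaryAssetEvents = () => {
  const { data } = useAssetServiceGetAssetEventsSuspense({
    sourceDagId: "producer_dag",
    sourceTaskId: "publish_asset",
    timestampGte: "2025-01-01T00:00:00Z",
    timestampLte: "2025-02-01T00:00:00Z",
    orderBy: "-timestamp",
  });

  return <>{data.total_entries} events</>;
};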
- * @param data.assetId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetQueuedEventsSuspense = < - TData = Common.AssetServiceGetAssetQueuedEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - before, - }: { - assetId: number; - before?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), - queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, - ...options, - }); -/** - * Get Asset - * Get an asset. - * @param data The data for the request. - * @param data.assetId - * @returns AssetResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetAssetSuspense = < - TData = Common.AssetServiceGetAssetDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - }: { - assetId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), - queryFn: () => AssetService.getAsset({ assetId }) as TData, - ...options, - }); -/** - * Get Dag Asset Queued Events - * Get queued asset events for a DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetDagAssetQueuedEventsSuspense = < - TData = Common.AssetServiceGetDagAssetQueuedEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - before, - dagId, - }: { - before?: string; - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), - queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, - ...options, - }); -/** - * Get Dag Asset Queued Event - * Get a queued asset event for a DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.assetId - * @param data.before - * @returns QueuedEventResponse Successful Response - * @throws ApiError - */ -export const useAssetServiceGetDagAssetQueuedEventSuspense = < - TData = Common.AssetServiceGetDagAssetQueuedEventDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - assetId, - before, - dagId, - }: { - assetId: number; - before?: string; - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), - queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, - ...options, - }); -/** - * Next Run Assets - * @param data The data for the request. 
- * @param data.dagId - * @returns unknown Successful Response - * @throws ApiError - */ -export const useAssetServiceNextRunAssetsSuspense = < - TData = Common.AssetServiceNextRunAssetsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - }: { - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), - queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, - ...options, - }); -/** - * List Backfills - * @param data The data for the request. - * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceListBackfillsSuspense = < - TData = Common.BackfillServiceListBackfillsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - limit, - offset, - orderBy, - }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), - queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Backfill - * @param data The data for the request. - * @param data.backfillId - * @returns BackfillResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceGetBackfillSuspense = < - TData = Common.BackfillServiceGetBackfillDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - backfillId, - }: { - backfillId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), - queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, - ...options, - }); -/** - * List Backfills - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.active - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ -export const useBackfillServiceListBackfills1Suspense = < - TData = Common.BackfillServiceListBackfills1DefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - active, - dagId, - limit, - offset, - orderBy, - }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseBackfillServiceListBackfills1KeyFn( - { active, dagId, limit, offset, orderBy }, - queryKey, - ), - queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Connection - * Get a connection entry. - * @param data The data for the request. 
- * @param data.connectionId - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ -export const useConnectionServiceGetConnectionSuspense = < - TData = Common.ConnectionServiceGetConnectionDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - connectionId, - }: { - connectionId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), - queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, - ...options, - }); -/** - * Get Connections - * Get all connection entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.connectionIdPattern - * @returns ConnectionCollectionResponse Successful Response - * @throws ApiError - */ -export const useConnectionServiceGetConnectionsSuspense = < - TData = Common.ConnectionServiceGetConnectionsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - connectionIdPattern, - limit, - offset, - orderBy, - }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( - { connectionIdPattern, limit, offset, orderBy }, - queryKey, - ), - queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Hook Meta Data - * Retrieve information about available connection types (hook classes) and their parameters. - * @returns ConnectionHookMetaData Successful Response - * @throws ApiError - */ -export const useConnectionServiceHookMetaDataSuspense = < - TData = Common.ConnectionServiceHookMetaDataDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), - queryFn: () => ConnectionService.hookMetaData() as TData, - ...options, - }); -/** - * Get Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetDagRunSuspense = < - TData = Common.DagRunServiceGetDagRunDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, - ...options, - }); -/** - * Get Upstream Asset Events - * If dag run is asset-triggered, return the asset events that triggered it. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetUpstreamAssetEventsSuspense = < - TData = Common.DagRunServiceGetUpstreamAssetEventsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), - queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, - ...options, - }); -/** - * Get Dag Runs - * Get all DAG Runs. - * - * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. - * @param data The data for the request. - * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.runType - * @param data.state - * @param data.orderBy - * @returns DAGRunCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagRunServiceGetDagRunsSuspense = < - TData = Common.DagRunServiceGetDagRunsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagRunServiceGetDagRunsKeyFn( - { - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }, - queryKey, - ), - queryFn: () => - DagRunService.getDagRuns({ - dagId, - endDateGte, - endDateLte, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - runAfterGte, - runAfterLte, - runType, - startDateGte, - startDateLte, - state, - updatedAtGte, - updatedAtLte, - }) as TData, - ...options, - }); -/** - * Get Dag Source - * Get source code using file token. - * @param data The data for the request. 
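The paired *Gte/*Lte parameters on getDagRuns above compose into windowed queries with no client-side filtering. A hypothetical example selecting failed runs that started within one week (hook name from the file; values illustrative):

import { useDagRunServiceGetDagRunsSuspense } from "../openapi-gen/queries/suspense";

const FailedRunsThisWeek = ({ dagId }: { dagId: string }) => {
  const { data } = useDagRunServiceGetDagRunsSuspense({
    dagId,
    state: ["failed"],
    startDateGte: "2025-06-02T00:00:00Z",
    startDateLte: "2025-06-09T00:00:00Z",
    orderBy: "-start_date", // "-" prefix requests descending order, per the API's ordering convention
  });

  return <>{data.total_entries} failed runs</>;
};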
- * @param data.dagId - * @param data.versionNumber - * @param data.accept - * @returns DAGSourceResponse Successful Response - * @throws ApiError - */ -export const useDagSourceServiceGetDagSourceSuspense = < - TData = Common.DagSourceServiceGetDagSourceDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - accept, - dagId, - versionNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), - queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, - ...options, - }); -/** - * Get Dag Stats - * Get Dag statistics. - * @param data The data for the request. - * @param data.dagIds - * @returns DagStatsCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagStatsServiceGetDagStatsSuspense = < - TData = Common.DagStatsServiceGetDagStatsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagIds, - }: { - dagIds?: string[]; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), - queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, - ...options, - }); -/** - * Get Dag Reports - * Get DAG report. - * @param data The data for the request. - * @param data.subdir - * @returns unknown Successful Response - * @throws ApiError - */ -export const useDagReportServiceGetDagReportsSuspense = < - TData = Common.DagReportServiceGetDagReportsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - subdir, - }: { - subdir: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey), - queryFn: () => DagReportService.getDagReports({ subdir }) as TData, - ...options, - }); -/** - * Get Config - * @param data The data for the request. - * @param data.section - * @param data.accept - * @returns Config Successful Response - * @throws ApiError - */ -export const useConfigServiceGetConfigSuspense = < - TData = Common.ConfigServiceGetConfigDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - accept, - section, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey), - queryFn: () => ConfigService.getConfig({ accept, section }) as TData, - ...options, - }); -/** - * Get Config Value - * @param data The data for the request. 
- * @param data.section - * @param data.option - * @param data.accept - * @returns Config Successful Response - * @throws ApiError - */ -export const useConfigServiceGetConfigValueSuspense = < - TData = Common.ConfigServiceGetConfigValueDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - accept, - option, - section, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey), - queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData, - ...options, - }); -/** - * Get Configs - * Get configs for UI. - * @returns ConfigResponse Successful Response - * @throws ApiError - */ -export const useConfigServiceGetConfigsSuspense = < - TData = Common.ConfigServiceGetConfigsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey), - queryFn: () => ConfigService.getConfigs() as TData, - ...options, - }); -/** - * List Dag Warnings - * Get a list of DAG warnings. - * @param data The data for the request. - * @param data.dagId - * @param data.warningType - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns DAGWarningCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagWarningServiceListDagWarningsSuspense = < - TData = Common.DagWarningServiceListDagWarningsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - limit, - offset, - orderBy, - warningType, - }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( - { dagId, limit, offset, orderBy, warningType }, - queryKey, - ), - queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, - ...options, - }); -/** - * Get Dags - * Get all DAGs. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @param data.dagRunStartDateGte - * @param data.dagRunStartDateLte - * @param data.dagRunEndDateGte - * @param data.dagRunEndDateLte - * @param data.dagRunState - * @param data.orderBy - * @returns DAGCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDagsSuspense = < - TData = Common.DagServiceGetDagsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagServiceGetDagsKeyFn( - { - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }, - queryKey, - ), - queryFn: () => - DagService.getDags({ - dagDisplayNamePattern, - dagIdPattern, - dagRunEndDateGte, - dagRunEndDateLte, - dagRunStartDateGte, - dagRunStartDateLte, - dagRunState, - excludeStale, - lastDagRunState, - limit, - offset, - orderBy, - owners, - paused, - tags, - tagsMatchMode, - }) as TData, - ...options, - }); -/** - * Get Dag - * Get basic information about a DAG. - * @param data The data for the request. - * @param data.dagId - * @returns DAGResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDagSuspense = < - TData = Common.DagServiceGetDagDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - }: { - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), - queryFn: () => DagService.getDag({ dagId }) as TData, - ...options, - }); -/** - * Get Dag Details - * Get details of DAG. - * @param data The data for the request. - * @param data.dagId - * @returns DAGDetailsResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDagDetailsSuspense = < - TData = Common.DagServiceGetDagDetailsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - }: { - dagId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), - queryFn: () => DagService.getDagDetails({ dagId }) as TData, - ...options, - }); -/** - * Get Dag Tags - * Get all DAG tags. - * @param data The data for the request. 
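getDags above multiplexes essentially all DAG list-page state (tag, owner, pause and run-state filters, plus pagination) into one call, with every parameter optional. A hypothetical list query:

import { useDagServiceGetDagsSuspense } from "../openapi-gen/queries/suspense";

const CriticalActiveDags = () => {
  const { data } = useDagServiceGetDagsSuspense({
    excludeStale: true,
    limit: 25,
    offset: 0,
    orderBy: "dag_id",
    paused: false,
    tags: ["critical"],
    tagsMatchMode: "any", // match any listed tag rather than all of them
  });

  return <>{data.total_entries} matching DAGs</>;
};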
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.tagNamePattern - * @returns DAGTagCollectionResponse Successful Response - * @throws ApiError - */ -export const useDagServiceGetDagTagsSuspense = < - TData = Common.DagServiceGetDagTagsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - tagNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), - queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, - ...options, - }); -/** - * Get Event Log - * @param data The data for the request. - * @param data.eventLogId - * @returns EventLogResponse Successful Response - * @throws ApiError - */ -export const useEventLogServiceGetEventLogSuspense = < - TData = Common.EventLogServiceGetEventLogDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - eventLogId, - }: { - eventLogId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), - queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, - ...options, - }); -/** - * Get Event Logs - * Get all Event Logs. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.taskId - * @param data.runId - * @param data.mapIndex - * @param data.tryNumber - * @param data.owner - * @param data.event - * @param data.excludedEvents - * @param data.includedEvents - * @param data.before - * @param data.after - * @returns EventLogCollectionResponse Successful Response - * @throws ApiError - */ -export const useEventLogServiceGetEventLogsSuspense = < - TData = Common.EventLogServiceGetEventLogsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseEventLogServiceGetEventLogsKeyFn( - { - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }, - queryKey, - ), - queryFn: () => - EventLogService.getEventLogs({ - after, - before, - dagId, - event, - excludedEvents, - includedEvents, - limit, - mapIndex, - offset, - orderBy, - owner, - runId, - taskId, - tryNumber, - }) as TData, - ...options, - }); -/** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ -export const useExtraLinksServiceGetExtraLinksSuspense = < - TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), - queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetExtraLinksSuspense = < - TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instance - * Get task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceSuspense = < - TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - taskId, - }: { - dagId: string; - dagRunId: string; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), - queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, - ...options, - }); -/** - * Get Mapped Task Instances - * Get list of mapped task instances. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstancesSuspense = < - TData = Common.TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getMappedTaskInstances({ - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }) as TData, - ...options, - }); -/** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceDependenciesSuspense = < - TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceDependencies1Suspense = < - TData = Common.TaskInstanceServiceGetTaskInstanceDependencies1DefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependencies1KeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstanceDependencies1({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instance Tries - * Get list of task instances history. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceTriesSuspense = < - TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Mapped Task Instance Tries - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstanceTriesSuspense = < - TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Mapped Task Instance - * Get task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstanceSuspense = < - TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn( - { dagId, dagRunId, mapIndex, taskId }, - queryKey, - ), - queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, - ...options, - }); -/** - * Get Task Instances - * Get list of task instances. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs - * and DAG runs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.taskDisplayNamePattern - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstancesSuspense = < - TData = Common.TaskInstanceServiceGetTaskInstancesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn( - { - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstances({ - dagId, - dagRunId, - durationGte, - durationLte, - endDateGte, - endDateLte, - executor, - limit, - logicalDateGte, - logicalDateLte, - offset, - orderBy, - pool, - queue, - runAfterGte, - runAfterLte, - startDateGte, - startDateLte, - state, - taskDisplayNamePattern, - taskId, - updatedAtGte, - updatedAtLte, - versionNumber, - }) as TData, - ...options, - }); -/** - * Get Task Instance Try Details - * Get task instance details by try number. - * @param data The data for the request. 
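As the docstring above notes, getTaskInstances accepts "~" for both dagId and dagRunId, so an instance-wide view is a single call. Illustrative (field values and the rendered count are examples only):

import { useTaskInstanceServiceGetTaskInstancesSuspense } from "../openapi-gen/queries/suspense";

const RunningTaskInstanceCount = () => {
  const { data } = useTaskInstanceServiceGetTaskInstancesSuspense({
    dagId: "~", // all DAGs
    dagRunId: "~", // all runs
    state: ["running"],
    limit: 50,
  });

  return <>{data.total_entries} running task instances</>;
};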
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetTaskInstanceTryDetailsSuspense = < - TData = Common.TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn( - { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getTaskInstanceTryDetails({ - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }) as TData, - ...options, - }); -/** - * Get Mapped Task Instance Try Details - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsSuspense = < - TData = Common.TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn( - { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getMappedTaskInstanceTryDetails({ - dagId, - dagRunId, - mapIndex, - taskId, - taskTryNumber, - }) as TData, - ...options, - }); -/** - * Get Log - * Get logs for a specific task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.tryNumber - * @param data.fullContent - * @param data.mapIndex - * @param data.token - * @param data.accept - * @returns TaskInstancesLogResponse Successful Response - * @throws ApiError - */ -export const useTaskInstanceServiceGetLogSuspense = < - TData = Common.TaskInstanceServiceGetLogDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskInstanceServiceGetLogKeyFn( - { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, - queryKey, - ), - queryFn: () => - TaskInstanceService.getLog({ - accept, - dagId, - dagRunId, - fullContent, - mapIndex, - taskId, - token, - tryNumber, - }) as TData, - ...options, - }); -/** - * Get Import Error - * Get an import error. - * @param data The data for the request. 
- * @param data.importErrorId - * @returns ImportErrorResponse Successful Response - * @throws ApiError - */ -export const useImportErrorServiceGetImportErrorSuspense = < - TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - importErrorId, - }: { - importErrorId: number; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), - queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, - ...options, - }); -/** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ -export const useImportErrorServiceGetImportErrorsSuspense = < - TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - }: { - limit?: number; - offset?: number; - orderBy?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), - queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, - ...options, - }); -/** - * Get Jobs - * Get all jobs. - * @param data The data for the request. - * @param data.isAlive - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.jobState - * @param data.jobType - * @param data.hostname - * @param data.executorClass - * @returns JobCollectionResponse Successful Response - * @throws ApiError - */ -export const useJobServiceGetJobsSuspense = < - TData = Common.JobServiceGetJobsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseJobServiceGetJobsKeyFn( - { - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }, - queryKey, - ), - queryFn: () => - JobService.getJobs({ - endDateGte, - endDateLte, - executorClass, - hostname, - isAlive, - jobState, - jobType, - limit, - offset, - orderBy, - startDateGte, - startDateLte, - }) as TData, - ...options, - }); -/** - * Get Plugins - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response - * @throws ApiError - */ -export const usePluginServiceGetPluginsSuspense = < - TData = Common.PluginServiceGetPluginsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey), - queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, - ...options, - }); -/** - * Get Pool - * Get a pool. - * @param data The data for the request. - * @param data.poolName - * @returns PoolResponse Successful Response - * @throws ApiError - */ -export const usePoolServiceGetPoolSuspense = < - TData = Common.PoolServiceGetPoolDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - poolName, - }: { - poolName: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), - queryFn: () => PoolService.getPool({ poolName }) as TData, - ...options, - }); -/** - * Get Pools - * Get all pools entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.poolNamePattern - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ -export const usePoolServiceGetPoolsSuspense = < - TData = Common.PoolServiceGetPoolsDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - orderBy, - poolNamePattern, - }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey), - queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData, - ...options, - }); -/** - * Get Providers - * Get providers. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response - * @throws ApiError - */ -export const useProviderServiceGetProvidersSuspense = < - TData = Common.ProviderServiceGetProvidersDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - limit, - offset, - }: { - limit?: number; - offset?: number; - } = {}, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey), - queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, - ...options, - }); -/** - * Get Xcom Entry - * Get an XCom entry. - * @param data The data for the request. 
- * @param data.dagId - * @param data.taskId - * @param data.dagRunId - * @param data.xcomKey - * @param data.mapIndex - * @param data.deserialize - * @param data.stringify - * @returns unknown Successful Response - * @throws ApiError - */ -export const useXcomServiceGetXcomEntrySuspense = < - TData = Common.XcomServiceGetXcomEntryDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseXcomServiceGetXcomEntryKeyFn( - { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, - queryKey, - ), - queryFn: () => - XcomService.getXcomEntry({ - dagId, - dagRunId, - deserialize, - mapIndex, - stringify, - taskId, - xcomKey, - }) as TData, - ...options, - }); -/** - * Get Xcom Entries - * Get all XCom entries. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.xcomKey - * @param data.mapIndex - * @param data.limit - * @param data.offset - * @returns XComCollectionResponse Successful Response - * @throws ApiError - */ -export const useXcomServiceGetXcomEntriesSuspense = < - TData = Common.XcomServiceGetXcomEntriesDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - dagRunId, - limit, - mapIndex, - offset, - taskId, - xcomKey, - }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn( - { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, - queryKey, - ), - queryFn: () => - XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, - ...options, - }); -/** - * Get Tasks - * Get tasks for DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response - * @throws ApiError - */ -export const useTaskServiceGetTasksSuspense = < - TData = Common.TaskServiceGetTasksDefaultResponse, - TError = unknown, - TQueryKey extends Array = unknown[], ->( - { - dagId, - orderBy, - }: { - dagId: string; - orderBy?: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, -) => - useSuspenseQuery({ - queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), - queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, - ...options, - }); -/** - * Get Task - * Get simplified representation of a task. - * @param data The data for the request. 
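On getXcomEntry above: as I read the flags, deserialize asks the API to run the configured XCom backend's deserialization server-side, while stringify returns the value as a display string rather than raw JSON, which is why the hook's response type is unknown. A sketch with hypothetical identifiers:

import { useXcomServiceGetXcomEntrySuspense } from "../openapi-gen/queries/suspense";

const XComPreview = () => {
  const { data } = useXcomServiceGetXcomEntrySuspense({
    dagId: "example_dag",
    dagRunId: "manual__2025-01-01T00:00:00+00:00",
    taskId: "extract",
    xcomKey: "return_value",
    stringify: true, // ask the server for a printable rendering
  });

  return <pre>{String(data)}</pre>;
};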
- * @param data.dagId
- * @param data.taskId
- * @returns TaskResponse Successful Response
- * @throws ApiError
- */
-export const useTaskServiceGetTaskSuspense = <
-  TData = Common.TaskServiceGetTaskDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagId,
-    taskId,
-  }: {
-    dagId: string;
-    taskId: unknown;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey),
-    queryFn: () => TaskService.getTask({ dagId, taskId }) as TData,
-    ...options,
-  });
-/**
- * Get Variable
- * Get a variable entry.
- * @param data The data for the request.
- * @param data.variableKey
- * @returns VariableResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServiceGetVariableSuspense = <
-  TData = Common.VariableServiceGetVariableDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    variableKey,
-  }: {
-    variableKey: string;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey),
-    queryFn: () => VariableService.getVariable({ variableKey }) as TData,
-    ...options,
-  });
-/**
- * Get Variables
- * Get all Variables entries.
- * @param data The data for the request.
- * @param data.limit
- * @param data.offset
- * @param data.orderBy
- * @param data.variableKeyPattern
- * @returns VariableCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useVariableServiceGetVariablesSuspense = <
-  TData = Common.VariableServiceGetVariablesDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    limit,
-    offset,
-    orderBy,
-    variableKeyPattern,
-  }: {
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    variableKeyPattern?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseVariableServiceGetVariablesKeyFn(
-      { limit, offset, orderBy, variableKeyPattern },
-      queryKey,
-    ),
-    queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData,
-    ...options,
-  });
-/**
- * Get Dag Version
- * Get one Dag Version.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.versionNumber
- * @returns DagVersionResponse Successful Response
- * @throws ApiError
- */
-export const useDagVersionServiceGetDagVersionSuspense = <
-  TData = Common.DagVersionServiceGetDagVersionDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagId,
-    versionNumber,
-  }: {
-    dagId: string;
-    versionNumber: number;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey),
-    queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData,
-    ...options,
-  });
-/**
- * Get Dag Versions
- * Get all DAG Versions.
- *
- * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.limit
- * @param data.offset
- * @param data.versionNumber
- * @param data.bundleName
- * @param data.bundleVersion
- * @param data.orderBy
- * @returns DAGVersionCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useDagVersionServiceGetDagVersionsSuspense = <
-  TData = Common.DagVersionServiceGetDagVersionsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    bundleName,
-    bundleVersion,
-    dagId,
-    limit,
-    offset,
-    orderBy,
-    versionNumber,
-  }: {
-    bundleName?: string;
-    bundleVersion?: string;
-    dagId: string;
-    limit?: number;
-    offset?: number;
-    orderBy?: string;
-    versionNumber?: number;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn(
-      { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber },
-      queryKey,
-    ),
-    queryFn: () =>
-      DagVersionService.getDagVersions({
-        bundleName,
-        bundleVersion,
-        dagId,
-        limit,
-        offset,
-        orderBy,
-        versionNumber,
-      }) as TData,
-    ...options,
-  });
-/**
- * Get Health
- * @returns HealthInfoResponse Successful Response
- * @throws ApiError
- */
-export const useMonitorServiceGetHealthSuspense = <
-  TData = Common.MonitorServiceGetHealthDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey),
-    queryFn: () => MonitorService.getHealth() as TData,
-    ...options,
-  });
-/**
- * Get Version
- * Get version information.
- * @returns VersionInfo Successful Response
- * @throws ApiError
- */
-export const useVersionServiceGetVersionSuspense = <
-  TData = Common.VersionServiceGetVersionDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey),
-    queryFn: () => VersionService.getVersion() as TData,
-    ...options,
-  });
-/**
- * Login
- * Redirect to the login URL depending on the AuthManager configured.
- * @param data The data for the request.
- * @param data.next
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const useLoginServiceLoginSuspense = <
-  TData = Common.LoginServiceLoginDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    next,
-  }: {
-    next?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey),
-    queryFn: () => LoginService.login({ next }) as TData,
-    ...options,
-  });
-/**
- * Logout
- * Logout the user.
- * @param data The data for the request.
- * @param data.next
- * @returns unknown Successful Response
- * @throws ApiError
- */
-export const useLoginServiceLogoutSuspense = <
-  TData = Common.LoginServiceLogoutDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    next,
-  }: {
-    next?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey),
-    queryFn: () => LoginService.logout({ next }) as TData,
-    ...options,
-  });
-/**
- * Get Auth Menus
- * @returns MenuItemCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useAuthLinksServiceGetAuthMenusSuspense = <
-  TData = Common.AuthLinksServiceGetAuthMenusDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey),
-    queryFn: () => AuthLinksService.getAuthMenus() as TData,
-    ...options,
-  });
-/**
- * Recent Dag Runs
- * Get recent DAG runs.
- * @param data The data for the request.
- * @param data.dagRunsLimit
- * @param data.limit
- * @param data.offset
- * @param data.tags
- * @param data.tagsMatchMode
- * @param data.owners
- * @param data.dagIds
- * @param data.dagIdPattern
- * @param data.dagDisplayNamePattern
- * @param data.excludeStale
- * @param data.paused
- * @param data.lastDagRunState
- * @returns DAGWithLatestDagRunsCollectionResponse Successful Response
- * @throws ApiError
- */
-export const useDagsServiceRecentDagRunsSuspense = <
-  TData = Common.DagsServiceRecentDagRunsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagDisplayNamePattern,
-    dagIdPattern,
-    dagIds,
-    dagRunsLimit,
-    excludeStale,
-    lastDagRunState,
-    limit,
-    offset,
-    owners,
-    paused,
-    tags,
-    tagsMatchMode,
-  }: {
-    dagDisplayNamePattern?: string;
-    dagIdPattern?: string;
-    dagIds?: string[];
-    dagRunsLimit?: number;
-    excludeStale?: boolean;
-    lastDagRunState?: DagRunState;
-    limit?: number;
-    offset?: number;
-    owners?: string[];
-    paused?: boolean;
-    tags?: string[];
-    tagsMatchMode?: "any" | "all";
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseDagsServiceRecentDagRunsKeyFn(
-      {
-        dagDisplayNamePattern,
-        dagIdPattern,
-        dagIds,
-        dagRunsLimit,
-        excludeStale,
-        lastDagRunState,
-        limit,
-        offset,
-        owners,
-        paused,
-        tags,
-        tagsMatchMode,
-      },
-      queryKey,
-    ),
-    queryFn: () =>
-      DagsService.recentDagRuns({
-        dagDisplayNamePattern,
-        dagIdPattern,
-        dagIds,
-        dagRunsLimit,
-        excludeStale,
-        lastDagRunState,
-        limit,
-        offset,
-        owners,
-        paused,
-        tags,
-        tagsMatchMode,
-      }) as TData,
-    ...options,
-  });
-/**
- * Get Dependencies
- * Dependencies graph.
- * @param data The data for the request.
- * @param data.nodeId
- * @returns BaseGraphResponse Successful Response
- * @throws ApiError
- */
-export const useDependenciesServiceGetDependenciesSuspense = <
-  TData = Common.DependenciesServiceGetDependenciesDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    nodeId,
-  }: {
-    nodeId?: string;
-  } = {},
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey),
-    queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData,
-    ...options,
-  });
-/**
- * Historical Metrics
- * Return cluster activity historical metrics.
- * @param data The data for the request.
- * @param data.startDate
- * @param data.endDate
- * @returns HistoricalMetricDataResponse Successful Response
- * @throws ApiError
- */
-export const useDashboardServiceHistoricalMetricsSuspense = <
-  TData = Common.DashboardServiceHistoricalMetricsDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    endDate,
-    startDate,
-  }: {
-    endDate?: string;
-    startDate: string;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey),
-    queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData,
-    ...options,
-  });
-/**
- * Structure Data
- * Get Structure Data.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.includeUpstream
- * @param data.includeDownstream
- * @param data.root
- * @param data.externalDependencies
- * @param data.versionNumber
- * @returns StructureDataResponse Successful Response
- * @throws ApiError
- */
-export const useStructureServiceStructureDataSuspense = <
-  TData = Common.StructureServiceStructureDataDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagId,
-    externalDependencies,
-    includeDownstream,
-    includeUpstream,
-    root,
-    versionNumber,
-  }: {
-    dagId: string;
-    externalDependencies?: boolean;
-    includeDownstream?: boolean;
-    includeUpstream?: boolean;
-    root?: string;
-    versionNumber?: number;
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseStructureServiceStructureDataKeyFn(
-      { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber },
-      queryKey,
-    ),
-    queryFn: () =>
-      StructureService.structureData({
-        dagId,
-        externalDependencies,
-        includeDownstream,
-        includeUpstream,
-        root,
-        versionNumber,
-      }) as TData,
-    ...options,
-  });
-/**
- * Grid Data
- * Return grid data.
- * @param data The data for the request.
- * @param data.dagId
- * @param data.includeUpstream
- * @param data.includeDownstream
- * @param data.root
- * @param data.offset
- * @param data.runType
- * @param data.state
- * @param data.limit
- * @param data.orderBy
- * @param data.runAfterGte
- * @param data.runAfterLte
- * @param data.logicalDateGte
- * @param data.logicalDateLte
- * @returns GridResponse Successful Response
- * @throws ApiError
- */
-export const useGridServiceGridDataSuspense = <
-  TData = Common.GridServiceGridDataDefaultResponse,
-  TError = unknown,
-  TQueryKey extends Array<unknown> = unknown[],
->(
-  {
-    dagId,
-    includeDownstream,
-    includeUpstream,
-    limit,
-    logicalDateGte,
-    logicalDateLte,
-    offset,
-    orderBy,
-    root,
-    runAfterGte,
-    runAfterLte,
-    runType,
-    state,
-  }: {
-    dagId: string;
-    includeDownstream?: boolean;
-    includeUpstream?: boolean;
-    limit?: number;
-    logicalDateGte?: string;
-    logicalDateLte?: string;
-    offset?: number;
-    orderBy?: string;
-    root?: string;
-    runAfterGte?: string;
-    runAfterLte?: string;
-    runType?: string[];
-    state?: string[];
-  },
-  queryKey?: TQueryKey,
-  options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">,
-) =>
-  useSuspenseQuery<TData, TError>({
-    queryKey: Common.UseGridServiceGridDataKeyFn(
-      {
-        dagId,
-        includeDownstream,
-        includeUpstream,
-        limit,
-        logicalDateGte,
-        logicalDateLte,
-        offset,
-        orderBy,
-        root,
-        runAfterGte,
-        runAfterLte,
-        runType,
-        state,
-      },
-      queryKey,
-    ),
-    queryFn: () =>
-      GridService.gridData({
-        dagId,
-        includeDownstream,
-        includeUpstream,
-        limit,
-        logicalDateGte,
-        logicalDateLte,
-        offset,
-        orderBy,
-        root,
-        runAfterGte,
-        runAfterLte,
-        runType,
-        state,
-      }) as TData,
-    ...options,
-  });
+/**
+* Get Assets
+* Get assets.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.uriPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagIds
+* @param data.onlyActive
+* @param data.orderBy
+* @returns AssetCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetsSuspense = <TData = Common.AssetServiceGetAssetsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: {
+  dagIds?: string[];
+  limit?: number;
+  namePattern?: string;
+  offset?: number;
+  onlyActive?: boolean;
+  orderBy?: string[];
+  uriPattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }, queryKey), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) as TData, ...options });
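A minimal usage sketch of the hook above (not part of the generated file; the component names and rendered markup are hypothetical, while the hook, its parameters, and the `%` / `_` LIKE-wildcard semantics come from the definitions in this diff). Suspense hooks never expose a pending state; the nearest <Suspense> boundary shows the fallback while the request is in flight:

import { Suspense } from "react";

const AssetList = () => {
  // `namePattern` is a SQL LIKE expression: `%` matches any run of characters,
  // `_` matches exactly one, so "%customer_%" matches names containing
  // "customer" followed by at least one more character.
  const { data } = useAssetServiceGetAssetsSuspense({ namePattern: "%customer_%", limit: 50 });
  // Field names below follow AssetCollectionResponse / AssetResponse as
  // generated elsewhere in this PR; treat them as illustrative.
  return (
    <ul>
      {data.assets.map((asset) => (
        <li key={asset.id}>{asset.name}</li>
      ))}
    </ul>
  );
};

export const AssetsPanel = () => (
  <Suspense fallback={<span>Loading assets</span>}>
    <AssetList />
  </Suspense>
);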
+/**
+* Get Asset Aliases
+* Get asset aliases.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.orderBy
+* @returns AssetAliasCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetAliasesSuspense = <TData = Common.AssetServiceGetAssetAliasesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, namePattern, offset, orderBy }: {
+  limit?: number;
+  namePattern?: string;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, ...options });
+/**
+* Get Asset Alias
+* Get an asset alias.
+* @param data The data for the request.
+* @param data.assetAliasId
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetAliasSuspense = <TData = Common.AssetServiceGetAssetAliasDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetAliasId }: {
+  assetAliasId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, ...options });
+/**
+* Get Asset Events
+* Get asset events.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.assetId
+* @param data.sourceDagId
+* @param data.sourceTaskId
+* @param data.sourceRunId
+* @param data.sourceMapIndex
+* @param data.timestampGte
+* @param data.timestampLte
+* @returns AssetEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetEventsSuspense = <TData = Common.AssetServiceGetAssetEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: {
+  assetId?: number;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  sourceDagId?: string;
+  sourceMapIndex?: number;
+  sourceRunId?: string;
+  sourceTaskId?: string;
+  timestampGte?: string;
+  timestampLte?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }, queryKey), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) as TData, ...options });
+/**
+* Get Asset Queued Events
+* Get queued asset events for an asset.
+* @param data The data for the request.
+* @param data.assetId
+* @param data.before
+* @returns QueuedEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetQueuedEventsSuspense = <TData = Common.AssetServiceGetAssetQueuedEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId, before }: {
+  assetId: number;
+  before?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, ...options });
+/**
+* Get Asset
+* Get an asset.
+* @param data The data for the request.
+* @param data.assetId
+* @returns AssetResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetAssetSuspense = <TData = Common.AssetServiceGetAssetDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId }: {
+  assetId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), queryFn: () => AssetService.getAsset({ assetId }) as TData, ...options });
+/**
+* Get Dag Asset Queued Events
+* Get queued asset events for a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.before
+* @returns QueuedEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetDagAssetQueuedEventsSuspense = <TData = Common.AssetServiceGetDagAssetQueuedEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ before, dagId }: {
+  before?: string;
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, ...options });
+/**
+* Get Dag Asset Queued Event
+* Get a queued asset event for a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.assetId
+* @param data.before
+* @returns QueuedEventResponse Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceGetDagAssetQueuedEventSuspense = <TData = Common.AssetServiceGetDagAssetQueuedEventDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ assetId, before, dagId }: {
+  assetId: number;
+  before?: string;
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, ...options });
+/**
+* Next Run Assets
+* @param data The data for the request.
+* @param data.dagId
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useAssetServiceNextRunAssetsSuspense = <TData = Common.AssetServiceNextRunAssetsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, ...options });
+/**
+* List Backfills
+* @param data The data for the request.
+* @param data.dagId
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns BackfillCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceListBackfillsSuspense = <TData = Common.BackfillServiceListBackfillsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, ...options });
+/**
+* Get Backfill
+* @param data The data for the request.
+* @param data.backfillId
+* @returns BackfillResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceGetBackfillSuspense = <TData = Common.BackfillServiceGetBackfillDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ backfillId }: {
+  backfillId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, ...options });
+/**
+* List Backfills Ui
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.dagId
+* @param data.active
+* @returns BackfillCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useBackfillServiceListBackfillsUiSuspense = <TData = Common.BackfillServiceListBackfillsUiDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ active, dagId, limit, offset, orderBy }: {
+  active?: boolean;
+  dagId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseBackfillServiceListBackfillsUiKeyFn({ active, dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfillsUi({ active, dagId, limit, offset, orderBy }) as TData, ...options });
+/**
+* Get Connection
+* Get a connection entry.
+* @param data The data for the request.
+* @param data.connectionId
+* @returns ConnectionResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceGetConnectionSuspense = <TData = Common.ConnectionServiceGetConnectionDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ connectionId }: {
+  connectionId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, ...options });
+/**
+* Get Connections
+* Get all connection entries.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.connectionIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns ConnectionCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceGetConnectionsSuspense = <TData = Common.ConnectionServiceGetConnectionsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ connectionIdPattern, limit, offset, orderBy }: {
+  connectionIdPattern?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }, queryKey), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, ...options });
+/**
+* Hook Meta Data
+* Retrieve information about available connection types (hook classes) and their parameters.
+* @returns ConnectionHookMetaData Successful Response
+* @throws ApiError
+*/
+export const useConnectionServiceHookMetaDataSuspense = <TData = Common.ConnectionServiceHookMetaDataDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), queryFn: () => ConnectionService.hookMetaData() as TData, ...options });
+/**
+* Get Dag Run
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @returns DAGRunResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceGetDagRunSuspense = <TData = Common.DagRunServiceGetDagRunDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId }: {
+  dagId: string;
+  dagRunId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, ...options });
+/**
+* Get Upstream Asset Events
+* If dag run is asset-triggered, return the asset events that triggered it.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @returns AssetEventCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceGetUpstreamAssetEventsSuspense = <TData = Common.DagRunServiceGetUpstreamAssetEventsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId }: {
+  dagId: string;
+  dagRunId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, ...options });
+/**
+* Get Dag Runs
+* Get all DAG Runs.
+*
+* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.limit
+* @param data.offset
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.updatedAtGte
+* @param data.updatedAtLte
+* @param data.runType
+* @param data.state
+* @param data.orderBy
+* @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.triggeringUserNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns DAGRunCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceGetDagRunsSuspense = <TData = Common.DagRunServiceGetDagRunsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }: {
+  dagId: string;
+  endDateGte?: string;
+  endDateLte?: string;
+  limit?: number;
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+  runIdPattern?: string;
+  runType?: string[];
+  startDateGte?: string;
+  startDateLte?: string;
+  state?: string[];
+  triggeringUserNamePattern?: string;
+  updatedAtGte?: string;
+  updatedAtLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, triggeringUserNamePattern, updatedAtGte, updatedAtLte }) as TData, ...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceWaitDagRunUntilFinishedSuspense = <TData = Common.DagRunServiceWaitDagRunUntilFinishedDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, interval, result }: {
+  dagId: string;
+  dagRunId: string;
+  interval: number;
+  result?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useExperimentalServiceWaitDagRunUntilFinishedSuspense = <TData = Common.ExperimentalServiceWaitDagRunUntilFinishedDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, interval, result }: {
+  dagId: string;
+  dagRunId: string;
+  interval: number;
+  result?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options });
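A hedged sketch of calling the experimental wait endpoint above (the dag id and run id are invented for illustration). Per the JSDoc, `interval` is the server-side pause between state checks, and each entry in `result` asks the server to include that task's XCom result in the response; since the endpoint is experimental, both the shape of the returned data and the endpoint itself may change:

// Suspends until the (hypothetical) run finishes, then returns its final state
// plus the requested XCom results. The response is typed `unknown` upstream.
const { data } = useDagRunServiceWaitDagRunUntilFinishedSuspense({
  dagId: "example_dag", // hypothetical dag id
  dagRunId: "manual__2025-01-01T00:00:00+00:00", // hypothetical run id
  interval: 5, // seconds between dag run state checks on the server
  result: ["extract", "load"], // hypothetical task ids whose XComs to collect
});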
+/**
+* Get Dag Source
+* Get source code using file token.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.versionNumber
+* @param data.accept
+* @returns DAGSourceResponse Successful Response
+* @throws ApiError
+*/
+export const useDagSourceServiceGetDagSourceSuspense = <TData = Common.DagSourceServiceGetDagSourceDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, dagId, versionNumber }: {
+  accept?: "application/json" | "text/plain" | "*/*";
+  dagId: string;
+  versionNumber?: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, ...options });
+/**
+* Get Dag Stats
+* Get Dag statistics.
+* @param data The data for the request.
+* @param data.dagIds
+* @returns DagStatsCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagStatsServiceGetDagStatsSuspense = <TData = Common.DagStatsServiceGetDagStatsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagIds }: {
+  dagIds?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, ...options });
+/**
+* Get Dag Reports
+* Get DAG report.
+* @param data The data for the request.
+* @param data.subdir
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagReportServiceGetDagReportsSuspense = <TData = Common.DagReportServiceGetDagReportsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ subdir }: {
+  subdir: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey), queryFn: () => DagReportService.getDagReports({ subdir }) as TData, ...options });
+/**
+* Get Config
+* @param data The data for the request.
+* @param data.section
+* @param data.accept
+* @returns Config Successful Response
+* @throws ApiError
+*/
+export const useConfigServiceGetConfigSuspense = <TData = Common.ConfigServiceGetConfigDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, section }: {
+  accept?: "application/json" | "text/plain" | "*/*";
+  section?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey), queryFn: () => ConfigService.getConfig({ accept, section }) as TData, ...options });
+/**
+* Get Config Value
+* @param data The data for the request.
+* @param data.section
+* @param data.option
+* @param data.accept
+* @returns Config Successful Response
+* @throws ApiError
+*/
+export const useConfigServiceGetConfigValueSuspense = <TData = Common.ConfigServiceGetConfigValueDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, option, section }: {
+  accept?: "application/json" | "text/plain" | "*/*";
+  option: string;
+  section: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData, ...options });
+/**
+* Get Configs
+* Get configs for UI.
+* @returns ConfigResponse Successful Response
+* @throws ApiError
+*/
+export const useConfigServiceGetConfigsSuspense = <TData = Common.ConfigServiceGetConfigsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey), queryFn: () => ConfigService.getConfigs() as TData, ...options });
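A small sketch of the `accept` parameter shared by the config and dag-source hooks above. The union type in the signatures shows the server can return either structured JSON or a plain-text rendering; only the `section` value below is a real Airflow config section, the variable names are illustrative:

// With "application/json" the response is the structured `Config` model;
// with "text/plain" the same endpoint returns the INI-style text form.
const { data: coreConfig } = useConfigServiceGetConfigSuspense({
  section: "core",
  accept: "application/json",
});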
+/**
+* List Dag Warnings
+* Get a list of DAG warnings.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.warningType
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns DAGWarningCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagWarningServiceListDagWarningsSuspense = <TData = Common.DagWarningServiceListDagWarningsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy, warningType }: {
+  dagId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  warningType?: DagWarningType;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }, queryKey), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, ...options });
+/**
+* Get Dags
+* Get all DAGs.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.tags
+* @param data.tagsMatchMode
+* @param data.owners
+* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.excludeStale
+* @param data.paused
+* @param data.lastDagRunState
+* @param data.bundleName
+* @param data.bundleVersion
+* @param data.dagRunStartDateGte
+* @param data.dagRunStartDateLte
+* @param data.dagRunEndDateGte
+* @param data.dagRunEndDateLte
+* @param data.dagRunState
+* @param data.orderBy
+* @param data.isFavorite
+* @returns DAGCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagsSuspense = <TData = Common.DagServiceGetDagsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: {
+  bundleName?: string;
+  bundleVersion?: string;
+  dagDisplayNamePattern?: string;
+  dagIdPattern?: string;
+  dagRunEndDateGte?: string;
+  dagRunEndDateLte?: string;
+  dagRunStartDateGte?: string;
+  dagRunStartDateLte?: string;
+  dagRunState?: string[];
+  excludeStale?: boolean;
+  isFavorite?: boolean;
+  lastDagRunState?: DagRunState;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  owners?: string[];
+  paused?: boolean;
+  tags?: string[];
+  tagsMatchMode?: "any" | "all";
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagsKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options });
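A usage sketch for the DAG-listing hook above, showing how `tags` and `tagsMatchMode` combine: "any" returns DAGs carrying at least one of the given tags, "all" requires every tag. The tag values and the `orderBy` field are hypothetical examples, not values guaranteed by the API:

// List unpaused DAGs tagged "etl" or "hourly", ordered by dag_id.
const { data: taggedDags } = useDagServiceGetDagsSuspense({
  tags: ["etl", "hourly"], // hypothetical tag names
  tagsMatchMode: "any", // "all" would require both tags on a DAG
  paused: false,
  orderBy: ["dag_id"], // assumed to be an accepted sort field
});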
+/**
+* Get Dag
+* Get basic information about a DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @returns DAGResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagSuspense = <TData = Common.DagServiceGetDagDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDag({ dagId }) as TData, ...options });
+/**
+* Get Dag Details
+* Get details of DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @returns DAGDetailsResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagDetailsSuspense = <TData = Common.DagServiceGetDagDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDagDetails({ dagId }) as TData, ...options });
+/**
+* Get Dag Tags
+* Get all DAG tags.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.tagNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns DAGTagCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagTagsSuspense = <TData = Common.DagServiceGetDagTagsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy, tagNamePattern }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  tagNamePattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, ...options });
+/**
+* Get Dags
+* Get DAGs with recent DagRun.
+* @param data The data for the request.
+* @param data.dagRunsLimit
+* @param data.limit
+* @param data.offset
+* @param data.tags
+* @param data.tagsMatchMode
+* @param data.owners
+* @param data.dagIds
+* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.excludeStale
+* @param data.paused
+* @param data.lastDagRunState
+* @param data.bundleName
+* @param data.bundleVersion
+* @param data.orderBy
+* @param data.isFavorite
+* @returns DAGWithLatestDagRunsCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetDagsUiSuspense = <TData = Common.DagServiceGetDagsUiDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: {
+  bundleName?: string;
+  bundleVersion?: string;
+  dagDisplayNamePattern?: string;
+  dagIdPattern?: string;
+  dagIds?: string[];
+  dagRunsLimit?: number;
+  excludeStale?: boolean;
+  isFavorite?: boolean;
+  lastDagRunState?: DagRunState;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  owners?: string[];
+  paused?: boolean;
+  tags?: string[];
+  tagsMatchMode?: "any" | "all";
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDagsUi({ bundleName, bundleVersion, dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options });
+/**
+* Get Latest Run Info
+* Get latest run.
+* @param data The data for the request.
+* @param data.dagId
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagServiceGetLatestRunInfoSuspense = <TData = Common.DagServiceGetLatestRunInfoDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId }: {
+  dagId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagServiceGetLatestRunInfoKeyFn({ dagId }, queryKey), queryFn: () => DagService.getLatestRunInfo({ dagId }) as TData, ...options });
+/**
+* Get Event Log
+* @param data The data for the request.
+* @param data.eventLogId
+* @returns EventLogResponse Successful Response
+* @throws ApiError
+*/
+export const useEventLogServiceGetEventLogSuspense = <TData = Common.EventLogServiceGetEventLogDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ eventLogId }: {
+  eventLogId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, ...options });
+/**
+* Get Event Logs
+* Get all Event Logs.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.dagId
+* @param data.taskId
+* @param data.runId
+* @param data.mapIndex
+* @param data.tryNumber
+* @param data.owner
+* @param data.event
+* @param data.excludedEvents
+* @param data.includedEvents
+* @param data.before
+* @param data.after
+* @returns EventLogCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useEventLogServiceGetEventLogsSuspense = <TData = Common.EventLogServiceGetEventLogsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: {
+  after?: string;
+  before?: string;
+  dagId?: string;
+  event?: string;
+  excludedEvents?: string[];
+  includedEvents?: string[];
+  limit?: number;
+  mapIndex?: number;
+  offset?: number;
+  orderBy?: string[];
+  owner?: string;
+  runId?: string;
+  taskId?: string;
+  tryNumber?: number;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }, queryKey), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) as TData, ...options });
+/**
+* Get Extra Links
+* Get extra links for task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns ExtraLinkCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useExtraLinksServiceGetExtraLinksSuspense = <TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Extra Links
+* Get extra links for task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns ExtraLinkCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetExtraLinksSuspense = <TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instance
+* Get task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @returns TaskInstanceResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceSuspense = <TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, ...options });
+/**
+* Get Mapped Task Instances
+* Get list of mapped task instances.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.updatedAtGte
+* @param data.updatedAtLte
+* @param data.durationGte
+* @param data.durationLte
+* @param data.state
+* @param data.pool
+* @param data.queue
+* @param data.executor
+* @param data.versionNumber
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstancesSuspense = <TData = Common.TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: {
+  dagId: string;
+  dagRunId: string;
+  durationGte?: number;
+  durationLte?: number;
+  endDateGte?: string;
+  endDateLte?: string;
+  executor?: string[];
+  limit?: number;
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+  offset?: number;
+  orderBy?: string[];
+  pool?: string[];
+  queue?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+  startDateGte?: string;
+  startDateLte?: string;
+  state?: string[];
+  taskId: string;
+  updatedAtGte?: string;
+  updatedAtLte?: string;
+  versionNumber?: number[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options });
+/**
+* Get Task Instance Dependencies
+* Get dependencies blocking task from getting scheduled.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskDependencyCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexSuspense = <TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instance Dependencies
+* Get dependencies blocking task from getting scheduled.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskDependencyCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceDependenciesSuspense = <TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instance Tries
+* Get list of task instances history.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskInstanceHistoryCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceTriesSuspense = <TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Mapped Task Instance Tries
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskInstanceHistoryCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstanceTriesSuspense = <TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Mapped Task Instance
+* Get task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns TaskInstanceResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstanceSuspense = <TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Task Instances
+* Get list of task instances.
+*
+* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs
+* and DAG runs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.updatedAtGte
+* @param data.updatedAtLte
+* @param data.durationGte
+* @param data.durationLte
+* @param data.taskDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.state
+* @param data.pool
+* @param data.queue
+* @param data.executor
+* @param data.versionNumber
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns TaskInstanceCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstancesSuspense = <TData = Common.TaskInstanceServiceGetTaskInstancesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: {
+  dagId: string;
+  dagRunId: string;
+  durationGte?: number;
+  durationLte?: number;
+  endDateGte?: string;
+  endDateLte?: string;
+  executor?: string[];
+  limit?: number;
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+  offset?: number;
+  orderBy?: string[];
+  pool?: string[];
+  queue?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+  startDateGte?: string;
+  startDateLte?: string;
+  state?: string[];
+  taskDisplayNamePattern?: string;
+  taskId?: string;
+  updatedAtGte?: string;
+  updatedAtLte?: string;
+  versionNumber?: number[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options });
+/**
+* Get Task Instance Try Details
+* Get task instance details by try number.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.taskTryNumber
+* @param data.mapIndex
+* @returns TaskInstanceHistoryResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetTaskInstanceTryDetailsSuspense = <TData = Common.TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+  taskTryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options });
+/**
+* Get Mapped Task Instance Try Details
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.taskTryNumber
+* @param data.mapIndex
+* @returns TaskInstanceHistoryResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsSuspense = <TData = Common.TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+  taskTryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options });
+/**
+* Get Log
+* Get logs for a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.tryNumber
+* @param data.fullContent
+* @param data.mapIndex
+* @param data.token
+* @param data.accept
+* @returns TaskInstancesLogResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetLogSuspense = <TData = Common.TaskInstanceServiceGetLogDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: {
+  accept?: "application/json" | "*/*" | "application/x-ndjson";
+  dagId: string;
+  dagRunId: string;
+  fullContent?: boolean;
+  mapIndex?: number;
+  taskId: string;
+  token?: string;
+  tryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, queryKey), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) as TData, ...options });
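A usage sketch for the log hook above (identifiers are hypothetical; the parameters and their types come from the signature). `tryNumber` is required because logs are stored per attempt, and `fullContent` requests the complete log rather than a truncated view:

// Fetch the full structured log for the first try of a (hypothetical) task.
const { data: log } = useTaskInstanceServiceGetLogSuspense({
  dagId: "example_dag", // hypothetical dag id
  dagRunId: "manual__2025-01-01T00:00:00+00:00", // hypothetical run id
  taskId: "transform", // hypothetical task id
  tryNumber: 1, // logs are kept per attempt
  fullContent: true,
  accept: "application/json", // structured TaskInstancesLogResponse
});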
+/**
+* Get External Log Url
+* Get external log URL for a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.tryNumber
+* @param data.mapIndex
+* @returns ExternalLogUrlResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskInstanceServiceGetExternalLogUrlSuspense = <TData = Common.TaskInstanceServiceGetExternalLogUrlDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId, tryNumber }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex?: number;
+  taskId: string;
+  tryNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskInstanceServiceGetExternalLogUrlKeyFn({ dagId, dagRunId, mapIndex, taskId, tryNumber }, queryKey), queryFn: () => TaskInstanceService.getExternalLogUrl({ dagId, dagRunId, mapIndex, taskId, tryNumber }) as TData, ...options });
+/**
+* Get Import Error
+* Get an import error.
+* @param data The data for the request.
+* @param data.importErrorId
+* @returns ImportErrorResponse Successful Response
+* @throws ApiError
+*/
+export const useImportErrorServiceGetImportErrorSuspense = <TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ importErrorId }: {
+  importErrorId: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, ...options });
+/**
+* Get Import Errors
+* Get all import errors.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @returns ImportErrorCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useImportErrorServiceGetImportErrorsSuspense = <TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options });
+/**
+* Get Jobs
+* Get all jobs.
+* @param data The data for the request.
+* @param data.isAlive
+* @param data.startDateGte
+* @param data.startDateLte
+* @param data.endDateGte
+* @param data.endDateLte
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.jobState
+* @param data.jobType
+* @param data.hostname
+* @param data.executorClass
+* @returns JobCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useJobServiceGetJobsSuspense = <TData = Common.JobServiceGetJobsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: {
+  endDateGte?: string;
+  endDateLte?: string;
+  executorClass?: string;
+  hostname?: string;
+  isAlive?: boolean;
+  jobState?: string;
+  jobType?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  startDateGte?: string;
+  startDateLte?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }, queryKey), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) as TData, ...options });
+/**
+* Get Plugins
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @returns PluginCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const usePluginServiceGetPluginsSuspense = <TData = Common.PluginServiceGetPluginsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset }: {
+  limit?: number;
+  offset?: number;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey), queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, ...options });
+/**
+* Import Errors
+* @returns PluginImportErrorCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const usePluginServiceImportErrorsSuspense = <TData = Common.PluginServiceImportErrorsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UsePluginServiceImportErrorsKeyFn(queryKey), queryFn: () => PluginService.importErrors() as TData, ...options });
+/**
+* Get Pool
+* Get a pool.
+* @param data The data for the request.
+* @param data.poolName
+* @returns PoolResponse Successful Response
+* @throws ApiError
+*/
+export const usePoolServiceGetPoolSuspense = <TData = Common.PoolServiceGetPoolDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ poolName }: {
+  poolName: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), queryFn: () => PoolService.getPool({ poolName }) as TData, ...options });
+/**
+* Get Pools
+* Get all pools entries.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.poolNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns PoolCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const usePoolServiceGetPoolsSuspense = <TData = Common.PoolServiceGetPoolsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy, poolNamePattern }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  poolNamePattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData, ...options });
+/**
+* Get Providers
+* Get providers.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @returns ProviderCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useProviderServiceGetProvidersSuspense = <TData = Common.ProviderServiceGetProvidersDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset }: {
+  limit?: number;
+  offset?: number;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey), queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, ...options });
+/**
+* Get Xcom Entry
+* Get an XCom entry.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.taskId
+* @param data.dagRunId
+* @param data.xcomKey
+* @param data.mapIndex
+* @param data.deserialize
+* @param data.stringify
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useXcomServiceGetXcomEntrySuspense = <TData = Common.XcomServiceGetXcomEntryDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: {
+  dagId: string;
+  dagRunId: string;
+  deserialize?: boolean;
+  mapIndex?: number;
+  stringify?: boolean;
+  taskId: string;
+  xcomKey: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) as TData, ...options });
+/**
+* Get Xcom Entries
+* Get all XCom entries.
+*
+* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.xcomKey
+* @param data.mapIndex
+* @param data.limit
+* @param data.offset
+* @returns XComCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useXcomServiceGetXcomEntriesSuspense = <TData = Common.XcomServiceGetXcomEntriesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: {
+  dagId: string;
+  dagRunId: string;
+  limit?: number;
+  mapIndex?: number;
+  offset?: number;
+  taskId: string;
+  xcomKey?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, ...options });
+/**
+* Get Tasks
+* Get tasks for DAG.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.orderBy
+* @returns TaskCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskServiceGetTasksSuspense = <TData = Common.TaskServiceGetTasksDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, orderBy }: {
+  dagId: string;
+  orderBy?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, ...options });
+/**
+* Get Task
+* Get simplified representation of a task.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.taskId
+* @returns TaskResponse Successful Response
+* @throws ApiError
+*/
+export const useTaskServiceGetTaskSuspense = <TData = Common.TaskServiceGetTaskDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, taskId }: {
+  dagId: string;
+  taskId: unknown;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, ...options });
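A sketch of reading a single XCom value with the XCom entry hook defined above (IDs hypothetical, `openapi/queries` alias assumed). `stringify: false` asks the API for the native JSON value rather than its string form, and `deserialize` additionally runs the server-side XCom deserializer when the deployment enables that support:

import { useXcomServiceGetXcomEntrySuspense } from "openapi/queries";

const ReturnValue = () => {
  const { data } = useXcomServiceGetXcomEntrySuspense({
    dagId: "example_dag",
    dagRunId: "manual__2025-01-01T00:00:00+00:00",
    taskId: "extract",
    xcomKey: "return_value",
    stringify: false, // native JSON value instead of its string representation
  });

  return <pre>{JSON.stringify(data, null, 2)}</pre>;
};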
+/**
+* Get Variable
+* Get a variable entry.
+* @param data The data for the request.
+* @param data.variableKey
+* @returns VariableResponse Successful Response
+* @throws ApiError
+*/
+export const useVariableServiceGetVariableSuspense = <TData = Common.VariableServiceGetVariableDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ variableKey }: {
+  variableKey: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey), queryFn: () => VariableService.getVariable({ variableKey }) as TData, ...options });
+/**
+* Get Variables
+* Get all Variables entries.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.variableKeyPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns VariableCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useVariableServiceGetVariablesSuspense = <TData = Common.VariableServiceGetVariablesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ limit, offset, orderBy, variableKeyPattern }: {
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  variableKeyPattern?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }, queryKey), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData, ...options });
+/**
+* Get Dag Version
+* Get one Dag Version.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.versionNumber
+* @returns DagVersionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagVersionServiceGetDagVersionSuspense = <TData = Common.DagVersionServiceGetDagVersionDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, versionNumber }: {
+  dagId: string;
+  versionNumber: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData, ...options });
+/**
+* Get Dag Versions
+* Get all DAG Versions.
+*
+* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.limit
+* @param data.offset
+* @param data.versionNumber
+* @param data.bundleName
+* @param data.bundleVersion
+* @param data.orderBy
+* @returns DAGVersionCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useDagVersionServiceGetDagVersionsSuspense = <TData = Common.DagVersionServiceGetDagVersionsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: {
+  bundleName?: string;
+  bundleVersion?: string;
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  versionNumber?: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options });
+/**
+* Get Hitl Detail
+* Get a Human-in-the-loop detail of a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @returns HITLDetail Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceGetHitlDetailSuspense = <TData = Common.HumanInTheLoopServiceGetHitlDetailDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) as TData, ...options });
+/**
+* Get Mapped Ti Hitl Detail
+* Get a Human-in-the-loop detail of a specific task instance.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.taskId
+* @param data.mapIndex
+* @returns HITLDetail Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceGetMappedTiHitlDetailSuspense = <TData = Common.HumanInTheLoopServiceGetMappedTiHitlDetailDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: {
+  dagId: string;
+  dagRunId: string;
+  mapIndex: number;
+  taskId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options });
+/**
+* Get Hitl Details
+* Get Human-in-the-loop details.
+* @param data The data for the request.
+* @param data.limit
+* @param data.offset
+* @param data.orderBy
+* @param data.dagId
+* @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.dagRunId
+* @param data.taskId
+* @param data.taskIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.state
+* @param data.responseReceived
+* @param data.userId
+* @param data.subjectSearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @param data.bodySearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+* @returns HITLDetailCollection Successful Response
+* @throws ApiError
+*/
+export const useHumanInTheLoopServiceGetHitlDetailsSuspense = <TData = Common.HumanInTheLoopServiceGetHitlDetailsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }: {
+  bodySearch?: string;
+  dagId?: string;
+  dagIdPattern?: string;
+  dagRunId?: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  responseReceived?: boolean;
+  state?: string[];
+  subjectSearch?: string;
+  taskId?: string;
+  taskIdPattern?: string;
+  userId?: string[];
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetails({ bodySearch, dagId, dagIdPattern, dagRunId, limit, offset, orderBy, responseReceived, state, subjectSearch, taskId, taskIdPattern, userId }) as TData, ...options });
+/**
+* Get Health
+* @returns HealthInfoResponse Successful Response
+* @throws ApiError
+*/
+export const useMonitorServiceGetHealthSuspense = <TData = Common.MonitorServiceGetHealthDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), queryFn: () => MonitorService.getHealth() as TData, ...options });
+/**
+* Get Version
+* Get version information.
+* @returns VersionInfo Successful Response
+* @throws ApiError
+*/
+export const useVersionServiceGetVersionSuspense = <TData = Common.VersionServiceGetVersionDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), queryFn: () => VersionService.getVersion() as TData, ...options });
+/**
+* Login
+* Redirect to the login URL depending on the AuthManager configured.
+* @param data The data for the request.
+* @param data.next
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useLoginServiceLoginSuspense = <TData = Common.LoginServiceLoginDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ next }: {
+  next?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey), queryFn: () => LoginService.login({ next }) as TData, ...options });
+/**
+* Logout
+* Logout the user.
+* @param data The data for the request.
+* @param data.next
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useLoginServiceLogoutSuspense = <TData = Common.LoginServiceLogoutDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ next }: {
+  next?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey), queryFn: () => LoginService.logout({ next }) as TData, ...options });
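A sketch for the Human-in-the-loop listing hook above (alias and values hypothetical): the `*Pattern`/`*Search` filters are SQL LIKE expressions, and `responseReceived: false` narrows the list to requests still waiting for a human response:

import { useHumanInTheLoopServiceGetHitlDetailsSuspense } from "openapi/queries";

const PendingApprovals = () => {
  const { data } = useHumanInTheLoopServiceGetHitlDetailsSuspense({
    responseReceived: false, // still awaiting input
    subjectSearch: "%deploy%", // SQL LIKE pattern, not a regex
    limit: 20,
  });

  return <span>{data.total_entries} pending approvals</span>;
};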
+/**
+* Refresh
+* Refresh the authentication token.
+* @param data The data for the request.
+* @param data.next
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useLoginServiceRefreshSuspense = <TData = Common.LoginServiceRefreshDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ next }: {
+  next?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseLoginServiceRefreshKeyFn({ next }, queryKey), queryFn: () => LoginService.refresh({ next }) as TData, ...options });
+/**
+* Get Auth Menus
+* @returns MenuItemCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useAuthLinksServiceGetAuthMenusSuspense = <TData = Common.AuthLinksServiceGetAuthMenusDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey), queryFn: () => AuthLinksService.getAuthMenus() as TData, ...options });
+/**
+* Get Dependencies
+* Dependencies graph.
+* @param data The data for the request.
+* @param data.nodeId
+* @returns BaseGraphResponse Successful Response
+* @throws ApiError
+*/
+export const useDependenciesServiceGetDependenciesSuspense = <TData = Common.DependenciesServiceGetDependenciesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ nodeId }: {
+  nodeId?: string;
+} = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey), queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData, ...options });
+/**
+* Historical Metrics
+* Return cluster activity historical metrics.
+* @param data The data for the request.
+* @param data.startDate
+* @param data.endDate
+* @returns HistoricalMetricDataResponse Successful Response
+* @throws ApiError
+*/
+export const useDashboardServiceHistoricalMetricsSuspense = <TData = Common.DashboardServiceHistoricalMetricsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ endDate, startDate }: {
+  endDate?: string;
+  startDate: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData, ...options });
+/**
+* Dag Stats
+* Return basic DAG stats with counts of DAGs in various states.
+* @returns DashboardDagStatsResponse Successful Response
+* @throws ApiError
+*/
+export const useDashboardServiceDagStatsSuspense = <TData = Common.DashboardServiceDagStatsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>(queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(queryKey), queryFn: () => DashboardService.dagStats() as TData, ...options });
+/**
+* Structure Data
+* Get Structure Data.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.includeUpstream
+* @param data.includeDownstream
+* @param data.root
+* @param data.externalDependencies
+* @param data.versionNumber
+* @returns StructureDataResponse Successful Response
+* @throws ApiError
+*/
+export const useStructureServiceStructureDataSuspense = <TData = Common.StructureServiceStructureDataDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: {
+  dagId: string;
+  externalDependencies?: boolean;
+  includeDownstream?: boolean;
+  includeUpstream?: boolean;
+  root?: string;
+  versionNumber?: number;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, queryKey), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) as TData, ...options });
+/**
+* Get Dag Structure
+* Return dag structure for grid view.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.offset
+* @param data.limit
+* @param data.orderBy
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @returns GridNodeResponse Successful Response
+* @throws ApiError
+*/
+export const useGridServiceGetDagStructureSuspense = <TData = Common.GridServiceGetDagStructureDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseGridServiceGetDagStructureKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }, queryKey), queryFn: () => GridService.getDagStructure({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) as TData, ...options });
+/**
+* Get Grid Runs
+* Get info about a run for the grid.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.offset
+* @param data.limit
+* @param data.orderBy
+* @param data.runAfterGte
+* @param data.runAfterLte
+* @returns GridRunsResponse Successful Response
+* @throws ApiError
+*/
+export const useGridServiceGetGridRunsSuspense = <TData = Common.GridServiceGetGridRunsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }: {
+  dagId: string;
+  limit?: number;
+  offset?: number;
+  orderBy?: string[];
+  runAfterGte?: string;
+  runAfterLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseGridServiceGetGridRunsKeyFn({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }, queryKey), queryFn: () => GridService.getGridRuns({ dagId, limit, offset, orderBy, runAfterGte, runAfterLte }) as TData, ...options });
+/**
+* Get Grid Ti Summaries
+* Get states for TIs / "groups" of TIs.
+*
+* Essentially this is to know what color to put in the squares in the grid.
+*
+* The tricky part here is that we aggregate the state for groups and mapped tasks.
+*
+* We don't add all the TIs for mapped TIs -- we only add one entry for the mapped task and
+* its state is an aggregate of its TI states.
+*
+* And for task groups, we add a "task" for that which is not really a task but is just
+* an entry that represents the group (so that we can show a filled in box when the group
+* is not expanded) and its state is an agg of those within it.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.runId
+* @returns GridTISummaries Successful Response
+* @throws ApiError
+*/
+export const useGridServiceGetGridTiSummariesSuspense = <TData = Common.GridServiceGetGridTiSummariesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, runId }: {
+  dagId: string;
+  runId: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseGridServiceGetGridTiSummariesKeyFn({ dagId, runId }, queryKey), queryFn: () => GridService.getGridTiSummaries({ dagId, runId }) as TData, ...options });
+/**
+* Get Calendar
+* Get calendar data for a DAG including historical and planned DAG runs.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.granularity
+* @param data.logicalDateGte
+* @param data.logicalDateLte
+* @returns CalendarTimeRangeCollectionResponse Successful Response
+* @throws ApiError
+*/
+export const useCalendarServiceGetCalendarSuspense = <TData = Common.CalendarServiceGetCalendarDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, granularity, logicalDateGte, logicalDateLte }: {
+  dagId: string;
+  granularity?: "hourly" | "daily";
+  logicalDateGte?: string;
+  logicalDateLte?: string;
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }, queryKey), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) as TData, ...options });
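To make the aggregation described in the Get Grid Ti Summaries docstring concrete, a hypothetical grid column: one call per visible run, where each returned summary may stand for a plain task, a collapsed task group, or a whole mapped task, already carrying the aggregated state used to colour the square. Field names are assumed from the GridTISummaries schema and `GridSquare` is a hypothetical presentational component:

import { useGridServiceGetGridTiSummariesSuspense } from "openapi/queries";

const RunColumn = ({ dagId, runId }: { dagId: string; runId: string }) => {
  const { data } = useGridServiceGetGridTiSummariesSuspense({ dagId, runId });

  return (
    <>
      {data.task_instances.map((summary) => (
        <GridSquare key={summary.task_id} state={summary.state} />
      ))}
    </>
  );
};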
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiError.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiError.ts
index fc85c6d1888ba..36675d288a538 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiError.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiError.ts
@@ -1,21 +1,21 @@
-import type { ApiRequestOptions } from "./ApiRequestOptions";
-import type { ApiResult } from "./ApiResult";
+import type { ApiRequestOptions } from './ApiRequestOptions';
+import type { ApiResult } from './ApiResult';
 
 export class ApiError extends Error {
-  public readonly url: string;
-  public readonly status: number;
-  public readonly statusText: string;
-  public readonly body: unknown;
-  public readonly request: ApiRequestOptions;
+	public readonly url: string;
+	public readonly status: number;
+	public readonly statusText: string;
+	public readonly body: unknown;
+	public readonly request: ApiRequestOptions;
 
-  constructor(request: ApiRequestOptions, response: ApiResult, message: string) {
-    super(message);
+	constructor(request: ApiRequestOptions, response: ApiResult, message: string) {
+		super(message);
 
-    this.name = "ApiError";
-    this.url = response.url;
-    this.status = response.status;
-    this.statusText = response.statusText;
-    this.body = response.body;
-    this.request = request;
-  }
-}
+		this.name = 'ApiError';
+		this.url = response.url;
+		this.status = response.status;
+		this.statusText = response.statusText;
+		this.body = response.body;
+		this.request = request;
+	}
+}
\ No newline at end of file
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiRequestOptions.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiRequestOptions.ts
index a74f880c89a2f..939a0aa4c8b25 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiRequestOptions.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiRequestOptions.ts
@@ -1,14 +1,21 @@
 export type ApiRequestOptions<T = unknown> = {
-  readonly body?: any;
-  readonly cookies?: Record<string, unknown>;
-  readonly errors?: Record<number, string>;
-  readonly formData?: Record<string, unknown> | any[] | Blob | File;
-  readonly headers?: Record<string, unknown>;
-  readonly mediaType?: string;
-  readonly method: "DELETE" | "GET" | "HEAD" | "OPTIONS" | "PATCH" | "POST" | "PUT";
-  readonly path?: Record<string, unknown>;
-  readonly query?: Record<string, unknown>;
-  readonly responseHeader?: string;
-  readonly responseTransformer?: (data: unknown) => Promise<T>;
-  readonly url: string;
-};
+	readonly body?: any;
+	readonly cookies?: Record<string, unknown>;
+	readonly errors?: Record<number, string>;
+	readonly formData?: Record<string, unknown> | any[] | Blob | File;
+	readonly headers?: Record<string, unknown>;
+	readonly mediaType?: string;
+	readonly method:
+		| 'DELETE'
+		| 'GET'
+		| 'HEAD'
+		| 'OPTIONS'
+		| 'PATCH'
+		| 'POST'
+		| 'PUT';
+	readonly path?: Record<string, unknown>;
+	readonly query?: Record<string, unknown>;
+	readonly responseHeader?: string;
+	readonly responseTransformer?: (data: unknown) => Promise<T>;
+	readonly url: string;
+};
\ No newline at end of file
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiResult.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiResult.ts
index 05040ba816733..4c58e391382b1 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiResult.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/core/ApiResult.ts
@@ -1,7 +1,7 @@
 export type ApiResult<TData = any> = {
-  readonly body: TData;
-  readonly ok: boolean;
-  readonly status: number;
-  readonly statusText: string;
-  readonly url: string;
-};
+	readonly body: TData;
+	readonly ok: boolean;
+	readonly status: number;
+	readonly statusText: string;
+	readonly url: string;
+};
\ No newline at end of file
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/core/CancelablePromise.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/core/CancelablePromise.ts
index 0640e989760e4..ccc082e8f2a32 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/core/CancelablePromise.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/core/CancelablePromise.ts
@@ -1,126 +1,126 @@
 export class CancelError extends Error {
-  constructor(message: string) {
-    super(message);
-    this.name = "CancelError";
-  }
-
-  public get isCancelled(): boolean {
-    return true;
-  }
+	constructor(message: string) {
+		super(message);
+		this.name = 'CancelError';
+	}
+
+	public get isCancelled(): boolean {
+		return true;
+	}
 }
 
 export interface OnCancel {
-  readonly isResolved: boolean;
-  readonly isRejected: boolean;
-  readonly isCancelled: boolean;
+	readonly isResolved: boolean;
+	readonly isRejected: boolean;
+	readonly isCancelled: boolean;
 
-  (cancelHandler: () => void): void;
+	(cancelHandler: () => void): void;
 }
 
 export class CancelablePromise<T> implements Promise<T> {
-  private _isResolved: boolean;
-  private _isRejected: boolean;
-  private _isCancelled: boolean;
-  readonly cancelHandlers: (() => void)[];
-  readonly promise: Promise<T>;
-  private _resolve?: (value: T | PromiseLike<T>) => void;
-  private _reject?: (reason?: unknown) => void;
-
-  constructor(
-    executor: (
-      resolve: (value: T | PromiseLike<T>) => void,
-      reject: (reason?: unknown) => void,
-      onCancel: OnCancel,
-    ) => void,
-  ) {
-    this._isResolved = false;
-    this._isRejected = false;
-    this._isCancelled = false;
-    this.cancelHandlers = [];
-    this.promise = new Promise<T>((resolve, reject) => {
-      this._resolve = resolve;
-      this._reject = reject;
-
-      const onResolve = (value: T | PromiseLike<T>): void => {
-        if (this._isResolved || this._isRejected || this._isCancelled) {
-          return;
-        }
-        this._isResolved = true;
-        if (this._resolve) this._resolve(value);
-      };
-
-      const onReject = (reason?: unknown): void => {
-        if (this._isResolved || this._isRejected || this._isCancelled) {
-          return;
-        }
-        this._isRejected = true;
-        if (this._reject) this._reject(reason);
-      };
-
-      const onCancel = (cancelHandler: () => void): void => {
-        if (this._isResolved || this._isRejected || this._isCancelled) {
-          return;
-        }
-        this.cancelHandlers.push(cancelHandler);
-      };
-
-      Object.defineProperty(onCancel, "isResolved", {
-        get: (): boolean => this._isResolved,
-      });
-
-      Object.defineProperty(onCancel, "isRejected", {
-        get: (): boolean => this._isRejected,
-      });
-
-      Object.defineProperty(onCancel, "isCancelled", {
-        get: (): boolean => this._isCancelled,
-      });
-
-      return executor(onResolve, onReject, onCancel as OnCancel);
-    });
-  }
-
-  get [Symbol.toStringTag]() {
-    return "Cancellable Promise";
-  }
-
-  public then<TResult1 = T, TResult2 = never>(
-    onFulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | null,
-    onRejected?: ((reason: unknown) => TResult2 | PromiseLike<TResult2>) | null,
-  ): Promise<TResult1 | TResult2> {
-    return this.promise.then(onFulfilled, onRejected);
-  }
-
-  public catch<TResult = never>(
-    onRejected?: ((reason: unknown) => TResult | PromiseLike<TResult>) | null,
-  ): Promise<T | TResult> {
-    return this.promise.catch(onRejected);
-  }
-
-  public finally(onFinally?: (() => void) | null): Promise<T> {
-    return this.promise.finally(onFinally);
-  }
-
-  public cancel(): void {
-    if (this._isResolved || this._isRejected || this._isCancelled) {
-      return;
-    }
-    this._isCancelled = true;
-    if (this.cancelHandlers.length) {
-      try {
-        for (const cancelHandler of this.cancelHandlers) {
-          cancelHandler();
-        }
-      } catch (error) {
-        console.warn("Cancellation threw an error", error);
-        return;
-      }
-    }
-    this.cancelHandlers.length = 0;
-    if (this._reject) this._reject(new CancelError("Request aborted"));
-  }
-
-  public get isCancelled(): boolean {
-    return this._isCancelled;
-  }
-}
+	private _isResolved: boolean;
+	private _isRejected: boolean;
+	private _isCancelled: boolean;
+	readonly cancelHandlers: (() => void)[];
+	readonly promise: Promise<T>;
+	private _resolve?: (value: T | PromiseLike<T>) => void;
+	private _reject?: (reason?: unknown) => void;
+
+	constructor(
+		executor: (
+			resolve: (value: T | PromiseLike<T>) => void,
+			reject: (reason?: unknown) => void,
+			onCancel: OnCancel
+		) => void
+	) {
+		this._isResolved = false;
+		this._isRejected = false;
+		this._isCancelled = false;
+		this.cancelHandlers = [];
+		this.promise = new Promise<T>((resolve, reject) => {
+			this._resolve = resolve;
+			this._reject = reject;
+
+			const onResolve = (value: T | PromiseLike<T>): void => {
+				if (this._isResolved || this._isRejected || this._isCancelled) {
+					return;
+				}
+				this._isResolved = true;
+				if (this._resolve) this._resolve(value);
+			};
+
+			const onReject = (reason?: unknown): void => {
+				if (this._isResolved || this._isRejected || this._isCancelled) {
+					return;
+				}
+				this._isRejected = true;
+				if (this._reject) this._reject(reason);
+			};
+
+			const onCancel = (cancelHandler: () => void): void => {
+				if (this._isResolved || this._isRejected || this._isCancelled) {
+					return;
+				}
+				this.cancelHandlers.push(cancelHandler);
+			};
+
+			Object.defineProperty(onCancel, 'isResolved', {
+				get: (): boolean => this._isResolved,
+			});
+
+			Object.defineProperty(onCancel, 'isRejected', {
+				get: (): boolean => this._isRejected,
+			});
+
+			Object.defineProperty(onCancel, 'isCancelled', {
+				get: (): boolean => this._isCancelled,
+			});
+
+			return executor(onResolve, onReject, onCancel as OnCancel);
+		});
+	}
+
+	get [Symbol.toStringTag]() {
+		return "Cancellable Promise";
+	}
+
+	public then<TResult1 = T, TResult2 = never>(
+		onFulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | null,
+		onRejected?: ((reason: unknown) => TResult2 | PromiseLike<TResult2>) | null
+	): Promise<TResult1 | TResult2> {
+		return this.promise.then(onFulfilled, onRejected);
+	}
+
+	public catch<TResult = never>(
+		onRejected?: ((reason: unknown) => TResult | PromiseLike<TResult>) | null
+	): Promise<T | TResult> {
+		return this.promise.catch(onRejected);
+	}
+
+	public finally(onFinally?: (() => void) | null): Promise<T> {
+		return this.promise.finally(onFinally);
+	}
+
+	public cancel(): void {
+		if (this._isResolved || this._isRejected || this._isCancelled) {
+			return;
+		}
+		this._isCancelled = true;
+		if (this.cancelHandlers.length) {
+			try {
+				for (const cancelHandler of this.cancelHandlers) {
+					cancelHandler();
+				}
+			} catch (error) {
+				console.warn('Cancellation threw an error', error);
+				return;
+			}
+		}
+		this.cancelHandlers.length = 0;
+		if (this._reject) this._reject(new CancelError('Request aborted'));
+	}
+
+	public get isCancelled(): boolean {
+		return this._isCancelled;
+	}
+}
\ No newline at end of file
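The class above is what every generated service method returns. A usage sketch of the cancellation path (import alias assumed from the UI setup): `cancel()` runs the registered handlers, which abort the underlying axios request, and rejects the promise with `CancelError`:

import { CancelError, TaskInstanceService } from "openapi/requests";

const promise = TaskInstanceService.getTaskInstances({ dagId: "~", dagRunId: "~" });

promise.catch((error: unknown) => {
  if (error instanceof CancelError) {
    console.warn("request was cancelled:", error.message); // "Request aborted"
  }
});

// Later, e.g. when the component that issued the call unmounts:
promise.cancel();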
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/core/OpenAPI.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/core/OpenAPI.ts
index 2b80bacec1c9e..3631974d81ba7 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/core/OpenAPI.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/core/OpenAPI.ts
@@ -1,6 +1,5 @@
-import type { AxiosRequestConfig, AxiosResponse } from "axios";
-
-import type { ApiRequestOptions } from "./ApiRequestOptions";
+import type { AxiosRequestConfig, AxiosResponse } from 'axios';
+import type { ApiRequestOptions } from './ApiRequestOptions';
 
 type Headers = Record<string, string>;
 type Middleware<T> = (value: T) => T | Promise<T>;
@@ -26,33 +25,33 @@ export class Interceptors<T> {
 }
 
 export type OpenAPIConfig = {
-  BASE: string;
-  CREDENTIALS: "include" | "omit" | "same-origin";
-  ENCODE_PATH?: ((path: string) => string) | undefined;
-  HEADERS?: Headers | Resolver<Headers> | undefined;
-  PASSWORD?: string | Resolver<string> | undefined;
-  TOKEN?: string | Resolver<string> | undefined;
-  USERNAME?: string | Resolver<string> | undefined;
-  VERSION: string;
-  WITH_CREDENTIALS: boolean;
-  interceptors: {
-    request: Interceptors<AxiosRequestConfig>;
-    response: Interceptors<AxiosResponse>;
-  };
+	BASE: string;
+	CREDENTIALS: 'include' | 'omit' | 'same-origin';
+	ENCODE_PATH?: ((path: string) => string) | undefined;
+	HEADERS?: Headers | Resolver<Headers> | undefined;
+	PASSWORD?: string | Resolver<string> | undefined;
+	TOKEN?: string | Resolver<string> | undefined;
+	USERNAME?: string | Resolver<string> | undefined;
+	VERSION: string;
+	WITH_CREDENTIALS: boolean;
+	interceptors: {
+		request: Interceptors<AxiosRequestConfig>;
+		response: Interceptors<AxiosResponse>;
+	};
 };
 
 export const OpenAPI: OpenAPIConfig = {
-  BASE: "",
-  CREDENTIALS: "include",
-  ENCODE_PATH: undefined,
-  HEADERS: undefined,
-  PASSWORD: undefined,
-  TOKEN: undefined,
-  USERNAME: undefined,
-  VERSION: "2",
-  WITH_CREDENTIALS: false,
-  interceptors: {
-    request: new Interceptors(),
-    response: new Interceptors(),
-  },
-};
+	BASE: '',
+	CREDENTIALS: 'include',
+	ENCODE_PATH: undefined,
+	HEADERS: undefined,
+	PASSWORD: undefined,
+	TOKEN: undefined,
+	USERNAME: undefined,
+	VERSION: '2',
+	WITH_CREDENTIALS: false,
+	interceptors: {
+		request: new Interceptors(),
+		response: new Interceptors(),
+	},
+};
\ No newline at end of file
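A configuration sketch for the mutable `OpenAPI` object above (base URL and storage key hypothetical): `TOKEN` may be a static string or a per-request async resolver, and the interceptor registries wrap the raw axios request config and response:

import { OpenAPI } from "openapi/requests";

OpenAPI.BASE = "http://localhost:28080"; // hypothetical dev API server
OpenAPI.TOKEN = async () => localStorage.getItem("access_token") ?? "";

// Log every response; interceptors are applied inside `request` (see request.ts below).
OpenAPI.interceptors.response.use((response) => {
  console.debug(response.status, response.config.url);
  return response;
});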
"axios"; +import axios from 'axios'; +import type { AxiosError, AxiosRequestConfig, AxiosResponse, AxiosInstance } from 'axios'; -import { ApiError } from "./ApiError"; -import type { ApiRequestOptions } from "./ApiRequestOptions"; -import type { ApiResult } from "./ApiResult"; -import { CancelablePromise } from "./CancelablePromise"; -import type { OnCancel } from "./CancelablePromise"; -import type { OpenAPIConfig } from "./OpenAPI"; +import { ApiError } from './ApiError'; +import type { ApiRequestOptions } from './ApiRequestOptions'; +import type { ApiResult } from './ApiResult'; +import { CancelablePromise } from './CancelablePromise'; +import type { OnCancel } from './CancelablePromise'; +import type { OpenAPIConfig } from './OpenAPI'; export const isString = (value: unknown): value is string => { - return typeof value === "string"; + return typeof value === 'string'; }; export const isStringWithValue = (value: unknown): value is string => { - return isString(value) && value !== ""; + return isString(value) && value !== ''; }; export const isBlob = (value: any): value is Blob => { - return value instanceof Blob; + return value instanceof Blob; }; export const isFormData = (value: unknown): value is FormData => { - return value instanceof FormData; + return value instanceof FormData; }; export const isSuccess = (status: number): boolean => { - return status >= 200 && status < 300; + return status >= 200 && status < 300; }; export const base64 = (str: string): string => { - try { - return btoa(str); - } catch (err) { - // @ts-ignore - return Buffer.from(str).toString("base64"); - } + try { + return btoa(str); + } catch (err) { + // @ts-ignore + return Buffer.from(str).toString('base64'); + } }; export const getQueryString = (params: Record): string => { - const qs: string[] = []; - - const append = (key: string, value: unknown) => { - qs.push(`${encodeURIComponent(key)}=${encodeURIComponent(String(value))}`); - }; - - const encodePair = (key: string, value: unknown) => { - if (value === undefined || value === null) { - return; - } - - if (value instanceof Date) { - append(key, value.toISOString()); - } else if (Array.isArray(value)) { - value.forEach((v) => encodePair(key, v)); - } else if (typeof value === "object") { - Object.entries(value).forEach(([k, v]) => encodePair(`${key}[${k}]`, v)); - } else { - append(key, value); - } - }; - - Object.entries(params).forEach(([key, value]) => encodePair(key, value)); - - return qs.length ? `?${qs.join("&")}` : ""; + const qs: string[] = []; + + const append = (key: string, value: unknown) => { + qs.push(`${encodeURIComponent(key)}=${encodeURIComponent(String(value))}`); + }; + + const encodePair = (key: string, value: unknown) => { + if (value === undefined || value === null) { + return; + } + + if (value instanceof Date) { + append(key, value.toISOString()); + } else if (Array.isArray(value)) { + value.forEach(v => encodePair(key, v)); + } else if (typeof value === 'object') { + Object.entries(value).forEach(([k, v]) => encodePair(`${key}[${k}]`, v)); + } else { + append(key, value); + } + }; + + Object.entries(params).forEach(([key, value]) => encodePair(key, value)); + + return qs.length ? 
 
 const getUrl = (config: OpenAPIConfig, options: ApiRequestOptions): string => {
-  const encoder = config.ENCODE_PATH || encodeURI;
-
-  const path = options.url
-    .replace("{api-version}", config.VERSION)
-    .replace(/{(.*?)}/g, (substring: string, group: string) => {
-      if (options.path?.hasOwnProperty(group)) {
-        return encoder(String(options.path[group]));
-      }
-      return substring;
-    });
-
-  const url = config.BASE + path;
-  return options.query ? url + getQueryString(options.query) : url;
+	const encoder = config.ENCODE_PATH || encodeURI;
+
+	const path = options.url
+		.replace('{api-version}', config.VERSION)
+		.replace(/{(.*?)}/g, (substring: string, group: string) => {
+			if (options.path?.hasOwnProperty(group)) {
+				return encoder(String(options.path[group]));
+			}
+			return substring;
+		});
+
+	const url = config.BASE + path;
+	return options.query ? url + getQueryString(options.query) : url;
 };
 
 export const getFormData = (options: ApiRequestOptions): FormData | undefined => {
-  if (options.formData) {
-    const formData = new FormData();
-
-    const process = (key: string, value: unknown) => {
-      if (isString(value) || isBlob(value)) {
-        formData.append(key, value);
-      } else {
-        formData.append(key, JSON.stringify(value));
-      }
-    };
-
-    Object.entries(options.formData)
-      .filter(([, value]) => value !== undefined && value !== null)
-      .forEach(([key, value]) => {
-        if (Array.isArray(value)) {
-          value.forEach((v) => process(key, v));
-        } else {
-          process(key, value);
-        }
-      });
-
-    return formData;
-  }
-  return undefined;
+	if (options.formData) {
+		const formData = new FormData();
+
+		const process = (key: string, value: unknown) => {
+			if (isString(value) || isBlob(value)) {
+				formData.append(key, value);
+			} else {
+				formData.append(key, JSON.stringify(value));
+			}
+		};
+
+		Object.entries(options.formData)
+			.filter(([, value]) => value !== undefined && value !== null)
+			.forEach(([key, value]) => {
+				if (Array.isArray(value)) {
+					value.forEach(v => process(key, v));
+				} else {
+					process(key, value);
+				}
+			});
+
+		return formData;
+	}
+	return undefined;
 };
 
 type Resolver<T> = (options: ApiRequestOptions) => Promise<T>;
 
-export const resolve = async <T>(
-  options: ApiRequestOptions,
-  resolver?: T | Resolver<T>,
-): Promise<T | undefined> => {
-  if (typeof resolver === "function") {
-    return (resolver as Resolver<T>)(options);
-  }
-  return resolver;
+export const resolve = async <T>(options: ApiRequestOptions, resolver?: T | Resolver<T>): Promise<T | undefined> => {
+	if (typeof resolver === 'function') {
+		return (resolver as Resolver<T>)(options);
+	}
+	return resolver;
 };
 
-export const getHeaders = async (
-  config: OpenAPIConfig,
-  options: ApiRequestOptions,
-): Promise<Record<string, string>> => {
-  const [token, username, password, additionalHeaders] = await Promise.all([
-    // @ts-ignore
-    resolve(options, config.TOKEN),
-    // @ts-ignore
-    resolve(options, config.USERNAME),
-    // @ts-ignore
-    resolve(options, config.PASSWORD),
-    // @ts-ignore
-    resolve(options, config.HEADERS),
-  ]);
-
-  const headers = Object.entries({
-    Accept: "application/json",
-    ...additionalHeaders,
-    ...options.headers,
-  })
-    .filter(([, value]) => value !== undefined && value !== null)
-    .reduce(
-      (headers, [key, value]) => ({
-        ...headers,
-        [key]: String(value),
-      }),
-      {} as Record<string, string>,
-    );
-
-  if (isStringWithValue(token)) {
-    headers["Authorization"] = `Bearer ${token}`;
-  }
-
-  if (isStringWithValue(username) && isStringWithValue(password)) {
-    const credentials = base64(`${username}:${password}`);
-    headers["Authorization"] = `Basic ${credentials}`;
-  }
-
-  if (options.body !== undefined) {
-    if (options.mediaType) {
-      headers["Content-Type"] = options.mediaType;
-    } else if (isBlob(options.body)) {
-      headers["Content-Type"] = options.body.type || "application/octet-stream";
-    } else if (isString(options.body)) {
-      headers["Content-Type"] = "text/plain";
-    } else if (!isFormData(options.body)) {
-      headers["Content-Type"] = "application/json";
-    }
-  } else if (options.formData !== undefined) {
-    if (options.mediaType) {
-      headers["Content-Type"] = options.mediaType;
-    }
-  }
-
-  return headers;
+export const getHeaders = async (config: OpenAPIConfig, options: ApiRequestOptions): Promise<Record<string, string>> => {
+	const [token, username, password, additionalHeaders] = await Promise.all([
+		// @ts-ignore
+		resolve(options, config.TOKEN),
+		// @ts-ignore
+		resolve(options, config.USERNAME),
+		// @ts-ignore
+		resolve(options, config.PASSWORD),
+		// @ts-ignore
+		resolve(options, config.HEADERS),
+	]);
+
+	const headers = Object.entries({
+		Accept: 'application/json',
+		...additionalHeaders,
+		...options.headers,
+	})
+		.filter(([, value]) => value !== undefined && value !== null)
+		.reduce((headers, [key, value]) => ({
+			...headers,
+			[key]: String(value),
+		}), {} as Record<string, string>);
+
+	if (isStringWithValue(token)) {
+		headers['Authorization'] = `Bearer ${token}`;
+	}
+
+	if (isStringWithValue(username) && isStringWithValue(password)) {
+		const credentials = base64(`${username}:${password}`);
+		headers['Authorization'] = `Basic ${credentials}`;
+	}
+
+	if (options.body !== undefined) {
+		if (options.mediaType) {
+			headers['Content-Type'] = options.mediaType;
+		} else if (isBlob(options.body)) {
+			headers['Content-Type'] = options.body.type || 'application/octet-stream';
+		} else if (isString(options.body)) {
+			headers['Content-Type'] = 'text/plain';
+		} else if (!isFormData(options.body)) {
+			headers['Content-Type'] = 'application/json';
+		}
+	} else if (options.formData !== undefined) {
+		if (options.mediaType) {
+			headers['Content-Type'] = options.mediaType;
+		}
+	}
+
+	return headers;
 };
 
 export const getRequestBody = (options: ApiRequestOptions): unknown => {
-  if (options.body) {
-    return options.body;
-  }
-  return undefined;
+	if (options.body) {
+		return options.body;
+	}
+	return undefined;
 };
 
 export const sendRequest = async <T>(
-  config: OpenAPIConfig,
-  options: ApiRequestOptions,
-  url: string,
-  body: unknown,
-  formData: FormData | undefined,
-  headers: Record<string, string>,
-  onCancel: OnCancel,
-  axiosClient: AxiosInstance,
+	config: OpenAPIConfig,
+	options: ApiRequestOptions,
+	url: string,
+	body: unknown,
+	formData: FormData | undefined,
+	headers: Record<string, string>,
+	onCancel: OnCancel,
+	axiosClient: AxiosInstance
 ): Promise<AxiosResponse<T>> => {
-  const controller = new AbortController();
-
-  let requestConfig: AxiosRequestConfig = {
-    data: body ?? formData,
-    headers,
-    method: options.method,
-    signal: controller.signal,
-    url,
-    withCredentials: config.WITH_CREDENTIALS,
-  };
-
-  onCancel(() => controller.abort());
-
-  for (const fn of config.interceptors.request._fns) {
-    requestConfig = await fn(requestConfig);
-  }
-
-  try {
-    return await axiosClient.request(requestConfig);
-  } catch (error) {
-    const axiosError = error as AxiosError<T>;
-    if (axiosError.response) {
-      return axiosError.response;
-    }
-    throw error;
-  }
+	const controller = new AbortController();
+
+	let requestConfig: AxiosRequestConfig = {
+		data: body ?? formData,
+		headers,
+		method: options.method,
+		signal: controller.signal,
+		url,
+		withCredentials: config.WITH_CREDENTIALS,
+	};
+
+	onCancel(() => controller.abort());
+
+	for (const fn of config.interceptors.request._fns) {
+		requestConfig = await fn(requestConfig);
+	}
+
+	try {
+		return await axiosClient.request(requestConfig);
+	} catch (error) {
+		const axiosError = error as AxiosError<T>;
+		if (axiosError.response) {
+			return axiosError.response;
+		}
+		throw error;
+	}
 };
 
-export const getResponseHeader = (
-  response: AxiosResponse,
-  responseHeader?: string,
-): string | undefined => {
-  if (responseHeader) {
-    const content = response.headers[responseHeader];
-    if (isString(content)) {
-      return content;
-    }
-  }
-  return undefined;
+export const getResponseHeader = (response: AxiosResponse, responseHeader?: string): string | undefined => {
+	if (responseHeader) {
+		const content = response.headers[responseHeader];
+		if (isString(content)) {
+			return content;
+		}
+	}
+	return undefined;
 };
 
 export const getResponseBody = (response: AxiosResponse): unknown => {
-  if (response.status !== 204) {
-    return response.data;
-  }
-  return undefined;
+	if (response.status !== 204) {
+		return response.data;
+	}
+	return undefined;
 };
 
 export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): void => {
-  const errors: Record<number, string> = {
-    400: "Bad Request",
-    401: "Unauthorized",
-    402: "Payment Required",
-    403: "Forbidden",
-    404: "Not Found",
-    405: "Method Not Allowed",
-    406: "Not Acceptable",
-    407: "Proxy Authentication Required",
-    408: "Request Timeout",
-    409: "Conflict",
-    410: "Gone",
-    411: "Length Required",
-    412: "Precondition Failed",
-    413: "Payload Too Large",
-    414: "URI Too Long",
-    415: "Unsupported Media Type",
-    416: "Range Not Satisfiable",
-    417: "Expectation Failed",
-    418: "Im a teapot",
-    421: "Misdirected Request",
-    422: "Unprocessable Content",
-    423: "Locked",
-    424: "Failed Dependency",
-    425: "Too Early",
-    426: "Upgrade Required",
-    428: "Precondition Required",
-    429: "Too Many Requests",
-    431: "Request Header Fields Too Large",
-    451: "Unavailable For Legal Reasons",
-    500: "Internal Server Error",
-    501: "Not Implemented",
-    502: "Bad Gateway",
-    503: "Service Unavailable",
-    504: "Gateway Timeout",
-    505: "HTTP Version Not Supported",
-    506: "Variant Also Negotiates",
-    507: "Insufficient Storage",
-    508: "Loop Detected",
-    510: "Not Extended",
-    511: "Network Authentication Required",
-    ...options.errors,
-  };
-
-  const error = errors[result.status];
-  if (error) {
-    throw new ApiError(options, result, error);
-  }
-
-  if (!result.ok) {
-    const errorStatus = result.status ?? "unknown";
-    const errorStatusText = result.statusText ?? "unknown";
-    const errorBody = (() => {
-      try {
-        return JSON.stringify(result.body, null, 2);
-      } catch (e) {
-        return undefined;
-      }
-    })();
-
-    throw new ApiError(
-      options,
-      result,
-      `Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`,
-    );
-  }
+	const errors: Record<number, string> = {
+		400: 'Bad Request',
+		401: 'Unauthorized',
+		402: 'Payment Required',
+		403: 'Forbidden',
+		404: 'Not Found',
+		405: 'Method Not Allowed',
+		406: 'Not Acceptable',
+		407: 'Proxy Authentication Required',
+		408: 'Request Timeout',
+		409: 'Conflict',
+		410: 'Gone',
+		411: 'Length Required',
+		412: 'Precondition Failed',
+		413: 'Payload Too Large',
+		414: 'URI Too Long',
+		415: 'Unsupported Media Type',
+		416: 'Range Not Satisfiable',
+		417: 'Expectation Failed',
+		418: 'Im a teapot',
+		421: 'Misdirected Request',
+		422: 'Unprocessable Content',
+		423: 'Locked',
+		424: 'Failed Dependency',
+		425: 'Too Early',
+		426: 'Upgrade Required',
+		428: 'Precondition Required',
+		429: 'Too Many Requests',
+		431: 'Request Header Fields Too Large',
+		451: 'Unavailable For Legal Reasons',
+		500: 'Internal Server Error',
+		501: 'Not Implemented',
+		502: 'Bad Gateway',
+		503: 'Service Unavailable',
+		504: 'Gateway Timeout',
+		505: 'HTTP Version Not Supported',
+		506: 'Variant Also Negotiates',
+		507: 'Insufficient Storage',
+		508: 'Loop Detected',
+		510: 'Not Extended',
+		511: 'Network Authentication Required',
+		...options.errors,
+	}
+
+	const error = errors[result.status];
+	if (error) {
+		throw new ApiError(options, result, error);
+	}
+
+	if (!result.ok) {
+		const errorStatus = result.status ?? 'unknown';
+		const errorStatusText = result.statusText ?? 'unknown';
+		const errorBody = (() => {
+			try {
+				return JSON.stringify(result.body, null, 2);
+			} catch (e) {
+				return undefined;
+			}
+		})();
+
+		throw new ApiError(options, result,
+			`Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`
+		);
+	}
 };
 
 /**
"unknown"; - const errorBody = (() => { - try { - return JSON.stringify(result.body, null, 2); - } catch (e) { - return undefined; - } - })(); - - throw new ApiError( - options, - result, - `Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}`, - ); - } + const errors: Record = { + 400: 'Bad Request', + 401: 'Unauthorized', + 402: 'Payment Required', + 403: 'Forbidden', + 404: 'Not Found', + 405: 'Method Not Allowed', + 406: 'Not Acceptable', + 407: 'Proxy Authentication Required', + 408: 'Request Timeout', + 409: 'Conflict', + 410: 'Gone', + 411: 'Length Required', + 412: 'Precondition Failed', + 413: 'Payload Too Large', + 414: 'URI Too Long', + 415: 'Unsupported Media Type', + 416: 'Range Not Satisfiable', + 417: 'Expectation Failed', + 418: 'Im a teapot', + 421: 'Misdirected Request', + 422: 'Unprocessable Content', + 423: 'Locked', + 424: 'Failed Dependency', + 425: 'Too Early', + 426: 'Upgrade Required', + 428: 'Precondition Required', + 429: 'Too Many Requests', + 431: 'Request Header Fields Too Large', + 451: 'Unavailable For Legal Reasons', + 500: 'Internal Server Error', + 501: 'Not Implemented', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + 504: 'Gateway Timeout', + 505: 'HTTP Version Not Supported', + 506: 'Variant Also Negotiates', + 507: 'Insufficient Storage', + 508: 'Loop Detected', + 510: 'Not Extended', + 511: 'Network Authentication Required', + ...options.errors, + } + + const error = errors[result.status]; + if (error) { + throw new ApiError(options, result, error); + } + + if (!result.ok) { + const errorStatus = result.status ?? 'unknown'; + const errorStatusText = result.statusText ?? 'unknown'; + const errorBody = (() => { + try { + return JSON.stringify(result.body, null, 2); + } catch (e) { + return undefined; + } + })(); + + throw new ApiError(options, result, + `Generic Error: status: ${errorStatus}; status text: ${errorStatusText}; body: ${errorBody}` + ); + } }; /** @@ -319,56 +305,43 @@ export const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): * @returns CancelablePromise * @throws ApiError */ -export const request = ( - config: OpenAPIConfig, - options: ApiRequestOptions, - axiosClient: AxiosInstance = axios, -): CancelablePromise => { - return new CancelablePromise(async (resolve, reject, onCancel) => { - try { - const url = getUrl(config, options); - const formData = getFormData(options); - const body = getRequestBody(options); - const headers = await getHeaders(config, options); - - if (!onCancel.isCancelled) { - let response = await sendRequest( - config, - options, - url, - body, - formData, - headers, - onCancel, - axiosClient, - ); - - for (const fn of config.interceptors.response._fns) { - response = await fn(response); - } - - const responseBody = getResponseBody(response); - const responseHeader = getResponseHeader(response, options.responseHeader); - - let transformedBody = responseBody; - if (options.responseTransformer && isSuccess(response.status)) { - transformedBody = await options.responseTransformer(responseBody); - } - - const result: ApiResult = { - url, - ok: isSuccess(response.status), - status: response.status, - statusText: response.statusText, - body: responseHeader ?? 
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/index.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/index.ts
index 1a73593d0567f..205031a4e7b0a 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/index.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/index.ts
@@ -1,7 +1,7 @@
 // This file is auto-generated by @hey-api/openapi-ts
-export { ApiError } from "./core/ApiError";
-export { CancelablePromise, CancelError } from "./core/CancelablePromise";
-export { OpenAPI, type OpenAPIConfig } from "./core/OpenAPI";
-export * from "./schemas.gen";
-export * from "./services.gen";
-export * from "./types.gen";
+export { ApiError } from './core/ApiError';
+export { CancelablePromise, CancelError } from './core/CancelablePromise';
+export { OpenAPI, type OpenAPIConfig } from './core/OpenAPI';
+export * from './schemas.gen';
+export * from './services.gen';
+export * from './types.gen';
\ No newline at end of file
diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts
index dbb2634d5c276..abfcc81b07990 100644
--- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts
+++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts
@@ -1,6930 +1,7466 @@
 // This file is auto-generated by @hey-api/openapi-ts
 
 export const $AppBuilderMenuItemResponse = {
-  properties: {
-    name: {
-      type: "string",
-      title: "Name",
-    },
-    href: {
-      anyOf: [
-        {
-          type: "string",
-        },
-        {
-          type: "null",
-        },
-      ],
-      title: "Href",
-    },
-    category: {
-      anyOf: [
-        {
-          type: "string",
-        },
-        {
-          type: "null",
-        },
-      ],
-      title: "Category",
-    },
-  },
-  additionalProperties: true,
-  type: "object",
-  required: ["name"],
-  title: "AppBuilderMenuItemResponse",
-  description: "Serializer for AppBuilder Menu Item responses.",
+    properties: {
+        name: {
+            type: 'string',
+            title: 'Name'
+        },
+        href: {
+            type: 'string',
+            title: 'Href'
+        },
+        category: {
+            anyOf: [
+                {
+                    type: 'string'
+                },
+                {
+                    type: 'null'
+                }
+            ],
+            title: 'Category'
+        }
+    },
+    additionalProperties: true,
+    type: 'object',
+    required: ['name', 'href'],
+    title:
'AppBuilderMenuItemResponse', + description: 'Serializer for AppBuilder Menu Item responses.' } as const; export const $AppBuilderViewResponse = { - properties: { - name: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Name", - }, - category: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Category", - }, - view: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + properties: { + name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Name' }, - ], - title: "View", - }, - label: { - anyOf: [ - { - type: "string", + category: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Category' }, - { - type: "null", + view: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'View' }, - ], - title: "Label", + label: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Label' + } }, - }, - additionalProperties: true, - type: "object", - title: "AppBuilderViewResponse", - description: "Serializer for AppBuilder View responses.", + additionalProperties: true, + type: 'object', + title: 'AppBuilderViewResponse', + description: 'Serializer for AppBuilder View responses.' } as const; export const $AssetAliasCollectionResponse = { - properties: { - asset_aliases: { - items: { - $ref: "#/components/schemas/AssetAliasResponse", - }, - type: "array", - title: "Asset Aliases", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + asset_aliases: { + items: { + '$ref': '#/components/schemas/AssetAliasResponse' + }, + type: 'array', + title: 'Asset Aliases' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["asset_aliases", "total_entries"], - title: "AssetAliasCollectionResponse", - description: "Asset alias collection response.", + type: 'object', + required: ['asset_aliases', 'total_entries'], + title: 'AssetAliasCollectionResponse', + description: 'Asset alias collection response.' } as const; export const $AssetAliasResponse = { - properties: { - id: { - type: "integer", - title: "Id", - }, - name: { - type: "string", - title: "Name", - }, - group: { - type: "string", - title: "Group", + properties: { + id: { + type: 'integer', + title: 'Id' + }, + name: { + type: 'string', + title: 'Name' + }, + group: { + type: 'string', + title: 'Group' + } }, - }, - type: "object", - required: ["id", "name", "group"], - title: "AssetAliasResponse", - description: "Asset alias serializer for responses.", + type: 'object', + required: ['id', 'name', 'group'], + title: 'AssetAliasResponse', + description: 'Asset alias serializer for responses.' } as const; export const $AssetCollectionResponse = { - properties: { - assets: { - items: { - $ref: "#/components/schemas/AssetResponse", - }, - type: "array", - title: "Assets", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + assets: { + items: { + '$ref': '#/components/schemas/AssetResponse' + }, + type: 'array', + title: 'Assets' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["assets", "total_entries"], - title: "AssetCollectionResponse", - description: "Asset collection response.", + type: 'object', + required: ['assets', 'total_entries'], + title: 'AssetCollectionResponse', + description: 'Asset collection response.' 
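
Because every schema here is exported with as const, its metadata is available to application code with literal types, which can be handy for lightweight runtime checks. A minimal sketch, assuming only what the constants above declare (the isComplete helper is illustrative, not part of the generated client):

import { $AssetAliasResponse } from "./schemas.gen";

// Check that an untyped payload carries every field the schema marks as
// required (['id', 'name', 'group'] for $AssetAliasResponse).
const isComplete = (payload: Record<string, unknown>): boolean =>
  $AssetAliasResponse.required.every((field) => field in payload);

console.log(isComplete({ id: 1, name: "alias", group: "default" })); // true
console.log(isComplete({ id: 1 })); // false
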
} as const; export const $AssetEventCollectionResponse = { - properties: { - asset_events: { - items: { - $ref: "#/components/schemas/AssetEventResponse", - }, - type: "array", - title: "Asset Events", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + asset_events: { + items: { + '$ref': '#/components/schemas/AssetEventResponse' + }, + type: 'array', + title: 'Asset Events' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["asset_events", "total_entries"], - title: "AssetEventCollectionResponse", - description: "Asset event collection response.", + type: 'object', + required: ['asset_events', 'total_entries'], + title: 'AssetEventCollectionResponse', + description: 'Asset event collection response.' } as const; export const $AssetEventResponse = { - properties: { - id: { - type: "integer", - title: "Id", - }, - asset_id: { - type: "integer", - title: "Asset Id", - }, - uri: { - anyOf: [ - { - type: "string", + properties: { + id: { + type: 'integer', + title: 'Id' }, - { - type: "null", + asset_id: { + type: 'integer', + title: 'Asset Id' }, - ], - title: "Uri", - }, - name: { - anyOf: [ - { - type: "string", + uri: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Uri' }, - { - type: "null", + name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Name' }, - ], - title: "Name", - }, - group: { - anyOf: [ - { - type: "string", + group: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Group' }, - { - type: "null", + extra: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Extra' }, - ], - title: "Group", - }, - extra: { - anyOf: [ - { - additionalProperties: true, - type: "object", + source_task_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Source Task Id' + }, + source_dag_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Source Dag Id' }, - { - type: "null", + source_run_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Source Run Id' }, - ], - title: "Extra", - }, - source_task_id: { - anyOf: [ - { - type: "string", + source_map_index: { + type: 'integer', + title: 'Source Map Index' }, - { - type: "null", + created_dagruns: { + items: { + '$ref': '#/components/schemas/DagRunAssetReference' + }, + type: 'array', + title: 'Created Dagruns' }, - ], - title: "Source Task Id", + timestamp: { + type: 'string', + format: 'date-time', + title: 'Timestamp' + } }, - source_dag_id: { - anyOf: [ - { - type: "string", + type: 'object', + required: ['id', 'asset_id', 'source_map_index', 'created_dagruns', 'timestamp'], + title: 'AssetEventResponse', + description: 'Asset event serializer for responses.' 
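
The recurring anyOf pairing of a concrete type with type: 'null' is how the generator encodes nullable fields; in the companion types.gen.ts it surfaces as a plain union with null. An abbreviated, hand-written sketch of the shape this implies for the source fields above (the real generated type carries more fields):

// Abbreviated sketch of the union produced by the nullable anyOf pattern.
type AssetEventSourceInfo = {
  source_task_id: string | null;
  source_dag_id: string | null;
  source_run_id: string | null;
  source_map_index: number; // no anyOf wrapper: always present
};

const externallyCreatedEvent: AssetEventSourceInfo = {
  source_task_id: null, // event did not originate from a task instance
  source_dag_id: null,
  source_run_id: null,
  source_map_index: -1,
};
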
+} as const; + +export const $AssetResponse = { + properties: { + id: { + type: 'integer', + title: 'Id' }, - { - type: "null", + name: { + type: 'string', + title: 'Name' }, - ], - title: "Source Dag Id", - }, - source_run_id: { - anyOf: [ - { - type: "string", + uri: { + type: 'string', + title: 'Uri' }, - { - type: "null", + group: { + type: 'string', + title: 'Group' }, - ], - title: "Source Run Id", - }, - source_map_index: { - type: "integer", - title: "Source Map Index", - }, - created_dagruns: { - items: { - $ref: "#/components/schemas/DagRunAssetReference", - }, - type: "array", - title: "Created Dagruns", - }, - timestamp: { - type: "string", - format: "date-time", - title: "Timestamp", + extra: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Extra' + }, + created_at: { + type: 'string', + format: 'date-time', + title: 'Created At' + }, + updated_at: { + type: 'string', + format: 'date-time', + title: 'Updated At' + }, + scheduled_dags: { + items: { + '$ref': '#/components/schemas/DagScheduleAssetReference' + }, + type: 'array', + title: 'Scheduled Dags' + }, + producing_tasks: { + items: { + '$ref': '#/components/schemas/TaskOutletAssetReference' + }, + type: 'array', + title: 'Producing Tasks' + }, + consuming_tasks: { + items: { + '$ref': '#/components/schemas/TaskInletAssetReference' + }, + type: 'array', + title: 'Consuming Tasks' + }, + aliases: { + items: { + '$ref': '#/components/schemas/AssetAliasResponse' + }, + type: 'array', + title: 'Aliases' + }, + last_asset_event: { + anyOf: [ + { + '$ref': '#/components/schemas/LastAssetEventResponse' + }, + { + type: 'null' + } + ] + } }, - }, - type: "object", - required: ["id", "asset_id", "source_map_index", "created_dagruns", "timestamp"], - title: "AssetEventResponse", - description: "Asset event serializer for responses.", -} as const; - -export const $AssetResponse = { - properties: { - id: { - type: "integer", - title: "Id", - }, - name: { - type: "string", - title: "Name", - }, - uri: { - type: "string", - title: "Uri", - }, - group: { - type: "string", - title: "Group", - }, - extra: { - anyOf: [ - { - additionalProperties: true, - type: "object", - }, - { - type: "null", - }, - ], - title: "Extra", - }, - created_at: { - type: "string", - format: "date-time", - title: "Created At", - }, - updated_at: { - type: "string", - format: "date-time", - title: "Updated At", - }, - consuming_dags: { - items: { - $ref: "#/components/schemas/DagScheduleAssetReference", - }, - type: "array", - title: "Consuming Dags", - }, - producing_tasks: { - items: { - $ref: "#/components/schemas/TaskOutletAssetReference", - }, - type: "array", - title: "Producing Tasks", - }, - aliases: { - items: { - $ref: "#/components/schemas/AssetAliasResponse", - }, - type: "array", - title: "Aliases", - }, - }, - type: "object", - required: [ - "id", - "name", - "uri", - "group", - "created_at", - "updated_at", - "consuming_dags", - "producing_tasks", - "aliases", - ], - title: "AssetResponse", - description: "Asset serializer for responses.", + type: 'object', + required: ['id', 'name', 'uri', 'group', 'created_at', 'updated_at', 'scheduled_dags', 'producing_tasks', 'consuming_tasks', 'aliases'], + title: 'AssetResponse', + description: 'Asset serializer for responses.' 
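
This hunk also changes the AssetResponse contract: consuming_dags is renamed to scheduled_dags, and consuming_tasks plus last_asset_event are new, so UI code keyed to the old field name needs a matching update. A sketch of a consumer adjusted for the new shape (summarizeAsset is a hypothetical helper; the AssetResponse type is assumed to be exported from types.gen.ts):

import type { AssetResponse } from "./types.gen";

const summarizeAsset = (asset: AssetResponse): string => {
  const dagCount = asset.scheduled_dags.length; // was: asset.consuming_dags
  const taskCount = asset.consuming_tasks.length; // new in this revision
  return `${asset.name}: ${dagCount} scheduled dag(s), ${taskCount} consuming task(s)`;
};
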
} as const; export const $BackfillCollectionResponse = { - properties: { - backfills: { - items: { - $ref: "#/components/schemas/BackfillResponse", - }, - type: "array", - title: "Backfills", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + backfills: { + items: { + '$ref': '#/components/schemas/BackfillResponse' + }, + type: 'array', + title: 'Backfills' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["backfills", "total_entries"], - title: "BackfillCollectionResponse", - description: "Backfill Collection serializer for responses.", + type: 'object', + required: ['backfills', 'total_entries'], + title: 'BackfillCollectionResponse', + description: 'Backfill Collection serializer for responses.' } as const; export const $BackfillPostBody = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - from_date: { - type: "string", - format: "date-time", - title: "From Date", - }, - to_date: { - type: "string", - format: "date-time", - title: "To Date", - }, - run_backwards: { - type: "boolean", - title: "Run Backwards", - default: false, - }, - dag_run_conf: { - additionalProperties: true, - type: "object", - title: "Dag Run Conf", - default: {}, - }, - reprocess_behavior: { - $ref: "#/components/schemas/ReprocessBehavior", - default: "none", - }, - max_active_runs: { - type: "integer", - title: "Max Active Runs", - default: 10, - }, - }, - additionalProperties: false, - type: "object", - required: ["dag_id", "from_date", "to_date"], - title: "BackfillPostBody", - description: "Object used for create backfill request.", + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' + }, + from_date: { + type: 'string', + format: 'date-time', + title: 'From Date' + }, + to_date: { + type: 'string', + format: 'date-time', + title: 'To Date' + }, + run_backwards: { + type: 'boolean', + title: 'Run Backwards', + default: false + }, + dag_run_conf: { + additionalProperties: true, + type: 'object', + title: 'Dag Run Conf', + default: {} + }, + reprocess_behavior: { + '$ref': '#/components/schemas/ReprocessBehavior', + default: 'none' + }, + max_active_runs: { + type: 'integer', + title: 'Max Active Runs', + default: 10 + } + }, + additionalProperties: false, + type: 'object', + required: ['dag_id', 'from_date', 'to_date'], + title: 'BackfillPostBody', + description: 'Object used for create backfill request.' 
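
Per $BackfillPostBody, only dag_id, from_date and to_date are required; everything else carries a default. A minimal payload sketch (type import assumed from types.gen.ts):

import type { BackfillPostBody } from "./types.gen";

// Omitted fields fall back to the schema defaults: run_backwards false,
// dag_run_conf {}, reprocess_behavior 'none', max_active_runs 10.
const body: BackfillPostBody = {
  dag_id: "example_dag",
  from_date: "2025-01-01T00:00:00Z",
  to_date: "2025-01-31T00:00:00Z",
};
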
} as const; export const $BackfillResponse = { - properties: { - id: { - type: "integer", - title: "Id", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - from_date: { - type: "string", - format: "date-time", - title: "From Date", - }, - to_date: { - type: "string", - format: "date-time", - title: "To Date", - }, - dag_run_conf: { - additionalProperties: true, - type: "object", - title: "Dag Run Conf", - }, - is_paused: { - type: "boolean", - title: "Is Paused", - }, - reprocess_behavior: { - $ref: "#/components/schemas/ReprocessBehavior", - }, - max_active_runs: { - type: "integer", - title: "Max Active Runs", - }, - created_at: { - type: "string", - format: "date-time", - title: "Created At", - }, - completed_at: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Completed At", - }, - updated_at: { - type: "string", - format: "date-time", - title: "Updated At", - }, - }, - type: "object", - required: [ - "id", - "dag_id", - "from_date", - "to_date", - "dag_run_conf", - "is_paused", - "reprocess_behavior", - "max_active_runs", - "created_at", - "completed_at", - "updated_at", - ], - title: "BackfillResponse", - description: "Base serializer for Backfill.", -} as const; - -export const $BaseInfoResponse = { - properties: { - status: { - anyOf: [ - { - type: "string", + properties: { + id: { + type: 'integer', + minimum: 0, + title: 'Id' + }, + dag_id: { + type: 'string', + title: 'Dag Id' + }, + from_date: { + type: 'string', + format: 'date-time', + title: 'From Date' + }, + to_date: { + type: 'string', + format: 'date-time', + title: 'To Date' + }, + dag_run_conf: { + additionalProperties: true, + type: 'object', + title: 'Dag Run Conf' + }, + is_paused: { + type: 'boolean', + title: 'Is Paused' + }, + reprocess_behavior: { + '$ref': '#/components/schemas/ReprocessBehavior' + }, + max_active_runs: { + type: 'integer', + title: 'Max Active Runs' + }, + created_at: { + type: 'string', + format: 'date-time', + title: 'Created At' + }, + completed_at: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Completed At' }, - { - type: "null", + updated_at: { + type: 'string', + format: 'date-time', + title: 'Updated At' }, - ], - title: "Status", + dag_display_name: { + type: 'string', + title: 'Dag Display Name' + } }, - }, - type: "object", - required: ["status"], - title: "BaseInfoResponse", - description: "Base info serializer for responses.", + type: 'object', + required: ['id', 'dag_id', 'from_date', 'to_date', 'dag_run_conf', 'is_paused', 'reprocess_behavior', 'max_active_runs', 'created_at', 'completed_at', 'updated_at', 'dag_display_name'], + title: 'BackfillResponse', + description: 'Base serializer for Backfill.' } as const; -export const $BulkAction = { - type: "string", - enum: ["create", "delete", "update"], - title: "BulkAction", - description: "Bulk Action to be performed on the used model.", +export const $BaseInfoResponse = { + properties: { + status: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Status' + } + }, + type: 'object', + required: ['status'], + title: 'BaseInfoResponse', + description: 'Base info serializer for responses.' 
} as const; export const $BulkActionNotOnExistence = { - type: "string", - enum: ["fail", "skip"], - title: "BulkActionNotOnExistence", - description: "Bulk Action to be taken if the entity does not exist.", + type: 'string', + enum: ['fail', 'skip'], + title: 'BulkActionNotOnExistence', + description: 'Bulk Action to be taken if the entity does not exist.' } as const; export const $BulkActionOnExistence = { - type: "string", - enum: ["fail", "skip", "overwrite"], - title: "BulkActionOnExistence", - description: "Bulk Action to be taken if the entity already exists or not.", + type: 'string', + enum: ['fail', 'skip', 'overwrite'], + title: 'BulkActionOnExistence', + description: 'Bulk Action to be taken if the entity already exists or not.' } as const; export const $BulkActionResponse = { - properties: { - success: { - items: { - type: "string", - }, - type: "array", - title: "Success", - description: "A list of unique id/key representing successful operations.", - default: [], - }, - errors: { - items: { - additionalProperties: true, - type: "object", - }, - type: "array", - title: "Errors", - description: - "A list of errors encountered during the operation, each containing details about the issue.", - default: [], - }, - }, - type: "object", - title: "BulkActionResponse", - description: `Serializer for individual bulk action responses. + properties: { + success: { + items: { + type: 'string' + }, + type: 'array', + title: 'Success', + description: 'A list of unique id/key representing successful operations.', + default: [] + }, + errors: { + items: { + additionalProperties: true, + type: 'object' + }, + type: 'array', + title: 'Errors', + description: 'A list of errors encountered during the operation, each containing details about the issue.', + default: [] + } + }, + type: 'object', + title: 'BulkActionResponse', + description: `Serializer for individual bulk action responses. Represents the outcome of a single bulk operation (create, update, or delete). The response includes a list of successful keys and any errors encountered during the operation. 
-This structure helps users understand which key actions succeeded and which failed.`, +This structure helps users understand which key actions succeeded and which failed.` +} as const; + +export const $BulkBody_BulkTaskInstanceBody_ = { + properties: { + actions: { + items: { + oneOf: [ + { + '$ref': '#/components/schemas/BulkCreateAction_BulkTaskInstanceBody_' + }, + { + '$ref': '#/components/schemas/BulkUpdateAction_BulkTaskInstanceBody_' + }, + { + '$ref': '#/components/schemas/BulkDeleteAction_BulkTaskInstanceBody_' + } + ] + }, + type: 'array', + title: 'Actions' + } + }, + additionalProperties: false, + type: 'object', + required: ['actions'], + title: 'BulkBody[BulkTaskInstanceBody]' } as const; export const $BulkBody_ConnectionBody_ = { - properties: { - actions: { - items: { - oneOf: [ - { - $ref: "#/components/schemas/BulkCreateAction_ConnectionBody_", - }, - { - $ref: "#/components/schemas/BulkUpdateAction_ConnectionBody_", - }, - { - $ref: "#/components/schemas/BulkDeleteAction_ConnectionBody_", - }, - ], - }, - type: "array", - title: "Actions", - }, - }, - additionalProperties: false, - type: "object", - required: ["actions"], - title: "BulkBody[ConnectionBody]", + properties: { + actions: { + items: { + oneOf: [ + { + '$ref': '#/components/schemas/BulkCreateAction_ConnectionBody_' + }, + { + '$ref': '#/components/schemas/BulkUpdateAction_ConnectionBody_' + }, + { + '$ref': '#/components/schemas/BulkDeleteAction_ConnectionBody_' + } + ] + }, + type: 'array', + title: 'Actions' + } + }, + additionalProperties: false, + type: 'object', + required: ['actions'], + title: 'BulkBody[ConnectionBody]' } as const; export const $BulkBody_PoolBody_ = { - properties: { - actions: { - items: { - oneOf: [ - { - $ref: "#/components/schemas/BulkCreateAction_PoolBody_", - }, - { - $ref: "#/components/schemas/BulkUpdateAction_PoolBody_", - }, - { - $ref: "#/components/schemas/BulkDeleteAction_PoolBody_", - }, - ], - }, - type: "array", - title: "Actions", - }, - }, - additionalProperties: false, - type: "object", - required: ["actions"], - title: "BulkBody[PoolBody]", + properties: { + actions: { + items: { + oneOf: [ + { + '$ref': '#/components/schemas/BulkCreateAction_PoolBody_' + }, + { + '$ref': '#/components/schemas/BulkUpdateAction_PoolBody_' + }, + { + '$ref': '#/components/schemas/BulkDeleteAction_PoolBody_' + } + ] + }, + type: 'array', + title: 'Actions' + } + }, + additionalProperties: false, + type: 'object', + required: ['actions'], + title: 'BulkBody[PoolBody]' } as const; export const $BulkBody_VariableBody_ = { - properties: { - actions: { - items: { - oneOf: [ - { - $ref: "#/components/schemas/BulkCreateAction_VariableBody_", - }, - { - $ref: "#/components/schemas/BulkUpdateAction_VariableBody_", - }, - { - $ref: "#/components/schemas/BulkDeleteAction_VariableBody_", - }, - ], - }, - type: "array", - title: "Actions", - }, - }, - additionalProperties: false, - type: "object", - required: ["actions"], - title: "BulkBody[VariableBody]", + properties: { + actions: { + items: { + oneOf: [ + { + '$ref': '#/components/schemas/BulkCreateAction_VariableBody_' + }, + { + '$ref': '#/components/schemas/BulkUpdateAction_VariableBody_' + }, + { + '$ref': '#/components/schemas/BulkDeleteAction_VariableBody_' + } + ] + }, + type: 'array', + title: 'Actions' + } + }, + additionalProperties: false, + type: 'object', + required: ['actions'], + title: 'BulkBody[VariableBody]' +} as const; + +export const $BulkCreateAction_BulkTaskInstanceBody_ = { + properties: { + action: { + type: 'string', + 
const: 'create', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/BulkTaskInstanceBody' + }, + type: 'array', + title: 'Entities', + description: 'A list of entities to be created.' + }, + action_on_existence: { + '$ref': '#/components/schemas/BulkActionOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkCreateAction[BulkTaskInstanceBody]' } as const; export const $BulkCreateAction_ConnectionBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - $ref: "#/components/schemas/ConnectionBody", - }, - type: "array", - title: "Entities", - description: "A list of entities to be created.", - }, - action_on_existence: { - $ref: "#/components/schemas/BulkActionOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkCreateAction[ConnectionBody]", + properties: { + action: { + type: 'string', + const: 'create', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/ConnectionBody' + }, + type: 'array', + title: 'Entities', + description: 'A list of entities to be created.' + }, + action_on_existence: { + '$ref': '#/components/schemas/BulkActionOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkCreateAction[ConnectionBody]' } as const; export const $BulkCreateAction_PoolBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - $ref: "#/components/schemas/PoolBody", - }, - type: "array", - title: "Entities", - description: "A list of entities to be created.", - }, - action_on_existence: { - $ref: "#/components/schemas/BulkActionOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkCreateAction[PoolBody]", + properties: { + action: { + type: 'string', + const: 'create', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/PoolBody' + }, + type: 'array', + title: 'Entities', + description: 'A list of entities to be created.' 
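
Replacing the shared BulkAction $ref with a literal const per action ('create' / 'update' / 'delete') is what turns the generated BulkBody types into a proper discriminated union. A hand-written reduction of the pattern, with field lists trimmed to the discriminant:

// Abbreviated: the generated types carry more fields, but 'action' is the
// literal TypeScript narrows on.
type BulkActionSketch<T> =
  | { action: "create"; entities: T[] }
  | { action: "update"; entities: T[] }
  | { action: "delete"; entities: string[] };

const describe = <T>(a: BulkActionSketch<T>): string => {
  switch (a.action) {
    case "create":
    case "update":
      return `${a.action}: ${a.entities.length} entities`;
    case "delete":
      return `delete: ${a.entities.join(", ")}`; // narrowed to string[]
  }
};
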
+ }, + action_on_existence: { + '$ref': '#/components/schemas/BulkActionOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkCreateAction[PoolBody]' } as const; export const $BulkCreateAction_VariableBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - $ref: "#/components/schemas/VariableBody", - }, - type: "array", - title: "Entities", - description: "A list of entities to be created.", - }, - action_on_existence: { - $ref: "#/components/schemas/BulkActionOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkCreateAction[VariableBody]", + properties: { + action: { + type: 'string', + const: 'create', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/VariableBody' + }, + type: 'array', + title: 'Entities', + description: 'A list of entities to be created.' + }, + action_on_existence: { + '$ref': '#/components/schemas/BulkActionOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkCreateAction[VariableBody]' +} as const; + +export const $BulkDeleteAction_BulkTaskInstanceBody_ = { + properties: { + action: { + type: 'string', + const: 'delete', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + type: 'string' + }, + type: 'array', + title: 'Entities', + description: 'A list of entity id/key to be deleted.' + }, + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkDeleteAction[BulkTaskInstanceBody]' } as const; export const $BulkDeleteAction_ConnectionBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - type: "string", - }, - type: "array", - title: "Entities", - description: "A list of entity id/key to be deleted.", - }, - action_on_non_existence: { - $ref: "#/components/schemas/BulkActionNotOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkDeleteAction[ConnectionBody]", + properties: { + action: { + type: 'string', + const: 'delete', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + type: 'string' + }, + type: 'array', + title: 'Entities', + description: 'A list of entity id/key to be deleted.' 
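
On the wire, a delete action lists plain id/key strings rather than full entities, and action_on_non_existence (default 'fail', per BulkActionNotOnExistence) decides whether a missing key aborts the batch. A payload sketch following the schema (variable name illustrative):

// Delete two connections, skipping ids that no longer exist instead of
// failing the whole bulk request.
const deleteConnectionsBody = {
  actions: [
    {
      action: "delete" as const,
      entities: ["stale_conn_a", "stale_conn_b"],
      action_on_non_existence: "skip" as const,
    },
  ],
};
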
+ }, + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkDeleteAction[ConnectionBody]' } as const; export const $BulkDeleteAction_PoolBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - type: "string", - }, - type: "array", - title: "Entities", - description: "A list of entity id/key to be deleted.", - }, - action_on_non_existence: { - $ref: "#/components/schemas/BulkActionNotOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkDeleteAction[PoolBody]", + properties: { + action: { + type: 'string', + const: 'delete', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + type: 'string' + }, + type: 'array', + title: 'Entities', + description: 'A list of entity id/key to be deleted.' + }, + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkDeleteAction[PoolBody]' } as const; export const $BulkDeleteAction_VariableBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - type: "string", - }, - type: "array", - title: "Entities", - description: "A list of entity id/key to be deleted.", - }, - action_on_non_existence: { - $ref: "#/components/schemas/BulkActionNotOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkDeleteAction[VariableBody]", + properties: { + action: { + type: 'string', + const: 'delete', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + type: 'string' + }, + type: 'array', + title: 'Entities', + description: 'A list of entity id/key to be deleted.' + }, + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkDeleteAction[VariableBody]' } as const; export const $BulkResponse = { - properties: { - create: { - anyOf: [ - { - $ref: "#/components/schemas/BulkActionResponse", + properties: { + create: { + anyOf: [ + { + '$ref': '#/components/schemas/BulkActionResponse' + }, + { + type: 'null' + } + ], + description: 'Details of the bulk create operation, including successful keys and errors.' }, - { - type: "null", + update: { + anyOf: [ + { + '$ref': '#/components/schemas/BulkActionResponse' + }, + { + type: 'null' + } + ], + description: 'Details of the bulk update operation, including successful keys and errors.' }, - ], - description: "Details of the bulk create operation, including successful keys and errors.", + delete: { + anyOf: [ + { + '$ref': '#/components/schemas/BulkActionResponse' + }, + { + type: 'null' + } + ], + description: 'Details of the bulk delete operation, including successful keys and errors.' 
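
As the $BulkResponse docstring below notes, each of create/update/delete is populated only when the request included that action, so consumers should null-check before reading results. A defensive sketch (BulkResponse type assumed from types.gen.ts):

import type { BulkResponse } from "./types.gen";

const reportBulkResult = (res: BulkResponse): void => {
  for (const action of ["create", "update", "delete"] as const) {
    const outcome = res[action];
    if (!outcome) continue; // action was not part of the request
    console.log(`${action}: ${outcome.success?.length ?? 0} ok, ${outcome.errors?.length ?? 0} failed`);
  }
};
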
+ } }, - update: { - anyOf: [ - { - $ref: "#/components/schemas/BulkActionResponse", + type: 'object', + title: 'BulkResponse', + description: `Serializer for responses to bulk entity operations. + +This represents the results of create, update, and delete actions performed on entity in bulk. +Each action (if requested) is represented as a field containing details about successful keys and any encountered errors. +Fields are populated in the response only if the respective action was part of the request, else are set None.` +} as const; + +export const $BulkTaskInstanceBody = { + properties: { + new_state: { + anyOf: [ + { + '$ref': '#/components/schemas/TaskInstanceState' + }, + { + type: 'null' + } + ] + }, + note: { + anyOf: [ + { + type: 'string', + maxLength: 1000 + }, + { + type: 'null' + } + ], + title: 'Note' }, - { - type: "null", + include_upstream: { + type: 'boolean', + title: 'Include Upstream', + default: false }, - ], - description: "Details of the bulk update operation, including successful keys and errors.", - }, - delete: { - anyOf: [ - { - $ref: "#/components/schemas/BulkActionResponse", + include_downstream: { + type: 'boolean', + title: 'Include Downstream', + default: false }, - { - type: "null", + include_future: { + type: 'boolean', + title: 'Include Future', + default: false + }, + include_past: { + type: 'boolean', + title: 'Include Past', + default: false + }, + task_id: { + type: 'string', + title: 'Task Id' + }, + map_index: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Map Index' + } + }, + additionalProperties: false, + type: 'object', + required: ['task_id'], + title: 'BulkTaskInstanceBody', + description: 'Request body for bulk update, and delete task instances.' +} as const; + +export const $BulkUpdateAction_BulkTaskInstanceBody_ = { + properties: { + action: { + type: 'string', + const: 'update', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/BulkTaskInstanceBody' + }, + type: 'array', + title: 'Entities', + description: 'A list of entities to be updated.' }, - ], - description: "Details of the bulk delete operation, including successful keys and errors.", + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } }, - }, - type: "object", - title: "BulkResponse", - description: `Serializer for responses to bulk entity operations. - -This represents the results of create, update, and delete actions performed on entity in bulk. -Each action (if requested) is represented as a field containing details about successful keys and any encountered errors. 
-Fields are populated in the response only if the respective action was part of the request, else are set None.`, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkUpdateAction[BulkTaskInstanceBody]' } as const; export const $BulkUpdateAction_ConnectionBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - $ref: "#/components/schemas/ConnectionBody", - }, - type: "array", - title: "Entities", - description: "A list of entities to be updated.", - }, - action_on_non_existence: { - $ref: "#/components/schemas/BulkActionNotOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkUpdateAction[ConnectionBody]", + properties: { + action: { + type: 'string', + const: 'update', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/ConnectionBody' + }, + type: 'array', + title: 'Entities', + description: 'A list of entities to be updated.' + }, + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkUpdateAction[ConnectionBody]' } as const; export const $BulkUpdateAction_PoolBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - $ref: "#/components/schemas/PoolBody", - }, - type: "array", - title: "Entities", - description: "A list of entities to be updated.", - }, - action_on_non_existence: { - $ref: "#/components/schemas/BulkActionNotOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkUpdateAction[PoolBody]", + properties: { + action: { + type: 'string', + const: 'update', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/PoolBody' + }, + type: 'array', + title: 'Entities', + description: 'A list of entities to be updated.' 
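
The new BulkTaskInstanceBody above makes task instances the fourth bulk-managed resource alongside connections, pools and variables: only task_id is required, while new_state, note (max 1000 chars) and map_index are optional. A payload sketch for a bulk state update ('success' being one valid TaskInstanceState value):

// Mark two task instances successful; the second targets one mapped index.
const patchTaskInstancesBody = {
  actions: [
    {
      action: "update" as const,
      entities: [
        { task_id: "extract", new_state: "success", note: "backfilled manually" },
        { task_id: "load", map_index: 3, new_state: "success" },
      ],
    },
  ],
};
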
+ }, + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } + }, + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkUpdateAction[PoolBody]' } as const; export const $BulkUpdateAction_VariableBody_ = { - properties: { - action: { - $ref: "#/components/schemas/BulkAction", - description: "The action to be performed on the entities.", - }, - entities: { - items: { - $ref: "#/components/schemas/VariableBody", - }, - type: "array", - title: "Entities", - description: "A list of entities to be updated.", - }, - action_on_non_existence: { - $ref: "#/components/schemas/BulkActionNotOnExistence", - default: "fail", - }, - }, - additionalProperties: false, - type: "object", - required: ["action", "entities"], - title: "BulkUpdateAction[VariableBody]", -} as const; - -export const $ClearTaskInstancesBody = { - properties: { - dry_run: { - type: "boolean", - title: "Dry Run", - default: true, - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date", - }, - only_failed: { - type: "boolean", - title: "Only Failed", - default: true, - }, - only_running: { - type: "boolean", - title: "Only Running", - default: false, - }, - reset_dag_runs: { - type: "boolean", - title: "Reset Dag Runs", - default: true, - }, - task_ids: { - anyOf: [ - { - items: { - anyOf: [ - { - type: "string", - }, - { - prefixItems: [ - { - type: "string", - }, - { - type: "integer", - }, - ], - type: "array", - maxItems: 2, - minItems: 2, - }, - ], - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Task Ids", - }, - dag_run_id: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Dag Run Id", - }, - include_upstream: { - type: "boolean", - title: "Include Upstream", - default: false, - }, - include_downstream: { - type: "boolean", - title: "Include Downstream", - default: false, - }, - include_future: { - type: "boolean", - title: "Include Future", - default: false, - }, - include_past: { - type: "boolean", - title: "Include Past", - default: false, - }, - }, - additionalProperties: false, - type: "object", - title: "ClearTaskInstancesBody", - description: "Request body for Clear Task Instances endpoint.", -} as const; - -export const $Config = { - properties: { - sections: { - items: { - $ref: "#/components/schemas/ConfigSection", - }, - type: "array", - title: "Sections", - }, - }, - additionalProperties: false, - type: "object", - required: ["sections"], - title: "Config", - description: "List of config sections with their options.", -} as const; - -export const $ConfigOption = { - properties: { - key: { - type: "string", - title: "Key", - }, - value: { - anyOf: [ - { - type: "string", - }, - { - prefixItems: [ - { - type: "string", + properties: { + action: { + type: 'string', + const: 'update', + title: 'Action', + description: 'The action to be performed on the entities.' + }, + entities: { + items: { + '$ref': '#/components/schemas/VariableBody' }, - { - type: "string", - }, - ], - type: "array", - maxItems: 2, - minItems: 2, + type: 'array', + title: 'Entities', + description: 'A list of entities to be updated.' 
}, - ], - title: "Value", + action_on_non_existence: { + '$ref': '#/components/schemas/BulkActionNotOnExistence', + default: 'fail' + } }, - }, - additionalProperties: false, - type: "object", - required: ["key", "value"], - title: "ConfigOption", - description: "Config option.", -} as const; - -export const $ConfigSection = { - properties: { - name: { - type: "string", - title: "Name", - }, - options: { - items: { - $ref: "#/components/schemas/ConfigOption", - }, - type: "array", - title: "Options", - }, - }, - additionalProperties: false, - type: "object", - required: ["name", "options"], - title: "ConfigSection", - description: "Config Section Schema.", + additionalProperties: false, + type: 'object', + required: ['action', 'entities'], + title: 'BulkUpdateAction[VariableBody]' } as const; -export const $ConnectionBody = { - properties: { - connection_id: { - type: "string", - maxLength: 200, - pattern: "^[\\w.-]+$", - title: "Connection Id", - }, - conn_type: { - type: "string", - title: "Conn Type", - }, - description: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Description", - }, - host: { - anyOf: [ - { - type: "string", +export const $ClearTaskInstancesBody = { + properties: { + dry_run: { + type: 'boolean', + title: 'Dry Run', + default: true }, - { - type: "null", + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' }, - ], - title: "Host", - }, - login: { - anyOf: [ - { - type: "string", + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' }, - { - type: "null", + only_failed: { + type: 'boolean', + title: 'Only Failed', + default: true }, - ], - title: "Login", - }, - schema: { - anyOf: [ - { - type: "string", + only_running: { + type: 'boolean', + title: 'Only Running', + default: false }, - { - type: "null", + reset_dag_runs: { + type: 'boolean', + title: 'Reset Dag Runs', + default: true }, - ], - title: "Schema", - }, - port: { - anyOf: [ - { - type: "integer", + task_ids: { + anyOf: [ + { + items: { + anyOf: [ + { + type: 'string' + }, + { + prefixItems: [ + { + type: 'string' + }, + { + type: 'integer' + } + ], + type: 'array', + maxItems: 2, + minItems: 2 + } + ] + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Task Ids' }, - { - type: "null", + dag_run_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Dag Run Id' }, - ], - title: "Port", - }, - password: { - anyOf: [ - { - type: "string", + include_upstream: { + type: 'boolean', + title: 'Include Upstream', + default: false }, - { - type: "null", + include_downstream: { + type: 'boolean', + title: 'Include Downstream', + default: false }, - ], - title: "Password", - }, - extra: { - anyOf: [ - { - type: "string", + include_future: { + type: 'boolean', + title: 'Include Future', + default: false }, - { - type: "null", + include_past: { + type: 'boolean', + title: 'Include Past', + default: false }, - ], - title: "Extra", + run_on_latest_version: { + type: 'boolean', + title: 'Run On Latest Version', + description: '(Experimental) Run on the latest bundle version of the dag after clearing the task instances.', + default: false + } }, - }, - additionalProperties: false, - type: "object", - required: ["connection_id", "conn_type"], - title: "ConnectionBody", - description: "Connection Serializer for requests body.", + additionalProperties: false, + type: 'object', + title: 'ClearTaskInstancesBody', + 
description: 'Request body for Clear Task Instances endpoint.' } as const; -export const $ConnectionCollectionResponse = { - properties: { - connections: { - items: { - $ref: "#/components/schemas/ConnectionResponse", - }, - type: "array", - title: "Connections", - }, - total_entries: { - type: "integer", - title: "Total Entries", +export const $Config = { + properties: { + sections: { + items: { + '$ref': '#/components/schemas/ConfigSection' + }, + type: 'array', + title: 'Sections' + } }, - }, - type: "object", - required: ["connections", "total_entries"], - title: "ConnectionCollectionResponse", - description: "Connection Collection serializer for responses.", + additionalProperties: false, + type: 'object', + required: ['sections'], + title: 'Config', + description: 'List of config sections with their options.' } as const; -export const $ConnectionResponse = { - properties: { - connection_id: { - type: "string", - title: "Connection Id", +export const $ConfigOption = { + properties: { + key: { + type: 'string', + title: 'Key' + }, + value: { + anyOf: [ + { + type: 'string' + }, + { + prefixItems: [ + { + type: 'string' + }, + { + type: 'string' + } + ], + type: 'array', + maxItems: 2, + minItems: 2 + } + ], + title: 'Value' + } }, - conn_type: { - type: "string", - title: "Conn Type", + additionalProperties: false, + type: 'object', + required: ['key', 'value'], + title: 'ConfigOption', + description: 'Config option.' +} as const; + +export const $ConfigSection = { + properties: { + name: { + type: 'string', + title: 'Name' + }, + options: { + items: { + '$ref': '#/components/schemas/ConfigOption' + }, + type: 'array', + title: 'Options' + } }, - description: { - anyOf: [ - { - type: "string", + additionalProperties: false, + type: 'object', + required: ['name', 'options'], + title: 'ConfigSection', + description: 'Config Section Schema.' +} as const; + +export const $ConnectionBody = { + properties: { + connection_id: { + type: 'string', + maxLength: 200, + pattern: '^[\\w.-]+$', + title: 'Connection Id' + }, + conn_type: { + type: 'string', + title: 'Conn Type' + }, + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' }, - { - type: "null", + host: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Host' }, - ], - title: "Description", - }, - host: { - anyOf: [ - { - type: "string", + login: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Login' }, - { - type: "null", + schema: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Schema' }, - ], - title: "Host", - }, - login: { - anyOf: [ - { - type: "string", + port: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Port' }, - { - type: "null", + password: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Password' }, - ], - title: "Login", + extra: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Extra' + } }, - schema: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + additionalProperties: false, + type: 'object', + required: ['connection_id', 'conn_type'], + title: 'ConnectionBody', + description: 'Connection Serializer for requests body.' 
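
connection_id is the only constrained field in $ConnectionBody: at most 200 characters, matching ^[\w.-]+$. Since those constraints ship in the schema const, client code can reuse them rather than hard-coding. A sketch (the isValidConnectionId helper is illustrative):

import { $ConnectionBody } from "./schemas.gen";

// Reuse the generated constraints instead of duplicating them.
const { pattern, maxLength } = $ConnectionBody.properties.connection_id;
const isValidConnectionId = (id: string): boolean =>
  id.length <= maxLength && new RegExp(pattern).test(id);

console.log(isValidConnectionId("my_postgres.prod")); // true
console.log(isValidConnectionId("has spaces")); // false
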
+} as const; + +export const $ConnectionCollectionResponse = { + properties: { + connections: { + items: { + '$ref': '#/components/schemas/ConnectionResponse' + }, + type: 'array', + title: 'Connections' }, - ], - title: "Schema", + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - port: { - anyOf: [ - { - type: "integer", + type: 'object', + required: ['connections', 'total_entries'], + title: 'ConnectionCollectionResponse', + description: 'Connection Collection serializer for responses.' +} as const; + +export const $ConnectionResponse = { + properties: { + connection_id: { + type: 'string', + title: 'Connection Id' }, - { - type: "null", + conn_type: { + type: 'string', + title: 'Conn Type' }, - ], - title: "Port", - }, - password: { - anyOf: [ - { - type: "string", + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' + }, + host: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Host' + }, + login: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Login' }, - { - type: "null", + schema: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Schema' }, - ], - title: "Password", - }, - extra: { - anyOf: [ - { - type: "string", + port: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Port' }, - { - type: "null", + password: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Password' }, - ], - title: "Extra", + extra: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Extra' + } }, - }, - type: "object", - required: [ - "connection_id", - "conn_type", - "description", - "host", - "login", - "schema", - "port", - "password", - "extra", - ], - title: "ConnectionResponse", - description: "Connection serializer for responses.", + type: 'object', + required: ['connection_id', 'conn_type', 'description', 'host', 'login', 'schema', 'port', 'password', 'extra'], + title: 'ConnectionResponse', + description: 'Connection serializer for responses.' } as const; export const $ConnectionTestResponse = { - properties: { - status: { - type: "boolean", - title: "Status", - }, - message: { - type: "string", - title: "Message", + properties: { + status: { + type: 'boolean', + title: 'Status' + }, + message: { + type: 'string', + title: 'Message' + } }, - }, - type: "object", - required: ["status", "message"], - title: "ConnectionTestResponse", - description: "Connection Test serializer for responses.", + type: 'object', + required: ['status', 'message'], + title: 'ConnectionTestResponse', + description: 'Connection Test serializer for responses.' } as const; export const $CreateAssetEventsBody = { - properties: { - asset_id: { - type: "integer", - title: "Asset Id", - }, - extra: { - additionalProperties: true, - type: "object", - title: "Extra", + properties: { + asset_id: { + type: 'integer', + title: 'Asset Id' + }, + extra: { + additionalProperties: true, + type: 'object', + title: 'Extra' + } }, - }, - additionalProperties: false, - type: "object", - required: ["asset_id"], - title: "CreateAssetEventsBody", - description: "Create asset events request.", + additionalProperties: false, + type: 'object', + required: ['asset_id'], + title: 'CreateAssetEventsBody', + description: 'Create asset events request.' 
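
Manually emitting an asset event only requires the numeric asset_id; extra is an optional free-form object attached verbatim to the event. A request-body sketch (type import assumed from types.gen.ts):

import type { CreateAssetEventsBody } from "./types.gen";

const eventBody: CreateAssetEventsBody = {
  asset_id: 42,
  extra: { triggered_by: "manual-backfill" },
};
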
} as const; export const $DAGCollectionResponse = { - properties: { - dags: { - items: { - $ref: "#/components/schemas/DAGResponse", - }, - type: "array", - title: "Dags", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + dags: { + items: { + '$ref': '#/components/schemas/DAGResponse' + }, + type: 'array', + title: 'Dags' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["dags", "total_entries"], - title: "DAGCollectionResponse", - description: "DAG Collection serializer for responses.", + type: 'object', + required: ['dags', 'total_entries'], + title: 'DAGCollectionResponse', + description: 'DAG Collection serializer for responses.' } as const; export const $DAGDetailsResponse = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - dag_display_name: { - type: "string", - title: "Dag Display Name", - }, - is_paused: { - type: "boolean", - title: "Is Paused", - }, - is_stale: { - type: "boolean", - title: "Is Stale", - }, - last_parsed_time: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' }, - { - type: "null", + dag_display_name: { + type: 'string', + title: 'Dag Display Name' }, - ], - title: "Last Parsed Time", - }, - last_expired: { - anyOf: [ - { - type: "string", - format: "date-time", + is_paused: { + type: 'boolean', + title: 'Is Paused' }, - { - type: "null", + is_stale: { + type: 'boolean', + title: 'Is Stale' }, - ], - title: "Last Expired", - }, - bundle_name: { - anyOf: [ - { - type: "string", + last_parsed_time: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Parsed Time' }, - { - type: "null", + last_expired: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Expired' }, - ], - title: "Bundle Name", - }, - relative_fileloc: { - anyOf: [ - { - type: "string", + bundle_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Name' }, - { - type: "null", + bundle_version: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Version' }, - ], - title: "Relative Fileloc", - }, - fileloc: { - type: "string", - title: "Fileloc", - }, - description: { - anyOf: [ - { - type: "string", + relative_fileloc: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Relative Fileloc' }, - { - type: "null", + fileloc: { + type: 'string', + title: 'Fileloc' }, - ], - title: "Description", - }, - timetable_summary: { - anyOf: [ - { - type: "string", + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' }, - { - type: "null", + timetable_summary: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Timetable Summary' }, - ], - title: "Timetable Summary", - }, - timetable_description: { - anyOf: [ - { - type: "string", + timetable_description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Timetable Description' }, - { - type: "null", - }, - ], - title: "Timetable Description", - }, - tags: { - items: { - $ref: "#/components/schemas/DagTagResponse", - }, - type: "array", - title: "Tags", - }, - max_active_tasks: { - type: "integer", - title: "Max Active Tasks", - }, - max_active_runs: { - anyOf: [ - { - type: "integer", + tags: { + items: { + '$ref': '#/components/schemas/DagTagResponse' + }, + type: 'array', + 
title: 'Tags' }, - { - type: "null", + max_active_tasks: { + type: 'integer', + title: 'Max Active Tasks' }, - ], - title: "Max Active Runs", - }, - max_consecutive_failed_dag_runs: { - type: "integer", - title: "Max Consecutive Failed Dag Runs", - }, - has_task_concurrency_limits: { - type: "boolean", - title: "Has Task Concurrency Limits", - }, - has_import_errors: { - type: "boolean", - title: "Has Import Errors", - }, - next_dagrun_logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", + max_active_runs: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Max Active Runs' }, - { - type: "null", + max_consecutive_failed_dag_runs: { + type: 'integer', + title: 'Max Consecutive Failed Dag Runs' }, - ], - title: "Next Dagrun Logical Date", - }, - next_dagrun_data_interval_start: { - anyOf: [ - { - type: "string", - format: "date-time", + has_task_concurrency_limits: { + type: 'boolean', + title: 'Has Task Concurrency Limits' }, - { - type: "null", + has_import_errors: { + type: 'boolean', + title: 'Has Import Errors' }, - ], - title: "Next Dagrun Data Interval Start", - }, - next_dagrun_data_interval_end: { - anyOf: [ - { - type: "string", - format: "date-time", + next_dagrun_logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Logical Date' }, - { - type: "null", + next_dagrun_data_interval_start: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Data Interval Start' }, - ], - title: "Next Dagrun Data Interval End", - }, - next_dagrun_run_after: { - anyOf: [ - { - type: "string", - format: "date-time", + next_dagrun_data_interval_end: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Data Interval End' }, - { - type: "null", + next_dagrun_run_after: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Run After' }, - ], - title: "Next Dagrun Run After", - }, - owners: { - items: { - type: "string", - }, - type: "array", - title: "Owners", - }, - catchup: { - type: "boolean", - title: "Catchup", - }, - dag_run_timeout: { - anyOf: [ - { - type: "string", - format: "duration", - }, - { - type: "null", - }, - ], - title: "Dag Run Timeout", - }, - asset_expression: { - anyOf: [ - { - additionalProperties: true, - type: "object", + owners: { + items: { + type: 'string' + }, + type: 'array', + title: 'Owners' }, - { - type: "null", + catchup: { + type: 'boolean', + title: 'Catchup' }, - ], - title: "Asset Expression", - }, - doc_md: { - anyOf: [ - { - type: "string", + dag_run_timeout: { + anyOf: [ + { + type: 'string', + format: 'duration' + }, + { + type: 'null' + } + ], + title: 'Dag Run Timeout' }, - { - type: "null", - }, - ], - title: "Doc Md", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Start Date", + asset_expression: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Asset Expression' + }, + doc_md: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Doc Md' + }, + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' + }, + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' + }, + 
is_paused_upon_creation: { + anyOf: [ + { + type: 'boolean' + }, + { + type: 'null' + } + ], + title: 'Is Paused Upon Creation' + }, + params: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Params' + }, + render_template_as_native_obj: { + type: 'boolean', + title: 'Render Template As Native Obj' + }, + template_search_path: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Template Search Path' + }, + timezone: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Timezone' + }, + last_parsed: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Parsed' + }, + default_args: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Default Args' + }, + owner_links: { + anyOf: [ + { + additionalProperties: { + type: 'string' + }, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Owner Links' + }, + file_token: { + type: 'string', + title: 'File Token', + description: 'Return file token.', + readOnly: true + }, + concurrency: { + type: 'integer', + title: 'Concurrency', + description: 'Return max_active_tasks as concurrency.', + readOnly: true + }, + latest_dag_version: { + anyOf: [ + { + '$ref': '#/components/schemas/DagVersionResponse' + }, + { + type: 'null' + } + ], + description: 'Return the latest DagVersion.', + readOnly: true + } }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date", - }, - is_paused_upon_creation: { - anyOf: [ - { - type: "boolean", - }, - { - type: "null", - }, - ], - title: "Is Paused Upon Creation", - }, - params: { - anyOf: [ - { - additionalProperties: true, - type: "object", - }, - { - type: "null", - }, - ], - title: "Params", - }, - render_template_as_native_obj: { - type: "boolean", - title: "Render Template As Native Obj", - }, - template_search_path: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Template Search Path", - }, - timezone: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Timezone", - }, - last_parsed: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Last Parsed", - }, - file_token: { - type: "string", - title: "File Token", - description: "Return file token.", - readOnly: true, - }, - concurrency: { - type: "integer", - title: "Concurrency", - description: "Return max_active_tasks as concurrency.", - readOnly: true, - }, - latest_dag_version: { - anyOf: [ - { - $ref: "#/components/schemas/DagVersionResponse", - }, - { - type: "null", - }, - ], - description: "Return the latest DagVersion.", - readOnly: true, - }, - }, - type: "object", - required: [ - "dag_id", - "dag_display_name", - "is_paused", - "is_stale", - "last_parsed_time", - "last_expired", - "bundle_name", - "relative_fileloc", - "fileloc", - "description", - "timetable_summary", - "timetable_description", - "tags", - "max_active_tasks", - "max_active_runs", - "max_consecutive_failed_dag_runs", - "has_task_concurrency_limits", - "has_import_errors", - "next_dagrun_logical_date", - "next_dagrun_data_interval_start", - "next_dagrun_data_interval_end", - "next_dagrun_run_after", - "owners", - "catchup", - "dag_run_timeout", - "asset_expression", - "doc_md", - "start_date", - "end_date", - "is_paused_upon_creation", 
- "params", - "render_template_as_native_obj", - "template_search_path", - "timezone", - "last_parsed", - "file_token", - "concurrency", - "latest_dag_version", - ], - title: "DAGDetailsResponse", - description: "Specific serializer for DAG Details responses.", + type: 'object', + required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale', 'last_parsed_time', 'last_expired', 'bundle_name', 'bundle_version', 'relative_fileloc', 'fileloc', 'description', 'timetable_summary', 'timetable_description', 'tags', 'max_active_tasks', 'max_active_runs', 'max_consecutive_failed_dag_runs', 'has_task_concurrency_limits', 'has_import_errors', 'next_dagrun_logical_date', 'next_dagrun_data_interval_start', 'next_dagrun_data_interval_end', 'next_dagrun_run_after', 'owners', 'catchup', 'dag_run_timeout', 'asset_expression', 'doc_md', 'start_date', 'end_date', 'is_paused_upon_creation', 'params', 'render_template_as_native_obj', 'template_search_path', 'timezone', 'last_parsed', 'default_args', 'file_token', 'concurrency', 'latest_dag_version'], + title: 'DAGDetailsResponse', + description: 'Specific serializer for DAG Details responses.' } as const; export const $DAGPatchBody = { - properties: { - is_paused: { - type: "boolean", - title: "Is Paused", + properties: { + is_paused: { + type: 'boolean', + title: 'Is Paused' + } }, - }, - additionalProperties: false, - type: "object", - required: ["is_paused"], - title: "DAGPatchBody", - description: "Dag Serializer for updatable bodies.", + additionalProperties: false, + type: 'object', + required: ['is_paused'], + title: 'DAGPatchBody', + description: 'Dag Serializer for updatable bodies.' } as const; export const $DAGResponse = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - dag_display_name: { - type: "string", - title: "Dag Display Name", - }, - is_paused: { - type: "boolean", - title: "Is Paused", - }, - is_stale: { - type: "boolean", - title: "Is Stale", - }, - last_parsed_time: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' }, - { - type: "null", + dag_display_name: { + type: 'string', + title: 'Dag Display Name' }, - ], - title: "Last Parsed Time", - }, - last_expired: { - anyOf: [ - { - type: "string", - format: "date-time", + is_paused: { + type: 'boolean', + title: 'Is Paused' }, - { - type: "null", + is_stale: { + type: 'boolean', + title: 'Is Stale' }, - ], - title: "Last Expired", - }, - bundle_name: { - anyOf: [ - { - type: "string", + last_parsed_time: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Parsed Time' + }, + last_expired: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Expired' }, - { - type: "null", + bundle_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Name' }, - ], - title: "Bundle Name", - }, - relative_fileloc: { - anyOf: [ - { - type: "string", + bundle_version: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Version' }, - { - type: "null", + relative_fileloc: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Relative Fileloc' }, - ], - title: "Relative Fileloc", - }, - fileloc: { - type: "string", - title: "Fileloc", - }, - description: { - anyOf: [ - { - type: "string", + fileloc: { + type: 'string', + title: 'Fileloc' }, - { - type: "null", + description: { + anyOf: [ + { + type: 'string' + }, + { + 
type: 'null' + } + ], + title: 'Description' }, - ], - title: "Description", - }, - timetable_summary: { - anyOf: [ - { - type: "string", + timetable_summary: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Timetable Summary' + }, + timetable_description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Timetable Description' + }, + tags: { + items: { + '$ref': '#/components/schemas/DagTagResponse' + }, + type: 'array', + title: 'Tags' + }, + max_active_tasks: { + type: 'integer', + title: 'Max Active Tasks' + }, + max_active_runs: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Max Active Runs' }, - { - type: "null", + max_consecutive_failed_dag_runs: { + type: 'integer', + title: 'Max Consecutive Failed Dag Runs' }, - ], - title: "Timetable Summary", + has_task_concurrency_limits: { + type: 'boolean', + title: 'Has Task Concurrency Limits' + }, + has_import_errors: { + type: 'boolean', + title: 'Has Import Errors' + }, + next_dagrun_logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Logical Date' + }, + next_dagrun_data_interval_start: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Data Interval Start' + }, + next_dagrun_data_interval_end: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Data Interval End' + }, + next_dagrun_run_after: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Run After' + }, + owners: { + items: { + type: 'string' + }, + type: 'array', + title: 'Owners' + }, + file_token: { + type: 'string', + title: 'File Token', + description: 'Return file token.', + readOnly: true + } }, - timetable_description: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Timetable Description", - }, - tags: { - items: { - $ref: "#/components/schemas/DagTagResponse", - }, - type: "array", - title: "Tags", - }, - max_active_tasks: { - type: "integer", - title: "Max Active Tasks", - }, - max_active_runs: { - anyOf: [ - { - type: "integer", - }, - { - type: "null", - }, - ], - title: "Max Active Runs", - }, - max_consecutive_failed_dag_runs: { - type: "integer", - title: "Max Consecutive Failed Dag Runs", - }, - has_task_concurrency_limits: { - type: "boolean", - title: "Has Task Concurrency Limits", - }, - has_import_errors: { - type: "boolean", - title: "Has Import Errors", - }, - next_dagrun_logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Logical Date", - }, - next_dagrun_data_interval_start: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Data Interval Start", - }, - next_dagrun_data_interval_end: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Data Interval End", - }, - next_dagrun_run_after: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Run After", - }, - owners: { - items: { - type: "string", - }, - type: "array", - title: "Owners", - }, - file_token: { - type: "string", - title: "File Token", - description: "Return file token.", - readOnly: true, - }, - }, - type: "object", - required: [ - "dag_id", - "dag_display_name", - 
"is_paused", - "is_stale", - "last_parsed_time", - "last_expired", - "bundle_name", - "relative_fileloc", - "fileloc", - "description", - "timetable_summary", - "timetable_description", - "tags", - "max_active_tasks", - "max_active_runs", - "max_consecutive_failed_dag_runs", - "has_task_concurrency_limits", - "has_import_errors", - "next_dagrun_logical_date", - "next_dagrun_data_interval_start", - "next_dagrun_data_interval_end", - "next_dagrun_run_after", - "owners", - "file_token", - ], - title: "DAGResponse", - description: "DAG serializer for responses.", + type: 'object', + required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale', 'last_parsed_time', 'last_expired', 'bundle_name', 'bundle_version', 'relative_fileloc', 'fileloc', 'description', 'timetable_summary', 'timetable_description', 'tags', 'max_active_tasks', 'max_active_runs', 'max_consecutive_failed_dag_runs', 'has_task_concurrency_limits', 'has_import_errors', 'next_dagrun_logical_date', 'next_dagrun_data_interval_start', 'next_dagrun_data_interval_end', 'next_dagrun_run_after', 'owners', 'file_token'], + title: 'DAGResponse', + description: 'DAG serializer for responses.' } as const; export const $DAGRunClearBody = { - properties: { - dry_run: { - type: "boolean", - title: "Dry Run", - default: true, - }, - only_failed: { - type: "boolean", - title: "Only Failed", - default: false, - }, - }, - additionalProperties: false, - type: "object", - title: "DAGRunClearBody", - description: "DAG Run serializer for clear endpoint body.", + properties: { + dry_run: { + type: 'boolean', + title: 'Dry Run', + default: true + }, + only_failed: { + type: 'boolean', + title: 'Only Failed', + default: false + }, + run_on_latest_version: { + type: 'boolean', + title: 'Run On Latest Version', + description: '(Experimental) Run on the latest bundle version of the Dag after clearing the Dag Run.', + default: false + } + }, + additionalProperties: false, + type: 'object', + title: 'DAGRunClearBody', + description: 'DAG Run serializer for clear endpoint body.' } as const; export const $DAGRunCollectionResponse = { - properties: { - dag_runs: { - items: { - $ref: "#/components/schemas/DAGRunResponse", - }, - type: "array", - title: "Dag Runs", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + dag_runs: { + items: { + '$ref': '#/components/schemas/DAGRunResponse' + }, + type: 'array', + title: 'Dag Runs' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["dag_runs", "total_entries"], - title: "DAGRunCollectionResponse", - description: "DAG Run Collection serializer for responses.", + type: 'object', + required: ['dag_runs', 'total_entries'], + title: 'DAGRunCollectionResponse', + description: 'DAG Run Collection serializer for responses.' 
} as const; export const $DAGRunPatchBody = { - properties: { - state: { - anyOf: [ - { - $ref: "#/components/schemas/DAGRunPatchStates", - }, - { - type: "null", - }, - ], - }, - note: { - anyOf: [ - { - type: "string", - maxLength: 1000, - }, - { - type: "null", - }, - ], - title: "Note", + properties: { + state: { + anyOf: [ + { + '$ref': '#/components/schemas/DAGRunPatchStates' + }, + { + type: 'null' + } + ] + }, + note: { + anyOf: [ + { + type: 'string', + maxLength: 1000 + }, + { + type: 'null' + } + ], + title: 'Note' + } }, - }, - additionalProperties: false, - type: "object", - title: "DAGRunPatchBody", - description: "DAG Run Serializer for PATCH requests.", + additionalProperties: false, + type: 'object', + title: 'DAGRunPatchBody', + description: 'DAG Run Serializer for PATCH requests.' } as const; export const $DAGRunPatchStates = { - type: "string", - enum: ["queued", "success", "failed"], - title: "DAGRunPatchStates", - description: "Enum for DAG Run states when updating a DAG Run.", + type: 'string', + enum: ['queued', 'success', 'failed'], + title: 'DAGRunPatchStates', + description: 'Enum for DAG Run states when updating a DAG Run.' } as const; export const $DAGRunResponse = { - properties: { - dag_run_id: { - type: "string", - title: "Dag Run Id", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + dag_run_id: { + type: 'string', + title: 'Dag Run Id' }, - { - type: "null", + dag_id: { + type: 'string', + title: 'Dag Id' }, - ], - title: "Logical Date", - }, - queued_at: { - anyOf: [ - { - type: "string", - format: "date-time", + logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date' }, - { - type: "null", + queued_at: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Queued At' }, - ], - title: "Queued At", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' + }, + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' + }, + duration: { + anyOf: [ + { + type: 'number' + }, + { + type: 'null' + } + ], + title: 'Duration' + }, + data_interval_start: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Data Interval Start' }, - { - type: "null", - }, - ], - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date", - }, - data_interval_start: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Data Interval Start", - }, - data_interval_end: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Data Interval End", - }, - run_after: { - type: "string", - format: "date-time", - title: "Run After", - }, - last_scheduling_decision: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Last Scheduling Decision", - }, - run_type: { - $ref: "#/components/schemas/DagRunType", - }, - state: { - $ref: "#/components/schemas/DagRunState", - }, - triggered_by: { - anyOf: [ - { - $ref: "#/components/schemas/DagRunTriggeredByType", - }, - { - type: "null", - }, - ], - }, - 
conf: { - additionalProperties: true, - type: "object", - title: "Conf", - }, - note: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Note", - }, - dag_versions: { - items: { - $ref: "#/components/schemas/DagVersionResponse", - }, - type: "array", - title: "Dag Versions", - }, - }, - type: "object", - required: [ - "dag_run_id", - "dag_id", - "logical_date", - "queued_at", - "start_date", - "end_date", - "data_interval_start", - "data_interval_end", - "run_after", - "last_scheduling_decision", - "run_type", - "state", - "triggered_by", - "conf", - "note", - "dag_versions", - ], - title: "DAGRunResponse", - description: "DAG Run serializer for responses.", + data_interval_end: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Data Interval End' + }, + run_after: { + type: 'string', + format: 'date-time', + title: 'Run After' + }, + last_scheduling_decision: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Scheduling Decision' + }, + run_type: { + '$ref': '#/components/schemas/DagRunType' + }, + state: { + '$ref': '#/components/schemas/DagRunState' + }, + triggered_by: { + anyOf: [ + { + '$ref': '#/components/schemas/DagRunTriggeredByType' + }, + { + type: 'null' + } + ] + }, + triggering_user_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Triggering User Name' + }, + conf: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Conf' + }, + note: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Note' + }, + dag_versions: { + items: { + '$ref': '#/components/schemas/DagVersionResponse' + }, + type: 'array', + title: 'Dag Versions' + }, + bundle_version: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Version' + }, + dag_display_name: { + type: 'string', + title: 'Dag Display Name' + } + }, + type: 'object', + required: ['dag_run_id', 'dag_id', 'logical_date', 'queued_at', 'start_date', 'end_date', 'duration', 'data_interval_start', 'data_interval_end', 'run_after', 'last_scheduling_decision', 'run_type', 'state', 'triggered_by', 'triggering_user_name', 'conf', 'note', 'dag_versions', 'bundle_version', 'dag_display_name'], + title: 'DAGRunResponse', + description: 'DAG Run serializer for responses.' 
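
Reviewer note: the `$DAGRunResponse` required list in this hunk now includes `duration`, `triggering_user_name`, `bundle_version`, and `dag_display_name`, and `conf` becomes nullable. A hand-written TypeScript rendering of just the changed properties (the real client types are generated from this schema; this interface only mirrors the diff for readability):

```ts
// Each nullable field above uses the same anyOf: [{ type: T }, { type: 'null' }]
// pattern, which maps to `T | null` in TypeScript.
interface DAGRunResponseChanges {
  duration: number | null;               // new required field
  triggering_user_name: string | null;   // new required field
  conf: Record<string, unknown> | null;  // was a plain required object before
  bundle_version: string | null;         // new required field
  dag_display_name: string;              // new required field, non-nullable
}
```
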
} as const; export const $DAGRunsBatchBody = { - properties: { - order_by: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Order By", - }, - page_offset: { - type: "integer", - minimum: 0, - title: "Page Offset", - default: 0, - }, - page_limit: { - type: "integer", - minimum: 0, - title: "Page Limit", - default: 100, - }, - dag_ids: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Dag Ids", - }, - states: { - anyOf: [ - { - items: { - anyOf: [ - { - $ref: "#/components/schemas/DagRunState", - }, - { - type: "null", - }, - ], - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "States", - }, - run_after_gte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Run After Gte", - }, - run_after_lte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Run After Lte", - }, - logical_date_gte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Logical Date Gte", - }, - logical_date_lte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", + properties: { + order_by: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Order By' }, - ], - title: "Logical Date Lte", - }, - start_date_gte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", + page_offset: { + type: 'integer', + minimum: 0, + title: 'Page Offset', + default: 0 }, - ], - title: "Start Date Gte", - }, - start_date_lte: { - anyOf: [ - { - type: "string", - format: "date-time", + page_limit: { + type: 'integer', + minimum: 0, + title: 'Page Limit', + default: 100 }, - { - type: "null", + dag_ids: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Dag Ids' }, - ], - title: "Start Date Lte", - }, - end_date_gte: { - anyOf: [ - { - type: "string", - format: "date-time", + states: { + anyOf: [ + { + items: { + anyOf: [ + { + '$ref': '#/components/schemas/DagRunState' + }, + { + type: 'null' + } + ] + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'States' }, - { - type: "null", + run_after_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Run After Gte' }, - ], - title: "End Date Gte", - }, - end_date_lte: { - anyOf: [ - { - type: "string", - format: "date-time", + run_after_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Run After Lte' }, - { - type: "null", + logical_date_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date Gte' + }, + logical_date_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date Lte' + }, + start_date_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date Gte' + }, + start_date_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date Lte' }, - ], - title: "End Date Lte", + end_date_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date Gte' + }, + end_date_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date Lte' + } }, - }, - 
additionalProperties: false, - type: "object", - title: "DAGRunsBatchBody", - description: "List DAG Runs body for batch endpoint.", + additionalProperties: false, + type: 'object', + title: 'DAGRunsBatchBody', + description: 'List DAG Runs body for batch endpoint.' } as const; export const $DAGSourceResponse = { - properties: { - content: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + properties: { + content: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Content' }, - ], - title: "Content", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - version_number: { - anyOf: [ - { - type: "integer", + dag_id: { + type: 'string', + title: 'Dag Id' }, - { - type: "null", + version_number: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Version Number' }, - ], - title: "Version Number", + dag_display_name: { + type: 'string', + title: 'Dag Display Name' + } }, - }, - type: "object", - required: ["content", "dag_id", "version_number"], - title: "DAGSourceResponse", - description: "DAG Source serializer for responses.", + type: 'object', + required: ['content', 'dag_id', 'version_number', 'dag_display_name'], + title: 'DAGSourceResponse', + description: 'DAG Source serializer for responses.' } as const; export const $DAGTagCollectionResponse = { - properties: { - tags: { - items: { - type: "string", - }, - type: "array", - title: "Tags", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + tags: { + items: { + type: 'string' + }, + type: 'array', + title: 'Tags' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["tags", "total_entries"], - title: "DAGTagCollectionResponse", - description: "DAG Tags Collection serializer for responses.", + type: 'object', + required: ['tags', 'total_entries'], + title: 'DAGTagCollectionResponse', + description: 'DAG Tags Collection serializer for responses.' } as const; export const $DAGVersionCollectionResponse = { - properties: { - dag_versions: { - items: { - $ref: "#/components/schemas/DagVersionResponse", - }, - type: "array", - title: "Dag Versions", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + dag_versions: { + items: { + '$ref': '#/components/schemas/DagVersionResponse' + }, + type: 'array', + title: 'Dag Versions' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["dag_versions", "total_entries"], - title: "DAGVersionCollectionResponse", - description: "DAG Version Collection serializer for responses.", + type: 'object', + required: ['dag_versions', 'total_entries'], + title: 'DAGVersionCollectionResponse', + description: 'DAG Version Collection serializer for responses.' 
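
Reviewer note on `$DAGRunsBatchBody`, finalized at the top of this hunk: every filter is optional, paging defaults to offset 0 / limit 100, and the `states` items are themselves `anyOf: [DagRunState, null]`, so a `null` entry is schema-valid alongside state names. An illustrative body (all values invented):

```ts
// A request body that type-checks against the batch schema's shape.
const batchQuery = {
  dag_ids: ['example_dag'],
  states: ['failed', null],              // null entries are allowed by the schema
  run_after_gte: '2025-01-01T00:00:00Z', // date-time formatted string
  order_by: 'run_after',
  page_offset: 0,
  page_limit: 50,
};
```
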
} as const; export const $DAGWarningCollectionResponse = { - properties: { - dag_warnings: { - items: { - $ref: "#/components/schemas/DAGWarningResponse", - }, - type: "array", - title: "Dag Warnings", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + dag_warnings: { + items: { + '$ref': '#/components/schemas/DAGWarningResponse' + }, + type: 'array', + title: 'Dag Warnings' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["dag_warnings", "total_entries"], - title: "DAGWarningCollectionResponse", - description: "DAG warning collection serializer for responses.", + type: 'object', + required: ['dag_warnings', 'total_entries'], + title: 'DAGWarningCollectionResponse', + description: 'DAG warning collection serializer for responses.' } as const; export const $DAGWarningResponse = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - warning_type: { - $ref: "#/components/schemas/DagWarningType", - }, - message: { - type: "string", - title: "Message", - }, - timestamp: { - type: "string", - format: "date-time", - title: "Timestamp", + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' + }, + warning_type: { + '$ref': '#/components/schemas/DagWarningType' + }, + message: { + type: 'string', + title: 'Message' + }, + timestamp: { + type: 'string', + format: 'date-time', + title: 'Timestamp' + } }, - }, - type: "object", - required: ["dag_id", "warning_type", "message", "timestamp"], - title: "DAGWarningResponse", - description: "DAG Warning serializer for responses.", + type: 'object', + required: ['dag_id', 'warning_type', 'message', 'timestamp'], + title: 'DAGWarningResponse', + description: 'DAG Warning serializer for responses.' } as const; export const $DagProcessorInfoResponse = { - properties: { - status: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Status", - }, - latest_dag_processor_heartbeat: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + properties: { + status: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Status' }, - ], - title: "Latest Dag Processor Heartbeat", + latest_dag_processor_heartbeat: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Latest Dag Processor Heartbeat' + } }, - }, - type: "object", - required: ["status", "latest_dag_processor_heartbeat"], - title: "DagProcessorInfoResponse", - description: "DagProcessor info serializer for responses.", + type: 'object', + required: ['status', 'latest_dag_processor_heartbeat'], + title: 'DagProcessorInfoResponse', + description: 'DagProcessor info serializer for responses.' 
} as const; export const $DagRunAssetReference = { - properties: { - run_id: { - type: "string", - title: "Run Id", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Logical Date", - }, - start_date: { - type: "string", - format: "date-time", - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date", - }, - state: { - type: "string", - title: "State", - }, - data_interval_start: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Data Interval Start", - }, - data_interval_end: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Data Interval End", - }, - }, - additionalProperties: false, - type: "object", - required: [ - "run_id", - "dag_id", - "logical_date", - "start_date", - "end_date", - "state", - "data_interval_start", - "data_interval_end", - ], - title: "DagRunAssetReference", - description: "DAGRun serializer for asset responses.", + properties: { + run_id: { + type: 'string', + title: 'Run Id' + }, + dag_id: { + type: 'string', + title: 'Dag Id' + }, + logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date' + }, + start_date: { + type: 'string', + format: 'date-time', + title: 'Start Date' + }, + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' + }, + state: { + type: 'string', + title: 'State' + }, + data_interval_start: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Data Interval Start' + }, + data_interval_end: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Data Interval End' + } + }, + additionalProperties: false, + type: 'object', + required: ['run_id', 'dag_id', 'logical_date', 'start_date', 'end_date', 'state', 'data_interval_start', 'data_interval_end'], + title: 'DagRunAssetReference', + description: 'DAGRun serializer for asset responses.' } as const; export const $DagRunState = { - type: "string", - enum: ["queued", "running", "success", "failed"], - title: "DagRunState", - description: `All possible states that a DagRun can be in. + type: 'string', + enum: ['queued', 'running', 'success', 'failed'], + title: 'DagRunState', + description: `All possible states that a DagRun can be in. These are "shared" with TaskInstanceState in some parts of the code, so please ensure that their values always match the ones with the -same name in TaskInstanceState.`, +same name in TaskInstanceState.` } as const; export const $DagRunTriggeredByType = { - type: "string", - enum: ["cli", "operator", "rest_api", "ui", "test", "timetable", "asset", "backfill"], - title: "DagRunTriggeredByType", - description: "Class with TriggeredBy types for DagRun.", + type: 'string', + enum: ['cli', 'operator', 'rest_api', 'ui', 'test', 'timetable', 'asset', 'backfill'], + title: 'DagRunTriggeredByType', + description: 'Class with TriggeredBy types for DagRun.' 
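
Reviewer note: because these constants are declared `as const`, enum arrays such as `$DagRunState.enum` keep their literal element types, so they can back both a derived union type and a runtime guard. A sketch — the import path is illustrative:

```ts
import { $DagRunState } from './schemas.gen';

// 'queued' | 'running' | 'success' | 'failed', derived from the schema
type DagRunState = (typeof $DagRunState.enum)[number];

function isDagRunState(value: string): value is DagRunState {
  // Widen the readonly literal tuple so .includes accepts any string.
  return ($DagRunState.enum as readonly string[]).includes(value);
}
```
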
} as const; export const $DagRunType = { - type: "string", - enum: ["backfill", "scheduled", "manual", "asset_triggered"], - title: "DagRunType", - description: "Class with DagRun types.", + type: 'string', + enum: ['backfill', 'scheduled', 'manual', 'asset_triggered'], + title: 'DagRunType', + description: 'Class with DagRun types.' } as const; export const $DagScheduleAssetReference = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - created_at: { - type: "string", - format: "date-time", - title: "Created At", - }, - updated_at: { - type: "string", - format: "date-time", - title: "Updated At", + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' + }, + created_at: { + type: 'string', + format: 'date-time', + title: 'Created At' + }, + updated_at: { + type: 'string', + format: 'date-time', + title: 'Updated At' + } }, - }, - additionalProperties: false, - type: "object", - required: ["dag_id", "created_at", "updated_at"], - title: "DagScheduleAssetReference", - description: "DAG schedule reference serializer for assets.", + additionalProperties: false, + type: 'object', + required: ['dag_id', 'created_at', 'updated_at'], + title: 'DagScheduleAssetReference', + description: 'DAG schedule reference serializer for assets.' } as const; export const $DagStatsCollectionResponse = { - properties: { - dags: { - items: { - $ref: "#/components/schemas/DagStatsResponse", - }, - type: "array", - title: "Dags", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + dags: { + items: { + '$ref': '#/components/schemas/DagStatsResponse' + }, + type: 'array', + title: 'Dags' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["dags", "total_entries"], - title: "DagStatsCollectionResponse", - description: "DAG Stats Collection serializer for responses.", + type: 'object', + required: ['dags', 'total_entries'], + title: 'DagStatsCollectionResponse', + description: 'DAG Stats Collection serializer for responses.' } as const; export const $DagStatsResponse = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - stats: { - items: { - $ref: "#/components/schemas/DagStatsStateResponse", - }, - type: "array", - title: "Stats", + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' + }, + dag_display_name: { + type: 'string', + title: 'Dag Display Name' + }, + stats: { + items: { + '$ref': '#/components/schemas/DagStatsStateResponse' + }, + type: 'array', + title: 'Stats' + } }, - }, - type: "object", - required: ["dag_id", "stats"], - title: "DagStatsResponse", - description: "DAG Stats serializer for responses.", + type: 'object', + required: ['dag_id', 'dag_display_name', 'stats'], + title: 'DagStatsResponse', + description: 'DAG Stats serializer for responses.' } as const; export const $DagStatsStateResponse = { - properties: { - state: { - $ref: "#/components/schemas/DagRunState", - }, - count: { - type: "integer", - title: "Count", + properties: { + state: { + '$ref': '#/components/schemas/DagRunState' + }, + count: { + type: 'integer', + title: 'Count' + } }, - }, - type: "object", - required: ["state", "count"], - title: "DagStatsStateResponse", - description: "DagStatsState serializer for responses.", + type: 'object', + required: ['state', 'count'], + title: 'DagStatsStateResponse', + description: 'DagStatsState serializer for responses.' 
} as const; export const $DagTagResponse = { - properties: { - name: { - type: "string", - title: "Name", - }, - dag_id: { - type: "string", - title: "Dag Id", + properties: { + name: { + type: 'string', + title: 'Name' + }, + dag_id: { + type: 'string', + title: 'Dag Id' + } }, - }, - type: "object", - required: ["name", "dag_id"], - title: "DagTagResponse", - description: "DAG Tag serializer for responses.", + type: 'object', + required: ['name', 'dag_id'], + title: 'DagTagResponse', + description: 'DAG Tag serializer for responses.' } as const; export const $DagVersionResponse = { - properties: { - id: { - type: "string", - format: "uuid", - title: "Id", - }, - version_number: { - type: "integer", - title: "Version Number", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - bundle_name: { - anyOf: [ - { - type: "string", + properties: { + id: { + type: 'string', + format: 'uuid', + title: 'Id' }, - { - type: "null", + version_number: { + type: 'integer', + title: 'Version Number' }, - ], - title: "Bundle Name", - }, - bundle_version: { - anyOf: [ - { - type: "string", + dag_id: { + type: 'string', + title: 'Dag Id' + }, + bundle_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Name' }, - { - type: "null", + bundle_version: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Version' }, - ], - title: "Bundle Version", - }, - created_at: { - type: "string", - format: "date-time", - title: "Created At", - }, - bundle_url: { - anyOf: [ - { - type: "string", + created_at: { + type: 'string', + format: 'date-time', + title: 'Created At' }, - { - type: "null", + dag_display_name: { + type: 'string', + title: 'Dag Display Name' }, - ], - title: "Bundle Url", - readOnly: true, + bundle_url: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Url', + readOnly: true + } }, - }, - type: "object", - required: ["id", "version_number", "dag_id", "bundle_name", "bundle_version", "created_at", "bundle_url"], - title: "DagVersionResponse", - description: "Dag Version serializer for responses.", + type: 'object', + required: ['id', 'version_number', 'dag_id', 'bundle_name', 'bundle_version', 'created_at', 'dag_display_name', 'bundle_url'], + title: 'DagVersionResponse', + description: 'Dag Version serializer for responses.' } as const; export const $DagWarningType = { - type: "string", - enum: ["asset conflict", "non-existent pool"], - title: "DagWarningType", - description: `Enum for DAG warning types. + type: 'string', + enum: ['asset conflict', 'non-existent pool'], + title: 'DagWarningType', + description: `Enum for DAG warning types. 
This is the set of allowable values for the \`\`warning_type\`\` field -in the DagWarning model.`, +in the DagWarning model.` } as const; export const $DryRunBackfillCollectionResponse = { - properties: { - backfills: { - items: { - $ref: "#/components/schemas/DryRunBackfillResponse", - }, - type: "array", - title: "Backfills", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + backfills: { + items: { + '$ref': '#/components/schemas/DryRunBackfillResponse' + }, + type: 'array', + title: 'Backfills' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["backfills", "total_entries"], - title: "DryRunBackfillCollectionResponse", - description: "Backfill collection serializer for responses in dry-run mode.", + type: 'object', + required: ['backfills', 'total_entries'], + title: 'DryRunBackfillCollectionResponse', + description: 'Backfill collection serializer for responses in dry-run mode.' } as const; export const $DryRunBackfillResponse = { - properties: { - logical_date: { - type: "string", - format: "date-time", - title: "Logical Date", + properties: { + logical_date: { + type: 'string', + format: 'date-time', + title: 'Logical Date' + } }, - }, - type: "object", - required: ["logical_date"], - title: "DryRunBackfillResponse", - description: "Backfill serializer for responses in dry-run mode.", + type: 'object', + required: ['logical_date'], + title: 'DryRunBackfillResponse', + description: 'Backfill serializer for responses in dry-run mode.' } as const; export const $EventLogCollectionResponse = { - properties: { - event_logs: { - items: { - $ref: "#/components/schemas/EventLogResponse", - }, - type: "array", - title: "Event Logs", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + event_logs: { + items: { + '$ref': '#/components/schemas/EventLogResponse' + }, + type: 'array', + title: 'Event Logs' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["event_logs", "total_entries"], - title: "EventLogCollectionResponse", - description: "Event Log Collection Response.", + type: 'object', + required: ['event_logs', 'total_entries'], + title: 'EventLogCollectionResponse', + description: 'Event Log Collection Response.' 
} as const; export const $EventLogResponse = { - properties: { - event_log_id: { - type: "integer", - title: "Event Log Id", + properties: { + event_log_id: { + type: 'integer', + title: 'Event Log Id' + }, + when: { + type: 'string', + format: 'date-time', + title: 'When' + }, + dag_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Dag Id' + }, + task_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Task Id' + }, + run_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Run Id' + }, + map_index: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Map Index' + }, + try_number: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Try Number' + }, + event: { + type: 'string', + title: 'Event' + }, + logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date' + }, + owner: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Owner' + }, + extra: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Extra' + }, + dag_display_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Dag Display Name' + } }, - when: { - type: "string", - format: "date-time", - title: "When", + type: 'object', + required: ['event_log_id', 'when', 'dag_id', 'task_id', 'run_id', 'map_index', 'try_number', 'event', 'logical_date', 'owner', 'extra'], + title: 'EventLogResponse', + description: 'Event Log Response.' +} as const; + +export const $ExternalLogUrlResponse = { + properties: { + url: { + type: 'string', + title: 'Url' + } }, - dag_id: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Dag Id", - }, - task_id: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Task Id", - }, - run_id: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Run Id", - }, - map_index: { - anyOf: [ - { - type: "integer", - }, - { - type: "null", - }, - ], - title: "Map Index", - }, - try_number: { - anyOf: [ - { - type: "integer", - }, - { - type: "null", - }, - ], - title: "Try Number", - }, - event: { - type: "string", - title: "Event", - }, - logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Logical Date", - }, - owner: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Owner", - }, - extra: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Extra", - }, - }, - type: "object", - required: [ - "event_log_id", - "when", - "dag_id", - "task_id", - "run_id", - "map_index", - "try_number", - "event", - "logical_date", - "owner", - "extra", - ], - title: "EventLogResponse", - description: "Event Log Response.", + type: 'object', + required: ['url'], + title: 'ExternalLogUrlResponse', + description: 'Response for the external log URL endpoint.' 
+} as const; + +export const $ExternalViewResponse = { + properties: { + name: { + type: 'string', + title: 'Name' + }, + icon: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Icon' + }, + icon_dark_mode: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Icon Dark Mode' + }, + url_route: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Url Route' + }, + category: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Category' + }, + href: { + type: 'string', + title: 'Href' + }, + destination: { + type: 'string', + enum: ['nav', 'dag', 'dag_run', 'task', 'task_instance'], + title: 'Destination', + default: 'nav' + } + }, + additionalProperties: true, + type: 'object', + required: ['name', 'href'], + title: 'ExternalViewResponse', + description: 'Serializer for External View Plugin responses.' } as const; export const $ExtraLinkCollectionResponse = { - properties: { - extra_links: { - additionalProperties: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - }, - type: "object", - title: "Extra Links", - }, - total_entries: { - type: "integer", - title: "Total Entries", - }, - }, - type: "object", - required: ["extra_links", "total_entries"], - title: "ExtraLinkCollectionResponse", - description: "Extra Links Response.", + properties: { + extra_links: { + additionalProperties: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ] + }, + type: 'object', + title: 'Extra Links' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } + }, + type: 'object', + required: ['extra_links', 'total_entries'], + title: 'ExtraLinkCollectionResponse', + description: 'Extra Links Response.' } as const; export const $FastAPIAppResponse = { - properties: { - app: { - type: "string", - title: "App", - }, - url_prefix: { - type: "string", - title: "Url Prefix", - }, - name: { - type: "string", - title: "Name", + properties: { + app: { + type: 'string', + title: 'App' + }, + url_prefix: { + type: 'string', + title: 'Url Prefix' + }, + name: { + type: 'string', + title: 'Name' + } }, - }, - additionalProperties: true, - type: "object", - required: ["app", "url_prefix", "name"], - title: "FastAPIAppResponse", - description: "Serializer for Plugin FastAPI App responses.", + additionalProperties: true, + type: 'object', + required: ['app', 'url_prefix', 'name'], + title: 'FastAPIAppResponse', + description: 'Serializer for Plugin FastAPI App responses.' } as const; export const $FastAPIRootMiddlewareResponse = { - properties: { - middleware: { - type: "string", - title: "Middleware", - }, - name: { - type: "string", - title: "Name", + properties: { + middleware: { + type: 'string', + title: 'Middleware' + }, + name: { + type: 'string', + title: 'Name' + } + }, + additionalProperties: true, + type: 'object', + required: ['middleware', 'name'], + title: 'FastAPIRootMiddlewareResponse', + description: 'Serializer for Plugin FastAPI root middleware responses.' 
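
Reviewer note: `$ExternalViewResponse`, added at the top of this hunk, is one of the new plugin surfaces. Only `name` and `href` are required, `destination` is constrained to a small enum defaulting to `'nav'`, and `additionalProperties: true` leaves room for extra keys. An illustrative value (names invented):

```ts
// A value satisfying the new external-view schema.
const externalView = {
  name: 'Team Runbook',
  href: 'https://example.com/runbook', // required
  destination: 'dag' as const,         // nav | dag | dag_run | task | task_instance
  category: 'docs',                    // optional, nullable
  icon: null,                          // nullable fields may be sent as null
};
```
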
+} as const; + +export const $HITLDetail = { + properties: { + task_instance: { + '$ref': '#/components/schemas/TaskInstanceResponse' + }, + options: { + items: { + type: 'string' + }, + type: 'array', + minItems: 1, + title: 'Options' + }, + subject: { + type: 'string', + title: 'Subject' + }, + body: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Body' + }, + defaults: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Defaults' + }, + multiple: { + type: 'boolean', + title: 'Multiple', + default: false + }, + params: { + additionalProperties: true, + type: 'object', + title: 'Params' + }, + user_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'User Id' + }, + response_at: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Response At' + }, + chosen_options: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Chosen Options' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' + }, + response_received: { + type: 'boolean', + title: 'Response Received', + default: false + } + }, + type: 'object', + required: ['task_instance', 'options', 'subject'], + title: 'HITLDetail', + description: 'Schema for Human-in-the-loop detail.' +} as const; + +export const $HITLDetailCollection = { + properties: { + hitl_details: { + items: { + '$ref': '#/components/schemas/HITLDetail' + }, + type: 'array', + title: 'Hitl Details' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } + }, + type: 'object', + required: ['hitl_details', 'total_entries'], + title: 'HITLDetailCollection', + description: 'Schema for a collection of Human-in-the-loop details.' +} as const; + +export const $HITLDetailResponse = { + properties: { + user_id: { + type: 'string', + title: 'User Id' + }, + response_at: { + type: 'string', + format: 'date-time', + title: 'Response At' + }, + chosen_options: { + items: { + type: 'string' + }, + type: 'array', + minItems: 1, + title: 'Chosen Options' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' + } }, - }, - additionalProperties: true, - type: "object", - required: ["middleware", "name"], - title: "FastAPIRootMiddlewareResponse", - description: "Serializer for Plugin FastAPI root middleware responses.", + type: 'object', + required: ['user_id', 'response_at', 'chosen_options'], + title: 'HITLDetailResponse', + description: 'Response of updating a Human-in-the-loop detail.' } as const; export const $HTTPExceptionResponse = { - properties: { - detail: { - anyOf: [ - { - type: "string", - }, - { - additionalProperties: true, - type: "object", - }, - ], - title: "Detail", + properties: { + detail: { + anyOf: [ + { + type: 'string' + }, + { + additionalProperties: true, + type: 'object' + } + ], + title: 'Detail' + } }, - }, - type: "object", - required: ["detail"], - title: "HTTPExceptionResponse", - description: "HTTPException Model used for error response.", + type: 'object', + required: ['detail'], + title: 'HTTPExceptionResponse', + description: 'HTTPException Model used for error response.' 
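
Reviewer note: the Human-in-the-loop schemas in this hunk are new — `$HITLDetail` describes the prompt attached to a task instance and `$HITLDetailResponse` the recorded answer. A payload satisfying the response schema's required fields (`user_id`, `response_at`, `chosen_options`) might look like this, with invented values:

```ts
const hitlAnswer = {
  user_id: 'admin',
  response_at: '2025-01-01T12:00:00Z', // date-time formatted string
  chosen_options: ['approve'],         // minItems: 1
  params_input: {},                    // optional free-form object
};
```
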
} as const; export const $HTTPValidationError = { - properties: { - detail: { - items: { - $ref: "#/components/schemas/ValidationError", - }, - type: "array", - title: "Detail", + properties: { + detail: { + items: { + '$ref': '#/components/schemas/ValidationError' + }, + type: 'array', + title: 'Detail' + } }, - }, - type: "object", - title: "HTTPValidationError", + type: 'object', + title: 'HTTPValidationError' } as const; export const $HealthInfoResponse = { - properties: { - metadatabase: { - $ref: "#/components/schemas/BaseInfoResponse", - }, - scheduler: { - $ref: "#/components/schemas/SchedulerInfoResponse", - }, - triggerer: { - $ref: "#/components/schemas/TriggererInfoResponse", - }, - dag_processor: { - anyOf: [ - { - $ref: "#/components/schemas/DagProcessorInfoResponse", + properties: { + metadatabase: { + '$ref': '#/components/schemas/BaseInfoResponse' }, - { - type: "null", + scheduler: { + '$ref': '#/components/schemas/SchedulerInfoResponse' }, - ], + triggerer: { + '$ref': '#/components/schemas/TriggererInfoResponse' + }, + dag_processor: { + anyOf: [ + { + '$ref': '#/components/schemas/DagProcessorInfoResponse' + }, + { + type: 'null' + } + ] + } }, - }, - type: "object", - required: ["metadatabase", "scheduler", "triggerer"], - title: "HealthInfoResponse", - description: "Health serializer for responses.", + type: 'object', + required: ['metadatabase', 'scheduler', 'triggerer'], + title: 'HealthInfoResponse', + description: 'Health serializer for responses.' } as const; export const $ImportErrorCollectionResponse = { - properties: { - import_errors: { - items: { - $ref: "#/components/schemas/ImportErrorResponse", - }, - type: "array", - title: "Import Errors", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + import_errors: { + items: { + '$ref': '#/components/schemas/ImportErrorResponse' + }, + type: 'array', + title: 'Import Errors' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["import_errors", "total_entries"], - title: "ImportErrorCollectionResponse", - description: "Import Error Collection Response.", + type: 'object', + required: ['import_errors', 'total_entries'], + title: 'ImportErrorCollectionResponse', + description: 'Import Error Collection Response.' } as const; export const $ImportErrorResponse = { - properties: { - import_error_id: { - type: "integer", - title: "Import Error Id", - }, - timestamp: { - type: "string", - format: "date-time", - title: "Timestamp", - }, - filename: { - type: "string", - title: "Filename", - }, - bundle_name: { - anyOf: [ - { - type: "string", + properties: { + import_error_id: { + type: 'integer', + title: 'Import Error Id' }, - { - type: "null", + timestamp: { + type: 'string', + format: 'date-time', + title: 'Timestamp' }, - ], - title: "Bundle Name", - }, - stack_trace: { - type: "string", - title: "Stack Trace", + filename: { + type: 'string', + title: 'Filename' + }, + bundle_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Name' + }, + stack_trace: { + type: 'string', + title: 'Stack Trace' + } }, - }, - type: "object", - required: ["import_error_id", "timestamp", "filename", "bundle_name", "stack_trace"], - title: "ImportErrorResponse", - description: "Import Error Response.", + type: 'object', + required: ['import_error_id', 'timestamp', 'filename', 'bundle_name', 'stack_trace'], + title: 'ImportErrorResponse', + description: 'Import Error Response.' 
} as const; export const $JobCollectionResponse = { - properties: { - jobs: { - items: { - $ref: "#/components/schemas/JobResponse", - }, - type: "array", - title: "Jobs", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + jobs: { + items: { + '$ref': '#/components/schemas/JobResponse' + }, + type: 'array', + title: 'Jobs' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["jobs", "total_entries"], - title: "JobCollectionResponse", - description: "Job Collection Response.", + type: 'object', + required: ['jobs', 'total_entries'], + title: 'JobCollectionResponse', + description: 'Job Collection Response.' } as const; export const $JobResponse = { - properties: { - id: { - type: "integer", - title: "Id", - }, - dag_id: { - anyOf: [ - { - type: "string", + properties: { + id: { + type: 'integer', + title: 'Id' + }, + dag_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Dag Id' + }, + state: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'State' + }, + job_type: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Job Type' + }, + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' + }, + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' + }, + latest_heartbeat: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Latest Heartbeat' }, - { - type: "null", + executor_class: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Executor Class' + }, + hostname: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Hostname' + }, + unixname: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Unixname' }, - ], - title: "Dag Id", - }, - state: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "State", - }, - job_type: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Job Type", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date", - }, - latest_heartbeat: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Latest Heartbeat", - }, - executor_class: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Executor Class", - }, - hostname: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Hostname", - }, - unixname: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + dag_display_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Dag Display Name' + } + }, + type: 'object', + required: ['id', 'dag_id', 'state', 'job_type', 'start_date', 'end_date', 'latest_heartbeat', 'executor_class', 'hostname', 'unixname'], + title: 'JobResponse', + description: 'Job serializer for responses.' 
+} as const; + +export const $JsonValue = {} as const; + +export const $LastAssetEventResponse = { + properties: { + id: { + anyOf: [ + { + type: 'integer', + minimum: 0 + }, + { + type: 'null' + } + ], + title: 'Id' }, - ], - title: "Unixname", + timestamp: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Timestamp' + } }, - }, - type: "object", - required: [ - "id", - "dag_id", - "state", - "job_type", - "start_date", - "end_date", - "latest_heartbeat", - "executor_class", - "hostname", - "unixname", - ], - title: "JobResponse", - description: "Job serializer for responses.", + type: 'object', + title: 'LastAssetEventResponse', + description: 'Last asset event response serializer.' } as const; export const $PatchTaskInstanceBody = { - properties: { - new_state: { - anyOf: [ - { - $ref: "#/components/schemas/TaskInstanceState", + properties: { + new_state: { + anyOf: [ + { + '$ref': '#/components/schemas/TaskInstanceState' + }, + { + type: 'null' + } + ] + }, + note: { + anyOf: [ + { + type: 'string', + maxLength: 1000 + }, + { + type: 'null' + } + ], + title: 'Note' }, - { - type: "null", + include_upstream: { + type: 'boolean', + title: 'Include Upstream', + default: false }, - ], - }, - note: { - anyOf: [ - { - type: "string", - maxLength: 1000, + include_downstream: { + type: 'boolean', + title: 'Include Downstream', + default: false }, - { - type: "null", + include_future: { + type: 'boolean', + title: 'Include Future', + default: false }, - ], - title: "Note", - }, - include_upstream: { - type: "boolean", - title: "Include Upstream", - default: false, - }, - include_downstream: { - type: "boolean", - title: "Include Downstream", - default: false, + include_past: { + type: 'boolean', + title: 'Include Past', + default: false + } }, - include_future: { - type: "boolean", - title: "Include Future", - default: false, - }, - include_past: { - type: "boolean", - title: "Include Past", - default: false, - }, - }, - additionalProperties: false, - type: "object", - title: "PatchTaskInstanceBody", - description: "Request body for Clear Task Instances endpoint.", + additionalProperties: false, + type: 'object', + title: 'PatchTaskInstanceBody', + description: 'Request body for Clear Task Instances endpoint.' } as const; export const $PluginCollectionResponse = { - properties: { - plugins: { - items: { - $ref: "#/components/schemas/PluginResponse", - }, - type: "array", - title: "Plugins", + properties: { + plugins: { + items: { + '$ref': '#/components/schemas/PluginResponse' + }, + type: 'array', + title: 'Plugins' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } + }, + type: 'object', + required: ['plugins', 'total_entries'], + title: 'PluginCollectionResponse', + description: 'Plugin Collection serializer.' +} as const; + +export const $PluginImportErrorCollectionResponse = { + properties: { + import_errors: { + items: { + '$ref': '#/components/schemas/PluginImportErrorResponse' + }, + type: 'array', + title: 'Import Errors' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - total_entries: { - type: "integer", - title: "Total Entries", + type: 'object', + required: ['import_errors', 'total_entries'], + title: 'PluginImportErrorCollectionResponse', + description: 'Plugin Import Error Collection serializer.' 
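
Reviewer note: `$PatchTaskInstanceBody` in this hunk is only requoted, not changed in shape; it pairs the new state and note with cascade flags that all default to false. For instance (values invented):

```ts
const patchBody = {
  new_state: 'success',                      // TaskInstanceState value or null
  note: 'Marked success after manual check', // maxLength 1000
  include_downstream: true,                  // the cascade flags default to false
};
```
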
+} as const; + +export const $PluginImportErrorResponse = { + properties: { + source: { + type: 'string', + title: 'Source' + }, + error: { + type: 'string', + title: 'Error' + } }, - }, - type: "object", - required: ["plugins", "total_entries"], - title: "PluginCollectionResponse", - description: "Plugin Collection serializer.", + type: 'object', + required: ['source', 'error'], + title: 'PluginImportErrorResponse', + description: 'Plugin Import Error serializer for responses.' } as const; export const $PluginResponse = { - properties: { - name: { - type: "string", - title: "Name", - }, - macros: { - items: { - type: "string", - }, - type: "array", - title: "Macros", - }, - flask_blueprints: { - items: { - type: "string", - }, - type: "array", - title: "Flask Blueprints", - }, - fastapi_apps: { - items: { - $ref: "#/components/schemas/FastAPIAppResponse", - }, - type: "array", - title: "Fastapi Apps", - }, - fastapi_root_middlewares: { - items: { - $ref: "#/components/schemas/FastAPIRootMiddlewareResponse", - }, - type: "array", - title: "Fastapi Root Middlewares", - }, - appbuilder_views: { - items: { - $ref: "#/components/schemas/AppBuilderViewResponse", - }, - type: "array", - title: "Appbuilder Views", - }, - appbuilder_menu_items: { - items: { - $ref: "#/components/schemas/AppBuilderMenuItemResponse", - }, - type: "array", - title: "Appbuilder Menu Items", - }, - global_operator_extra_links: { - items: { - type: "string", - }, - type: "array", - title: "Global Operator Extra Links", - }, - operator_extra_links: { - items: { - type: "string", - }, - type: "array", - title: "Operator Extra Links", - }, - source: { - type: "string", - title: "Source", - }, - listeners: { - items: { - type: "string", - }, - type: "array", - title: "Listeners", - }, - timetables: { - items: { - type: "string", - }, - type: "array", - title: "Timetables", - }, - }, - type: "object", - required: [ - "name", - "macros", - "flask_blueprints", - "fastapi_apps", - "fastapi_root_middlewares", - "appbuilder_views", - "appbuilder_menu_items", - "global_operator_extra_links", - "operator_extra_links", - "source", - "listeners", - "timetables", - ], - title: "PluginResponse", - description: "Plugin serializer.", + properties: { + name: { + type: 'string', + title: 'Name' + }, + macros: { + items: { + type: 'string' + }, + type: 'array', + title: 'Macros' + }, + flask_blueprints: { + items: { + type: 'string' + }, + type: 'array', + title: 'Flask Blueprints' + }, + fastapi_apps: { + items: { + '$ref': '#/components/schemas/FastAPIAppResponse' + }, + type: 'array', + title: 'Fastapi Apps' + }, + fastapi_root_middlewares: { + items: { + '$ref': '#/components/schemas/FastAPIRootMiddlewareResponse' + }, + type: 'array', + title: 'Fastapi Root Middlewares' + }, + external_views: { + items: { + '$ref': '#/components/schemas/ExternalViewResponse' + }, + type: 'array', + title: 'External Views', + description: "Aggregate all external views. Both 'external_views' and 'appbuilder_menu_items' are included here." 
+ }, + react_apps: { + items: { + '$ref': '#/components/schemas/ReactAppResponse' + }, + type: 'array', + title: 'React Apps' + }, + appbuilder_views: { + items: { + '$ref': '#/components/schemas/AppBuilderViewResponse' + }, + type: 'array', + title: 'Appbuilder Views' + }, + appbuilder_menu_items: { + items: { + '$ref': '#/components/schemas/AppBuilderMenuItemResponse' + }, + type: 'array', + title: 'Appbuilder Menu Items', + deprecated: true + }, + global_operator_extra_links: { + items: { + type: 'string' + }, + type: 'array', + title: 'Global Operator Extra Links' + }, + operator_extra_links: { + items: { + type: 'string' + }, + type: 'array', + title: 'Operator Extra Links' + }, + source: { + type: 'string', + title: 'Source' + }, + listeners: { + items: { + type: 'string' + }, + type: 'array', + title: 'Listeners' + }, + timetables: { + items: { + type: 'string' + }, + type: 'array', + title: 'Timetables' + } + }, + type: 'object', + required: ['name', 'macros', 'flask_blueprints', 'fastapi_apps', 'fastapi_root_middlewares', 'external_views', 'react_apps', 'appbuilder_views', 'appbuilder_menu_items', 'global_operator_extra_links', 'operator_extra_links', 'source', 'listeners', 'timetables'], + title: 'PluginResponse', + description: 'Plugin serializer.' } as const; export const $PoolBody = { - properties: { - name: { - type: "string", - maxLength: 256, - title: "Name", - }, - slots: { - type: "integer", - title: "Slots", - }, - description: { - anyOf: [ - { - type: "string", + properties: { + name: { + type: 'string', + maxLength: 256, + title: 'Name' }, - { - type: "null", + slots: { + type: 'integer', + title: 'Slots' }, - ], - title: "Description", - }, - include_deferred: { - type: "boolean", - title: "Include Deferred", - default: false, + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' + }, + include_deferred: { + type: 'boolean', + title: 'Include Deferred', + default: false + } }, - }, - additionalProperties: false, - type: "object", - required: ["name", "slots"], - title: "PoolBody", - description: "Pool serializer for post bodies.", + additionalProperties: false, + type: 'object', + required: ['name', 'slots'], + title: 'PoolBody', + description: 'Pool serializer for post bodies.' } as const; export const $PoolCollectionResponse = { - properties: { - pools: { - items: { - $ref: "#/components/schemas/PoolResponse", - }, - type: "array", - title: "Pools", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + pools: { + items: { + '$ref': '#/components/schemas/PoolResponse' + }, + type: 'array', + title: 'Pools' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["pools", "total_entries"], - title: "PoolCollectionResponse", - description: "Pool Collection serializer for responses.", + type: 'object', + required: ['pools', 'total_entries'], + title: 'PoolCollectionResponse', + description: 'Pool Collection serializer for responses.' 
} as const; export const $PoolPatchBody = { - properties: { - pool: { - anyOf: [ - { - type: "string", + properties: { + pool: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Pool' }, - { - type: "null", + slots: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Slots' + }, + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' }, - ], - title: "Pool", + include_deferred: { + anyOf: [ + { + type: 'boolean' + }, + { + type: 'null' + } + ], + title: 'Include Deferred' + } }, - slots: { - anyOf: [ - { - type: "integer", + additionalProperties: false, + type: 'object', + title: 'PoolPatchBody', + description: 'Pool serializer for patch bodies.' +} as const; + +export const $PoolResponse = { + properties: { + name: { + type: 'string', + title: 'Name' }, - { - type: "null", + slots: { + type: 'integer', + title: 'Slots' }, - ], - title: "Slots", - }, - description: { - anyOf: [ - { - type: "string", + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' }, - { - type: "null", + include_deferred: { + type: 'boolean', + title: 'Include Deferred' }, - ], - title: "Description", - }, - include_deferred: { - anyOf: [ - { - type: "boolean", + occupied_slots: { + type: 'integer', + title: 'Occupied Slots' + }, + running_slots: { + type: 'integer', + title: 'Running Slots' + }, + queued_slots: { + type: 'integer', + title: 'Queued Slots' }, - { - type: "null", + scheduled_slots: { + type: 'integer', + title: 'Scheduled Slots' }, - ], - title: "Include Deferred", + open_slots: { + type: 'integer', + title: 'Open Slots' + }, + deferred_slots: { + type: 'integer', + title: 'Deferred Slots' + } }, - }, - additionalProperties: false, - type: "object", - title: "PoolPatchBody", - description: "Pool serializer for patch bodies.", -} as const; - -export const $PoolResponse = { - properties: { - name: { - type: "string", - title: "Name", - }, - slots: { - type: "integer", - title: "Slots", - }, - description: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Description", - }, - include_deferred: { - type: "boolean", - title: "Include Deferred", - }, - occupied_slots: { - type: "integer", - title: "Occupied Slots", - }, - running_slots: { - type: "integer", - title: "Running Slots", - }, - queued_slots: { - type: "integer", - title: "Queued Slots", - }, - scheduled_slots: { - type: "integer", - title: "Scheduled Slots", - }, - open_slots: { - type: "integer", - title: "Open Slots", - }, - deferred_slots: { - type: "integer", - title: "Deferred Slots", - }, - }, - type: "object", - required: [ - "name", - "slots", - "description", - "include_deferred", - "occupied_slots", - "running_slots", - "queued_slots", - "scheduled_slots", - "open_slots", - "deferred_slots", - ], - title: "PoolResponse", - description: "Pool serializer for responses.", + type: 'object', + required: ['name', 'slots', 'description', 'include_deferred', 'occupied_slots', 'running_slots', 'queued_slots', 'scheduled_slots', 'open_slots', 'deferred_slots'], + title: 'PoolResponse', + description: 'Pool serializer for responses.' 
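+    /*
+     * Illustrative only -- not part of the generated file. Because each
+     * schema is exported `as const`, its string literals survive into the
+     * type system, so field-name unions can be derived without extra
+     * codegen (a sketch; `PoolField` is an invented name):
+     *
+     *   type PoolField = (typeof $PoolResponse)['required'][number];
+     *   // "name" | "slots" | "description" | "include_deferred" | ...
+     */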
} as const;

export const $ProviderCollectionResponse = {
-  properties: {
-    providers: {
-      items: {
-        $ref: "#/components/schemas/ProviderResponse",
-      },
-      type: "array",
-      title: "Providers",
-    },
-    total_entries: {
-      type: "integer",
-      title: "Total Entries",
+    properties: {
+        providers: {
+            items: {
+                '$ref': '#/components/schemas/ProviderResponse'
+            },
+            type: 'array',
+            title: 'Providers'
+        },
+        total_entries: {
+            type: 'integer',
+            title: 'Total Entries'
+        }
     },
-  },
-  type: "object",
-  required: ["providers", "total_entries"],
-  title: "ProviderCollectionResponse",
-  description: "Provider Collection serializer for responses.",
+    type: 'object',
+    required: ['providers', 'total_entries'],
+    title: 'ProviderCollectionResponse',
+    description: 'Provider Collection serializer for responses.'
 } as const;
 
 export const $ProviderResponse = {
-  properties: {
-    package_name: {
-      type: "string",
-      title: "Package Name",
-    },
-    description: {
-      type: "string",
-      title: "Description",
-    },
-    version: {
-      type: "string",
-      title: "Version",
+    properties: {
+        package_name: {
+            type: 'string',
+            title: 'Package Name'
+        },
+        description: {
+            type: 'string',
+            title: 'Description'
+        },
+        version: {
+            type: 'string',
+            title: 'Version'
+        }
     },
-  },
-  type: "object",
-  required: ["package_name", "description", "version"],
-  title: "ProviderResponse",
-  description: "Provider serializer for responses.",
+    type: 'object',
+    required: ['package_name', 'description', 'version'],
+    title: 'ProviderResponse',
+    description: 'Provider serializer for responses.'
 } as const;
 
 export const $QueuedEventCollectionResponse = {
-  properties: {
-    queued_events: {
-      items: {
-        $ref: "#/components/schemas/QueuedEventResponse",
-      },
-      type: "array",
-      title: "Queued Events",
-    },
-    total_entries: {
-      type: "integer",
-      title: "Total Entries",
+    properties: {
+        queued_events: {
+            items: {
+                '$ref': '#/components/schemas/QueuedEventResponse'
+            },
+            type: 'array',
+            title: 'Queued Events'
+        },
+        total_entries: {
+            type: 'integer',
+            title: 'Total Entries'
+        }
     },
-  },
-  type: "object",
-  required: ["queued_events", "total_entries"],
-  title: "QueuedEventCollectionResponse",
-  description: "Queued Event Collection serializer for responses.",
+    type: 'object',
+    required: ['queued_events', 'total_entries'],
+    title: 'QueuedEventCollectionResponse',
+    description: 'Queued Event Collection serializer for responses.'
 } as const;
 
 export const $QueuedEventResponse = {
-  properties: {
-    dag_id: {
-      type: "string",
-      title: "Dag Id",
-    },
-    asset_id: {
-      type: "integer",
-      title: "Asset Id",
-    },
-    created_at: {
-      type: "string",
-      format: "date-time",
-      title: "Created At",
+    properties: {
+        dag_id: {
+            type: 'string',
+            title: 'Dag Id'
+        },
+        asset_id: {
+            type: 'integer',
+            title: 'Asset Id'
+        },
+        created_at: {
+            type: 'string',
+            format: 'date-time',
+            title: 'Created At'
+        },
+        dag_display_name: {
+            type: 'string',
+            title: 'Dag Display Name'
+        }
+    },
+    type: 'object',
+    required: ['dag_id', 'asset_id', 'created_at', 'dag_display_name'],
+    title: 'QueuedEventResponse',
+    description: 'Queued Event serializer for responses.'
+} as const; + +export const $ReactAppResponse = { + properties: { + name: { + type: 'string', + title: 'Name' + }, + icon: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Icon' + }, + icon_dark_mode: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Icon Dark Mode' + }, + url_route: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Url Route' + }, + category: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Category' + }, + bundle_url: { + type: 'string', + title: 'Bundle Url' + }, + destination: { + type: 'string', + enum: ['nav', 'dag', 'dag_run', 'task', 'task_instance', 'dashboard'], + title: 'Destination', + default: 'nav' + } }, - }, - type: "object", - required: ["dag_id", "asset_id", "created_at"], - title: "QueuedEventResponse", - description: "Queued Event serializer for responses..", + additionalProperties: true, + type: 'object', + required: ['name', 'bundle_url'], + title: 'ReactAppResponse', + description: 'Serializer for React App Plugin responses.' } as const; export const $ReprocessBehavior = { - type: "string", - enum: ["failed", "completed", "none"], - title: "ReprocessBehavior", - description: `Internal enum for setting reprocess behavior in a backfill. + type: 'string', + enum: ['failed', 'completed', 'none'], + title: 'ReprocessBehavior', + description: `Internal enum for setting reprocess behavior in a backfill. -:meta private:`, +:meta private:` } as const; export const $SchedulerInfoResponse = { - properties: { - status: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Status", - }, - latest_scheduler_heartbeat: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + properties: { + status: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Status' }, - ], - title: "Latest Scheduler Heartbeat", + latest_scheduler_heartbeat: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Latest Scheduler Heartbeat' + } }, - }, - type: "object", - required: ["status", "latest_scheduler_heartbeat"], - title: "SchedulerInfoResponse", - description: "Scheduler info serializer for responses.", + type: 'object', + required: ['status', 'latest_scheduler_heartbeat'], + title: 'SchedulerInfoResponse', + description: 'Scheduler info serializer for responses.' } as const; export const $StructuredLogMessage = { - properties: { - timestamp: { - type: "string", - format: "date-time", - title: "Timestamp", - }, - event: { - type: "string", - title: "Event", + properties: { + timestamp: { + type: 'string', + format: 'date-time', + title: 'Timestamp' + }, + event: { + type: 'string', + title: 'Event' + } }, - }, - additionalProperties: true, - type: "object", - required: ["event"], - title: "StructuredLogMessage", - description: "An individual log message.", + additionalProperties: true, + type: 'object', + required: ['event'], + title: 'StructuredLogMessage', + description: 'An individual log message.' 
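+    /*
+     * Illustrative only -- not part of the generated file. Only `event` is
+     * required and extra keys are allowed (additionalProperties: true), so a
+     * conforming log line could be (values invented for the example):
+     *
+     *   const logLine = {
+     *     timestamp: '2025-01-01T00:00:00Z',
+     *     event: 'Task started',
+     *     try_number: 1,
+     *   };
+     */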
} as const; export const $TaskCollectionResponse = { - properties: { - tasks: { - items: { - $ref: "#/components/schemas/TaskResponse", - }, - type: "array", - title: "Tasks", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + tasks: { + items: { + '$ref': '#/components/schemas/TaskResponse' + }, + type: 'array', + title: 'Tasks' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["tasks", "total_entries"], - title: "TaskCollectionResponse", - description: "Task collection serializer for responses.", + type: 'object', + required: ['tasks', 'total_entries'], + title: 'TaskCollectionResponse', + description: 'Task collection serializer for responses.' } as const; export const $TaskDependencyCollectionResponse = { - properties: { - dependencies: { - items: { - $ref: "#/components/schemas/TaskDependencyResponse", - }, - type: "array", - title: "Dependencies", + properties: { + dependencies: { + items: { + '$ref': '#/components/schemas/TaskDependencyResponse' + }, + type: 'array', + title: 'Dependencies' + } }, - }, - type: "object", - required: ["dependencies"], - title: "TaskDependencyCollectionResponse", - description: "Task scheduling dependencies collection serializer for responses.", + type: 'object', + required: ['dependencies'], + title: 'TaskDependencyCollectionResponse', + description: 'Task scheduling dependencies collection serializer for responses.' } as const; export const $TaskDependencyResponse = { - properties: { - name: { - type: "string", - title: "Name", - }, - reason: { - type: "string", - title: "Reason", - }, - }, - type: "object", - required: ["name", "reason"], - title: "TaskDependencyResponse", - description: "Task Dependency serializer for responses.", + properties: { + name: { + type: 'string', + title: 'Name' + }, + reason: { + type: 'string', + title: 'Reason' + } + }, + type: 'object', + required: ['name', 'reason'], + title: 'TaskDependencyResponse', + description: 'Task Dependency serializer for responses.' +} as const; + +export const $TaskInletAssetReference = { + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' + }, + task_id: { + type: 'string', + title: 'Task Id' + }, + created_at: { + type: 'string', + format: 'date-time', + title: 'Created At' + }, + updated_at: { + type: 'string', + format: 'date-time', + title: 'Updated At' + } + }, + additionalProperties: false, + type: 'object', + required: ['dag_id', 'task_id', 'created_at', 'updated_at'], + title: 'TaskInletAssetReference', + description: 'Task inlet reference serializer for assets.' } as const; export const $TaskInstanceCollectionResponse = { - properties: { - task_instances: { - items: { - $ref: "#/components/schemas/TaskInstanceResponse", - }, - type: "array", - title: "Task Instances", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + task_instances: { + items: { + '$ref': '#/components/schemas/TaskInstanceResponse' + }, + type: 'array', + title: 'Task Instances' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["task_instances", "total_entries"], - title: "TaskInstanceCollectionResponse", - description: "Task Instance Collection serializer for responses.", + type: 'object', + required: ['task_instances', 'total_entries'], + title: 'TaskInstanceCollectionResponse', + description: 'Task Instance Collection serializer for responses.' 
} as const; export const $TaskInstanceHistoryCollectionResponse = { - properties: { - task_instances: { - items: { - $ref: "#/components/schemas/TaskInstanceHistoryResponse", - }, - type: "array", - title: "Task Instances", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + task_instances: { + items: { + '$ref': '#/components/schemas/TaskInstanceHistoryResponse' + }, + type: 'array', + title: 'Task Instances' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["task_instances", "total_entries"], - title: "TaskInstanceHistoryCollectionResponse", - description: "TaskInstanceHistory Collection serializer for responses.", + type: 'object', + required: ['task_instances', 'total_entries'], + title: 'TaskInstanceHistoryCollectionResponse', + description: 'TaskInstanceHistory Collection serializer for responses.' } as const; export const $TaskInstanceHistoryResponse = { - properties: { - task_id: { - type: "string", - title: "Task Id", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - dag_run_id: { - type: "string", - title: "Dag Run Id", - }, - map_index: { - type: "integer", - title: "Map Index", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + task_id: { + type: 'string', + title: 'Task Id' }, - { - type: "null", + dag_id: { + type: 'string', + title: 'Dag Id' }, - ], - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", + dag_run_id: { + type: 'string', + title: 'Dag Run Id' }, - { - type: "null", + map_index: { + type: 'integer', + title: 'Map Index' }, - ], - title: "End Date", - }, - duration: { - anyOf: [ - { - type: "number", + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' }, - { - type: "null", + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' }, - ], - title: "Duration", - }, - state: { - anyOf: [ - { - $ref: "#/components/schemas/TaskInstanceState", + duration: { + anyOf: [ + { + type: 'number' + }, + { + type: 'null' + } + ], + title: 'Duration' }, - { - type: "null", + state: { + anyOf: [ + { + '$ref': '#/components/schemas/TaskInstanceState' + }, + { + type: 'null' + } + ] + }, + try_number: { + type: 'integer', + title: 'Try Number' + }, + max_tries: { + type: 'integer', + title: 'Max Tries' + }, + task_display_name: { + type: 'string', + title: 'Task Display Name' + }, + dag_display_name: { + type: 'string', + title: 'Dag Display Name' + }, + hostname: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Hostname' }, - ], - }, - try_number: { - type: "integer", - title: "Try Number", - }, - max_tries: { - type: "integer", - title: "Max Tries", - }, - task_display_name: { - type: "string", - title: "Task Display Name", - }, - hostname: { - anyOf: [ - { - type: "string", + unixname: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Unixname' }, - { - type: "null", + pool: { + type: 'string', + title: 'Pool' }, - ], - title: "Hostname", - }, - unixname: { - anyOf: [ - { - type: "string", + pool_slots: { + type: 'integer', + title: 'Pool Slots' }, - { - type: "null", + queue: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Queue' }, - ], - title: "Unixname", - }, - pool: { - type: "string", - title: "Pool", - }, - pool_slots: { - type: "integer", - title: "Pool 
Slots", - }, - queue: { - anyOf: [ - { - type: "string", + priority_weight: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Priority Weight' + }, + operator: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Operator' + }, + queued_when: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Queued When' + }, + scheduled_when: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Scheduled When' + }, + pid: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Pid' + }, + executor: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Executor' }, - { - type: "null", + executor_config: { + type: 'string', + title: 'Executor Config' }, - ], - title: "Queue", - }, - priority_weight: { - anyOf: [ - { - type: "integer", - }, - { - type: "null", - }, - ], - title: "Priority Weight", - }, - operator: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Operator", - }, - queued_when: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Queued When", - }, - scheduled_when: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Scheduled When", - }, - pid: { - anyOf: [ - { - type: "integer", - }, - { - type: "null", - }, - ], - title: "Pid", - }, - executor: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Executor", - }, - executor_config: { - type: "string", - title: "Executor Config", - }, - dag_version: { - anyOf: [ - { - $ref: "#/components/schemas/DagVersionResponse", - }, - { - type: "null", - }, - ], - }, - }, - type: "object", - required: [ - "task_id", - "dag_id", - "dag_run_id", - "map_index", - "start_date", - "end_date", - "duration", - "state", - "try_number", - "max_tries", - "task_display_name", - "hostname", - "unixname", - "pool", - "pool_slots", - "queue", - "priority_weight", - "operator", - "queued_when", - "scheduled_when", - "pid", - "executor", - "executor_config", - "dag_version", - ], - title: "TaskInstanceHistoryResponse", - description: "TaskInstanceHistory serializer for responses.", + dag_version: { + anyOf: [ + { + '$ref': '#/components/schemas/DagVersionResponse' + }, + { + type: 'null' + } + ] + } + }, + type: 'object', + required: ['task_id', 'dag_id', 'dag_run_id', 'map_index', 'start_date', 'end_date', 'duration', 'state', 'try_number', 'max_tries', 'task_display_name', 'dag_display_name', 'hostname', 'unixname', 'pool', 'pool_slots', 'queue', 'priority_weight', 'operator', 'queued_when', 'scheduled_when', 'pid', 'executor', 'executor_config', 'dag_version'], + title: 'TaskInstanceHistoryResponse', + description: 'TaskInstanceHistory serializer for responses.' 
} as const; export const $TaskInstanceResponse = { - properties: { - id: { - type: "string", - title: "Id", - }, - task_id: { - type: "string", - title: "Task Id", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - dag_run_id: { - type: "string", - title: "Dag Run Id", - }, - map_index: { - type: "integer", - title: "Map Index", - }, - logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + id: { + type: 'string', + title: 'Id' }, - { - type: "null", + task_id: { + type: 'string', + title: 'Task Id' }, - ], - title: "Logical Date", - }, - run_after: { - type: "string", - format: "date-time", - title: "Run After", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", + dag_id: { + type: 'string', + title: 'Dag Id' }, - { - type: "null", + dag_run_id: { + type: 'string', + title: 'Dag Run Id' }, - ], - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", + map_index: { + type: 'integer', + title: 'Map Index' }, - { - type: "null", + logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date' }, - ], - title: "End Date", - }, - duration: { - anyOf: [ - { - type: "number", + run_after: { + type: 'string', + format: 'date-time', + title: 'Run After' }, - { - type: "null", + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' }, - ], - title: "Duration", - }, - state: { - anyOf: [ - { - $ref: "#/components/schemas/TaskInstanceState", + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' }, - { - type: "null", + duration: { + anyOf: [ + { + type: 'number' + }, + { + type: 'null' + } + ], + title: 'Duration' }, - ], - }, - try_number: { - type: "integer", - title: "Try Number", - }, - max_tries: { - type: "integer", - title: "Max Tries", - }, - task_display_name: { - type: "string", - title: "Task Display Name", - }, - hostname: { - anyOf: [ - { - type: "string", + state: { + anyOf: [ + { + '$ref': '#/components/schemas/TaskInstanceState' + }, + { + type: 'null' + } + ] + }, + try_number: { + type: 'integer', + title: 'Try Number' + }, + max_tries: { + type: 'integer', + title: 'Max Tries' + }, + task_display_name: { + type: 'string', + title: 'Task Display Name' + }, + dag_display_name: { + type: 'string', + title: 'Dag Display Name' + }, + hostname: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Hostname' }, - { - type: "null", + unixname: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Unixname' }, - ], - title: "Hostname", - }, - unixname: { - anyOf: [ - { - type: "string", + pool: { + type: 'string', + title: 'Pool' }, - { - type: "null", + pool_slots: { + type: 'integer', + title: 'Pool Slots' }, - ], - title: "Unixname", - }, - pool: { - type: "string", - title: "Pool", - }, - pool_slots: { - type: "integer", - title: "Pool Slots", - }, - queue: { - anyOf: [ - { - type: "string", + queue: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Queue' + }, + priority_weight: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Priority Weight' }, - { - type: "null", + operator: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Operator' }, - ], - title: "Queue", - }, - priority_weight: { - anyOf: [ - { - type: "integer", + queued_when: { + anyOf: [ + 
{ + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Queued When' }, - { - type: "null", + scheduled_when: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Scheduled When' }, - ], - title: "Priority Weight", - }, - operator: { - anyOf: [ - { - type: "string", + pid: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Pid' }, - { - type: "null", + executor: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Executor' }, - ], - title: "Operator", - }, - queued_when: { - anyOf: [ - { - type: "string", - format: "date-time", + executor_config: { + type: 'string', + title: 'Executor Config' }, - { - type: "null", + note: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Note' }, - ], - title: "Queued When", - }, - scheduled_when: { - anyOf: [ - { - type: "string", - format: "date-time", + rendered_map_index: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Rendered Map Index' }, - { - type: "null", + rendered_fields: { + additionalProperties: true, + type: 'object', + title: 'Rendered Fields' }, - ], - title: "Scheduled When", + trigger: { + anyOf: [ + { + '$ref': '#/components/schemas/TriggerResponse' + }, + { + type: 'null' + } + ] + }, + triggerer_job: { + anyOf: [ + { + '$ref': '#/components/schemas/JobResponse' + }, + { + type: 'null' + } + ] + }, + dag_version: { + anyOf: [ + { + '$ref': '#/components/schemas/DagVersionResponse' + }, + { + type: 'null' + } + ] + } }, - pid: { - anyOf: [ - { - type: "integer", - }, - { - type: "null", - }, - ], - title: "Pid", - }, - executor: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Executor", - }, - executor_config: { - type: "string", - title: "Executor Config", - }, - note: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Note", - }, - rendered_map_index: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Rendered Map Index", - }, - rendered_fields: { - additionalProperties: true, - type: "object", - title: "Rendered Fields", - }, - trigger: { - anyOf: [ - { - $ref: "#/components/schemas/TriggerResponse", - }, - { - type: "null", - }, - ], - }, - triggerer_job: { - anyOf: [ - { - $ref: "#/components/schemas/JobResponse", - }, - { - type: "null", - }, - ], - }, - dag_version: { - anyOf: [ - { - $ref: "#/components/schemas/DagVersionResponse", - }, - { - type: "null", - }, - ], - }, - }, - type: "object", - required: [ - "id", - "task_id", - "dag_id", - "dag_run_id", - "map_index", - "logical_date", - "run_after", - "start_date", - "end_date", - "duration", - "state", - "try_number", - "max_tries", - "task_display_name", - "hostname", - "unixname", - "pool", - "pool_slots", - "queue", - "priority_weight", - "operator", - "queued_when", - "scheduled_when", - "pid", - "executor", - "executor_config", - "note", - "rendered_map_index", - "trigger", - "triggerer_job", - "dag_version", - ], - title: "TaskInstanceResponse", - description: "TaskInstance serializer for responses.", + type: 'object', + required: ['id', 'task_id', 'dag_id', 'dag_run_id', 'map_index', 'logical_date', 'run_after', 'start_date', 'end_date', 'duration', 'state', 'try_number', 'max_tries', 'task_display_name', 'dag_display_name', 'hostname', 'unixname', 'pool', 'pool_slots', 'queue', 'priority_weight', 'operator', 'queued_when', 'scheduled_when', 'pid', 'executor', 'executor_config', 'note', 
'rendered_map_index', 'trigger', 'triggerer_job', 'dag_version'], + title: 'TaskInstanceResponse', + description: 'TaskInstance serializer for responses.' } as const; export const $TaskInstanceState = { - type: "string", - enum: [ - "removed", - "scheduled", - "queued", - "running", - "success", - "restarting", - "failed", - "up_for_retry", - "up_for_reschedule", - "upstream_failed", - "skipped", - "deferred", - ], - title: "TaskInstanceState", - description: `All possible states that a Task Instance can be in. - -Note that None is also allowed, so always use this in a type hint with Optional.`, + type: 'string', + enum: ['removed', 'scheduled', 'queued', 'running', 'success', 'restarting', 'failed', 'up_for_retry', 'up_for_reschedule', 'upstream_failed', 'skipped', 'deferred'], + title: 'TaskInstanceState', + description: `All possible states that a Task Instance can be in. + +Note that None is also allowed, so always use this in a type hint with Optional.` } as const; export const $TaskInstancesBatchBody = { - properties: { - dag_ids: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Dag Ids", - }, - dag_run_ids: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Dag Run Ids", - }, - task_ids: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Task Ids", - }, - state: { - anyOf: [ - { - items: { - anyOf: [ - { - $ref: "#/components/schemas/TaskInstanceState", - }, - { - type: "null", - }, - ], - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "State", - }, - run_after_gte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Run After Gte", - }, - run_after_lte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Run After Lte", - }, - logical_date_gte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Logical Date Gte", - }, - logical_date_lte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Logical Date Lte", - }, - start_date_gte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Start Date Gte", - }, - start_date_lte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Start Date Lte", - }, - end_date_gte: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date Gte", - }, - end_date_lte: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + dag_ids: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Dag Ids' }, - { - type: "null", + dag_run_ids: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Dag Run Ids' }, - ], - title: "End Date Lte", - }, - duration_gte: { - anyOf: [ - { - type: "number", - }, - { - type: "null", + task_ids: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Task Ids' }, - ], - title: "Duration Gte", - }, - duration_lte: { - anyOf: [ - { - type: "number", + state: { + anyOf: [ + { + items: { + anyOf: [ + { + '$ref': '#/components/schemas/TaskInstanceState' + }, + { + type: 'null' + } + ] + }, + type: 
'array' + }, + { + type: 'null' + } + ], + title: 'State' }, - { - type: "null", - }, - ], - title: "Duration Lte", - }, - pool: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", + run_after_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Run After Gte' }, - ], - title: "Pool", - }, - queue: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", + run_after_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Run After Lte' }, - { - type: "null", - }, - ], - title: "Queue", - }, - executor: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", + logical_date_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date Gte' }, - { - type: "null", + logical_date_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date Lte' }, - ], - title: "Executor", - }, - page_offset: { - type: "integer", - minimum: 0, - title: "Page Offset", - default: 0, - }, - page_limit: { - type: "integer", - minimum: 0, - title: "Page Limit", - default: 100, - }, - order_by: { - anyOf: [ - { - type: "string", + start_date_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date Gte' }, - { - type: "null", + start_date_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date Lte' }, - ], - title: "Order By", - }, - }, - additionalProperties: false, - type: "object", - title: "TaskInstancesBatchBody", - description: "Task Instance body for get batch.", -} as const; - -export const $TaskInstancesLogResponse = { - properties: { - content: { - anyOf: [ - { - items: { - $ref: "#/components/schemas/StructuredLogMessage", - }, - type: "array", - }, - { - items: { - type: "string", - }, - type: "array", - }, - ], - title: "Content", - }, - continuation_token: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Continuation Token", - }, - }, - type: "object", - required: ["content", "continuation_token"], - title: "TaskInstancesLogResponse", - description: "Log serializer for responses.", -} as const; - -export const $TaskOutletAssetReference = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - task_id: { - type: "string", - title: "Task Id", - }, - created_at: { - type: "string", - format: "date-time", - title: "Created At", - }, - updated_at: { - type: "string", - format: "date-time", - title: "Updated At", - }, - }, - additionalProperties: false, - type: "object", - required: ["dag_id", "task_id", "created_at", "updated_at"], - title: "TaskOutletAssetReference", - description: "Task outlet reference serializer for assets.", -} as const; - -export const $TaskResponse = { - properties: { - task_id: { - anyOf: [ - { - type: "string", + end_date_gte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date Gte' }, - { - type: "null", + end_date_lte: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date Lte' }, - ], - title: "Task Id", - }, - task_display_name: { - anyOf: [ - { - type: "string", + duration_gte: { + anyOf: [ + { + type: 'number' + }, + { + type: 'null' + } + ], + title: 'Duration Gte' + }, + duration_lte: { + anyOf: [ + { + type: 'number' + }, + { + 
type: 'null' + } + ], + title: 'Duration Lte' }, - { - type: "null", + pool: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Pool' }, - ], - title: "Task Display Name", - }, - owner: { - anyOf: [ - { - type: "string", + queue: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Queue' }, - { - type: "null", + executor: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Executor' }, - ], - title: "Owner", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", + page_offset: { + type: 'integer', + minimum: 0, + title: 'Page Offset', + default: 0 }, - { - type: "null", + page_limit: { + type: 'integer', + minimum: 0, + title: 'Page Limit', + default: 100 }, - ], - title: "Start Date", + order_by: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Order By' + } }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", + additionalProperties: false, + type: 'object', + title: 'TaskInstancesBatchBody', + description: 'Task Instance body for get batch.' +} as const; + +export const $TaskInstancesLogResponse = { + properties: { + content: { + anyOf: [ + { + items: { + '$ref': '#/components/schemas/StructuredLogMessage' + }, + type: 'array' + }, + { + items: { + type: 'string' + }, + type: 'array' + } + ], + title: 'Content' }, - ], - title: "End Date", + continuation_token: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Continuation Token' + } }, - trigger_rule: { - anyOf: [ - { - type: "string", + type: 'object', + required: ['content', 'continuation_token'], + title: 'TaskInstancesLogResponse', + description: 'Log serializer for responses.' +} as const; + +export const $TaskOutletAssetReference = { + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' }, - { - type: "null", + task_id: { + type: 'string', + title: 'Task Id' }, - ], - title: "Trigger Rule", - }, - depends_on_past: { - type: "boolean", - title: "Depends On Past", - }, - wait_for_downstream: { - type: "boolean", - title: "Wait For Downstream", + created_at: { + type: 'string', + format: 'date-time', + title: 'Created At' + }, + updated_at: { + type: 'string', + format: 'date-time', + title: 'Updated At' + } }, - retries: { - anyOf: [ - { - type: "number", + additionalProperties: false, + type: 'object', + required: ['dag_id', 'task_id', 'created_at', 'updated_at'], + title: 'TaskOutletAssetReference', + description: 'Task outlet reference serializer for assets.' 
+} as const; + +export const $TaskResponse = { + properties: { + task_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Task Id' }, - { - type: "null", + task_display_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Task Display Name' }, - ], - title: "Retries", - }, - queue: { - anyOf: [ - { - type: "string", + owner: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Owner' }, - { - type: "null", + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' }, - ], - title: "Queue", - }, - pool: { - anyOf: [ - { - type: "string", + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' }, - { - type: "null", + trigger_rule: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Trigger Rule' }, - ], - title: "Pool", - }, - pool_slots: { - anyOf: [ - { - type: "number", + depends_on_past: { + type: 'boolean', + title: 'Depends On Past' }, - { - type: "null", + wait_for_downstream: { + type: 'boolean', + title: 'Wait For Downstream' }, - ], - title: "Pool Slots", - }, - execution_timeout: { - anyOf: [ - { - $ref: "#/components/schemas/TimeDelta", + retries: { + anyOf: [ + { + type: 'number' + }, + { + type: 'null' + } + ], + title: 'Retries' + }, + queue: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Queue' }, - { - type: "null", + pool: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Pool' }, - ], - }, - retry_delay: { - anyOf: [ - { - $ref: "#/components/schemas/TimeDelta", + pool_slots: { + anyOf: [ + { + type: 'number' + }, + { + type: 'null' + } + ], + title: 'Pool Slots' }, - { - type: "null", + execution_timeout: { + anyOf: [ + { + '$ref': '#/components/schemas/TimeDelta' + }, + { + type: 'null' + } + ] + }, + retry_delay: { + anyOf: [ + { + '$ref': '#/components/schemas/TimeDelta' + }, + { + type: 'null' + } + ] + }, + retry_exponential_backoff: { + type: 'boolean', + title: 'Retry Exponential Backoff' + }, + priority_weight: { + anyOf: [ + { + type: 'number' + }, + { + type: 'null' + } + ], + title: 'Priority Weight' }, - ], - }, - retry_exponential_backoff: { - type: "boolean", - title: "Retry Exponential Backoff", - }, - priority_weight: { - anyOf: [ - { - type: "number", + weight_rule: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Weight Rule' }, - { - type: "null", + ui_color: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Ui Color' }, - ], - title: "Priority Weight", - }, - weight_rule: { - anyOf: [ - { - type: "string", + ui_fgcolor: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Ui Fgcolor' }, - { - type: "null", + template_fields: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Template Fields' }, - ], - title: "Weight Rule", - }, - ui_color: { - anyOf: [ - { - type: "string", + downstream_task_ids: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Downstream Task Ids' }, - { - type: "null", + doc_md: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Doc Md' }, - ], - title: "Ui Color", - }, - ui_fgcolor: { - anyOf: [ - { - type: "string", + operator_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Operator 
Name' + }, + params: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Params' }, - { - type: "null", + class_ref: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Class Ref' + }, + is_mapped: { + anyOf: [ + { + type: 'boolean' + }, + { + type: 'null' + } + ], + title: 'Is Mapped' }, - ], - title: "Ui Fgcolor", + extra_links: { + items: { + type: 'string' + }, + type: 'array', + title: 'Extra Links', + description: 'Extract and return extra_links.', + readOnly: true + } }, - template_fields: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Template Fields", - }, - downstream_task_ids: { - anyOf: [ - { - items: { - type: "string", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Downstream Task Ids", - }, - doc_md: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Doc Md", - }, - operator_name: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Operator Name", - }, - params: { - anyOf: [ - { - additionalProperties: true, - type: "object", - }, - { - type: "null", - }, - ], - title: "Params", - }, - class_ref: { - anyOf: [ - { - additionalProperties: true, - type: "object", - }, - { - type: "null", - }, - ], - title: "Class Ref", - }, - is_mapped: { - anyOf: [ - { - type: "boolean", - }, - { - type: "null", - }, - ], - title: "Is Mapped", - }, - extra_links: { - items: { - type: "string", - }, - type: "array", - title: "Extra Links", - description: "Extract and return extra_links.", - readOnly: true, - }, - }, - type: "object", - required: [ - "task_id", - "task_display_name", - "owner", - "start_date", - "end_date", - "trigger_rule", - "depends_on_past", - "wait_for_downstream", - "retries", - "queue", - "pool", - "pool_slots", - "execution_timeout", - "retry_delay", - "retry_exponential_backoff", - "priority_weight", - "weight_rule", - "ui_color", - "ui_fgcolor", - "template_fields", - "downstream_task_ids", - "doc_md", - "operator_name", - "params", - "class_ref", - "is_mapped", - "extra_links", - ], - title: "TaskResponse", - description: "Task serializer for responses.", + type: 'object', + required: ['task_id', 'task_display_name', 'owner', 'start_date', 'end_date', 'trigger_rule', 'depends_on_past', 'wait_for_downstream', 'retries', 'queue', 'pool', 'pool_slots', 'execution_timeout', 'retry_delay', 'retry_exponential_backoff', 'priority_weight', 'weight_rule', 'ui_color', 'ui_fgcolor', 'template_fields', 'downstream_task_ids', 'doc_md', 'operator_name', 'params', 'class_ref', 'is_mapped', 'extra_links'], + title: 'TaskResponse', + description: 'Task serializer for responses.' 
} as const; export const $TimeDelta = { - properties: { - __type: { - type: "string", - title: "Type", - default: "TimeDelta", - }, - days: { - type: "integer", - title: "Days", - }, - seconds: { - type: "integer", - title: "Seconds", - }, - microseconds: { - type: "integer", - title: "Microseconds", - }, - }, - type: "object", - required: ["days", "seconds", "microseconds"], - title: "TimeDelta", - description: "TimeDelta can be used to interact with datetime.timedelta objects.", -} as const; - -export const $TriggerDAGRunPostBody = { - properties: { - dag_run_id: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Dag Run Id", - }, - data_interval_start: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + __type: { + type: 'string', + title: 'Type', + default: 'TimeDelta' }, - { - type: "null", + days: { + type: 'integer', + title: 'Days' }, - ], - title: "Data Interval Start", - }, - data_interval_end: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", + seconds: { + type: 'integer', + title: 'Seconds' }, - ], - title: "Data Interval End", + microseconds: { + type: 'integer', + title: 'Microseconds' + } }, - logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", + type: 'object', + required: ['days', 'seconds', 'microseconds'], + title: 'TimeDelta', + description: 'TimeDelta can be used to interact with datetime.timedelta objects.' +} as const; + +export const $TriggerDAGRunPostBody = { + properties: { + dag_run_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Dag Run Id' }, - { - type: "null", + data_interval_start: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Data Interval Start' }, - ], - title: "Logical Date", - }, - run_after: { - anyOf: [ - { - type: "string", - format: "date-time", + data_interval_end: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Data Interval End' }, - { - type: "null", + logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date' }, - ], - title: "Run After", - }, - conf: { - additionalProperties: true, - type: "object", - title: "Conf", - }, - note: { - anyOf: [ - { - type: "string", + run_after: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Run After' }, - { - type: "null", + conf: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Conf' }, - ], - title: "Note", + note: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Note' + } }, - }, - additionalProperties: false, - type: "object", - required: ["logical_date"], - title: "TriggerDAGRunPostBody", - description: "Trigger DAG Run Serializer for POST body.", + additionalProperties: false, + type: 'object', + required: ['logical_date'], + title: 'TriggerDAGRunPostBody', + description: 'Trigger DAG Run Serializer for POST body.' 
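+    /*
+     * Illustrative only -- not part of the generated file. `logical_date` is
+     * the only required key and is explicitly nullable, so a minimal trigger
+     * body could be (values invented for the example):
+     *
+     *   const triggerBody = {
+     *     logical_date: null,
+     *     conf: { retrain: true },
+     *   };
+     */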
} as const; export const $TriggerResponse = { - properties: { - id: { - type: "integer", - title: "Id", - }, - classpath: { - type: "string", - title: "Classpath", - }, - kwargs: { - type: "string", - title: "Kwargs", - }, - created_date: { - type: "string", - format: "date-time", - title: "Created Date", - }, - triggerer_id: { - anyOf: [ - { - type: "integer", + properties: { + id: { + type: 'integer', + title: 'Id' + }, + classpath: { + type: 'string', + title: 'Classpath' + }, + kwargs: { + type: 'string', + title: 'Kwargs' }, - { - type: "null", + created_date: { + type: 'string', + format: 'date-time', + title: 'Created Date' }, - ], - title: "Triggerer Id", + triggerer_id: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Triggerer Id' + } }, - }, - type: "object", - required: ["id", "classpath", "kwargs", "created_date", "triggerer_id"], - title: "TriggerResponse", - description: "Trigger serializer for responses.", + type: 'object', + required: ['id', 'classpath', 'kwargs', 'created_date', 'triggerer_id'], + title: 'TriggerResponse', + description: 'Trigger serializer for responses.' } as const; export const $TriggererInfoResponse = { - properties: { - status: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + properties: { + status: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Status' }, - ], - title: "Status", + latest_triggerer_heartbeat: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Latest Triggerer Heartbeat' + } }, - latest_triggerer_heartbeat: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + type: 'object', + required: ['status', 'latest_triggerer_heartbeat'], + title: 'TriggererInfoResponse', + description: 'Triggerer info serializer for responses.' +} as const; + +export const $UpdateHITLDetailPayload = { + properties: { + chosen_options: { + items: { + type: 'string' + }, + type: 'array', + minItems: 1, + title: 'Chosen Options' }, - ], - title: "Latest Triggerer Heartbeat", + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' + } }, - }, - type: "object", - required: ["status", "latest_triggerer_heartbeat"], - title: "TriggererInfoResponse", - description: "Triggerer info serializer for responses.", + type: 'object', + required: ['chosen_options'], + title: 'UpdateHITLDetailPayload', + description: 'Schema for updating the content of a Human-in-the-loop detail.' 
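+    /*
+     * Illustrative only -- not part of the generated file. `chosen_options`
+     * must contain at least one entry (minItems: 1) and `params_input` is an
+     * open object, so a valid update could be (values invented):
+     *
+     *   const hitlUpdate = {
+     *     chosen_options: ['approve'],
+     *     params_input: { reviewer: 'jdoe' },
+     *   };
+     */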
} as const; export const $ValidationError = { - properties: { - loc: { - items: { - anyOf: [ - { - type: "string", - }, - { - type: "integer", - }, - ], - }, - type: "array", - title: "Location", - }, - msg: { - type: "string", - title: "Message", - }, - type: { - type: "string", - title: "Error Type", - }, - }, - type: "object", - required: ["loc", "msg", "type"], - title: "ValidationError", + properties: { + loc: { + items: { + anyOf: [ + { + type: 'string' + }, + { + type: 'integer' + } + ] + }, + type: 'array', + title: 'Location' + }, + msg: { + type: 'string', + title: 'Message' + }, + type: { + type: 'string', + title: 'Error Type' + } + }, + type: 'object', + required: ['loc', 'msg', 'type'], + title: 'ValidationError' } as const; export const $VariableBody = { - properties: { - key: { - type: "string", - maxLength: 250, - title: "Key", - }, - value: { - type: "string", - title: "Value", - }, - description: { - anyOf: [ - { - type: "string", + properties: { + key: { + type: 'string', + maxLength: 250, + title: 'Key' }, - { - type: "null", + value: { + '$ref': '#/components/schemas/JsonValue' }, - ], - title: "Description", + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' + } }, - }, - additionalProperties: false, - type: "object", - required: ["key", "value"], - title: "VariableBody", - description: "Variable serializer for bodies.", + additionalProperties: false, + type: 'object', + required: ['key', 'value'], + title: 'VariableBody', + description: 'Variable serializer for bodies.' } as const; export const $VariableCollectionResponse = { - properties: { - variables: { - items: { - $ref: "#/components/schemas/VariableResponse", - }, - type: "array", - title: "Variables", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + variables: { + items: { + '$ref': '#/components/schemas/VariableResponse' + }, + type: 'array', + title: 'Variables' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["variables", "total_entries"], - title: "VariableCollectionResponse", - description: "Variable Collection serializer for responses.", + type: 'object', + required: ['variables', 'total_entries'], + title: 'VariableCollectionResponse', + description: 'Variable Collection serializer for responses.' } as const; export const $VariableResponse = { - properties: { - key: { - type: "string", - title: "Key", - }, - value: { - type: "string", - title: "Value", - }, - description: { - anyOf: [ - { - type: "string", + properties: { + key: { + type: 'string', + title: 'Key' }, - { - type: "null", + value: { + type: 'string', + title: 'Value' }, - ], - title: "Description", - }, - is_encrypted: { - type: "boolean", - title: "Is Encrypted", + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' + }, + is_encrypted: { + type: 'boolean', + title: 'Is Encrypted' + } }, - }, - type: "object", - required: ["key", "value", "description", "is_encrypted"], - title: "VariableResponse", - description: "Variable serializer for responses.", + type: 'object', + required: ['key', 'value', 'description', 'is_encrypted'], + title: 'VariableResponse', + description: 'Variable serializer for responses.' 
} as const; export const $VersionInfo = { - properties: { - version: { - type: "string", - title: "Version", - }, - git_version: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + properties: { + version: { + type: 'string', + title: 'Version' }, - ], - title: "Git Version", + git_version: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Git Version' + } }, - }, - type: "object", - required: ["version", "git_version"], - title: "VersionInfo", - description: "Version information serializer for responses.", + type: 'object', + required: ['version', 'git_version'], + title: 'VersionInfo', + description: 'Version information serializer for responses.' } as const; export const $XComCollectionResponse = { - properties: { - xcom_entries: { - items: { - $ref: "#/components/schemas/XComResponse", - }, - type: "array", - title: "Xcom Entries", - }, - total_entries: { - type: "integer", - title: "Total Entries", + properties: { + xcom_entries: { + items: { + '$ref': '#/components/schemas/XComResponse' + }, + type: 'array', + title: 'Xcom Entries' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } }, - }, - type: "object", - required: ["xcom_entries", "total_entries"], - title: "XComCollectionResponse", - description: "XCom Collection serializer for responses.", + type: 'object', + required: ['xcom_entries', 'total_entries'], + title: 'XComCollectionResponse', + description: 'XCom Collection serializer for responses.' } as const; export const $XComCreateBody = { - properties: { - key: { - type: "string", - title: "Key", - }, - value: { - title: "Value", - }, - map_index: { - type: "integer", - title: "Map Index", - default: -1, + properties: { + key: { + type: 'string', + title: 'Key' + }, + value: { + title: 'Value' + }, + map_index: { + type: 'integer', + title: 'Map Index', + default: -1 + } }, - }, - additionalProperties: false, - type: "object", - required: ["key", "value"], - title: "XComCreateBody", - description: "Payload serializer for creating an XCom entry.", + additionalProperties: false, + type: 'object', + required: ['key', 'value'], + title: 'XComCreateBody', + description: 'Payload serializer for creating an XCom entry.' 
} as const;

export const $XComResponse = {
-  properties: {
-    key: {
-      type: "string",
-      title: "Key",
-    },
-    timestamp: {
-      type: "string",
-      format: "date-time",
-      title: "Timestamp",
-    },
-    logical_date: {
-      anyOf: [
-        {
-          type: "string",
-          format: "date-time",
+    properties: {
+        key: {
+            type: 'string',
+            title: 'Key'
         },
-        {
-          type: "null",
+        timestamp: {
+            type: 'string',
+            format: 'date-time',
+            title: 'Timestamp'
         },
-      ],
-      title: "Logical Date",
-    },
-    map_index: {
-      type: "integer",
-      title: "Map Index",
-    },
-    task_id: {
-      type: "string",
-      title: "Task Id",
-    },
-    dag_id: {
-      type: "string",
-      title: "Dag Id",
-    },
-    run_id: {
-      type: "string",
-      title: "Run Id",
+        logical_date: {
+            anyOf: [
+                {
+                    type: 'string',
+                    format: 'date-time'
+                },
+                {
+                    type: 'null'
+                }
+            ],
+            title: 'Logical Date'
+        },
+        map_index: {
+            type: 'integer',
+            title: 'Map Index'
+        },
+        task_id: {
+            type: 'string',
+            title: 'Task Id'
+        },
+        dag_id: {
+            type: 'string',
+            title: 'Dag Id'
+        },
+        run_id: {
+            type: 'string',
+            title: 'Run Id'
+        },
+        dag_display_name: {
+            type: 'string',
+            title: 'Dag Display Name'
+        }
     },
-  },
-  type: "object",
-  required: ["key", "timestamp", "logical_date", "map_index", "task_id", "dag_id", "run_id"],
-  title: "XComResponse",
-  description: "Serializer for a xcom item.",
+    type: 'object',
+    required: ['key', 'timestamp', 'logical_date', 'map_index', 'task_id', 'dag_id', 'run_id', 'dag_display_name'],
+    title: 'XComResponse',
+    description: 'Serializer for an XCom item.'
 } as const;
 
 export const $XComResponseNative = {
-  properties: {
-    key: {
-      type: "string",
-      title: "Key",
-    },
-    timestamp: {
-      type: "string",
-      format: "date-time",
-      title: "Timestamp",
-    },
-    logical_date: {
-      anyOf: [
-        {
-          type: "string",
-          format: "date-time",
+    properties: {
+        key: {
+            type: 'string',
+            title: 'Key'
         },
-        {
-          type: "null",
+        timestamp: {
+            type: 'string',
+            format: 'date-time',
+            title: 'Timestamp'
         },
-      ],
-      title: "Logical Date",
-    },
-    map_index: {
-      type: "integer",
-      title: "Map Index",
-    },
-    task_id: {
-      type: "string",
-      title: "Task Id",
-    },
-    dag_id: {
-      type: "string",
-      title: "Dag Id",
-    },
-    run_id: {
-      type: "string",
-      title: "Run Id",
-    },
-    value: {
-      title: "Value",
+        logical_date: {
+            anyOf: [
+                {
+                    type: 'string',
+                    format: 'date-time'
+                },
+                {
+                    type: 'null'
+                }
+            ],
+            title: 'Logical Date'
+        },
+        map_index: {
+            type: 'integer',
+            title: 'Map Index'
+        },
+        task_id: {
+            type: 'string',
+            title: 'Task Id'
+        },
+        dag_id: {
+            type: 'string',
+            title: 'Dag Id'
+        },
+        run_id: {
+            type: 'string',
+            title: 'Run Id'
+        },
+        dag_display_name: {
+            type: 'string',
+            title: 'Dag Display Name'
+        },
+        value: {
+            title: 'Value'
+        }
     },
-  },
-  type: "object",
-  required: ["key", "timestamp", "logical_date", "map_index", "task_id", "dag_id", "run_id", "value"],
-  title: "XComResponseNative",
-  description: "XCom response serializer with native return type.",
+    type: 'object',
+    required: ['key', 'timestamp', 'logical_date', 'map_index', 'task_id', 'dag_id', 'run_id', 'dag_display_name', 'value'],
+    title: 'XComResponseNative',
+    description: 'XCom response serializer with native return type.'
} as const; export const $XComResponseString = { - properties: { - key: { - type: "string", - title: "Key", - }, - timestamp: { - type: "string", - format: "date-time", - title: "Timestamp", - }, - logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + key: { + type: 'string', + title: 'Key' }, - { - type: "null", + timestamp: { + type: 'string', + format: 'date-time', + title: 'Timestamp' }, - ], - title: "Logical Date", - }, - map_index: { - type: "integer", - title: "Map Index", - }, - task_id: { - type: "string", - title: "Task Id", - }, - dag_id: { - type: "string", - title: "Dag Id", - }, - run_id: { - type: "string", - title: "Run Id", - }, - value: { - anyOf: [ - { - type: "string", + logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date' + }, + map_index: { + type: 'integer', + title: 'Map Index' + }, + task_id: { + type: 'string', + title: 'Task Id' + }, + dag_id: { + type: 'string', + title: 'Dag Id' + }, + run_id: { + type: 'string', + title: 'Run Id' }, - { - type: "null", + dag_display_name: { + type: 'string', + title: 'Dag Display Name' }, - ], - title: "Value", + value: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Value' + } }, - }, - type: "object", - required: ["key", "timestamp", "logical_date", "map_index", "task_id", "dag_id", "run_id", "value"], - title: "XComResponseString", - description: "XCom response serializer with string return type.", + type: 'object', + required: ['key', 'timestamp', 'logical_date', 'map_index', 'task_id', 'dag_id', 'run_id', 'dag_display_name', 'value'], + title: 'XComResponseString', + description: 'XCom response serializer with string return type.' } as const; export const $XComUpdateBody = { - properties: { - value: { - title: "Value", - }, - map_index: { - type: "integer", - title: "Map Index", - default: -1, + properties: { + value: { + title: 'Value' + }, + map_index: { + type: 'integer', + title: 'Map Index', + default: -1 + } }, - }, - additionalProperties: false, - type: "object", - required: ["value"], - title: "XComUpdateBody", - description: "Payload serializer for updating an XCom entry.", + additionalProperties: false, + type: 'object', + required: ['value'], + title: 'XComUpdateBody', + description: 'Payload serializer for updating an XCom entry.' } as const; export const $BaseEdgeResponse = { - properties: { - source_id: { - type: "string", - title: "Source Id", - }, - target_id: { - type: "string", - title: "Target Id", + properties: { + source_id: { + type: 'string', + title: 'Source Id' + }, + target_id: { + type: 'string', + title: 'Target Id' + } }, - }, - type: "object", - required: ["source_id", "target_id"], - title: "BaseEdgeResponse", - description: "Base Edge serializer for responses.", + type: 'object', + required: ['source_id', 'target_id'], + title: 'BaseEdgeResponse', + description: 'Base Edge serializer for responses.' 
} as const; export const $BaseGraphResponse = { - properties: { - edges: { - items: { - $ref: "#/components/schemas/BaseEdgeResponse", - }, - type: "array", - title: "Edges", - }, - nodes: { - items: { - $ref: "#/components/schemas/BaseNodeResponse", - }, - type: "array", - title: "Nodes", - }, - }, - type: "object", - required: ["edges", "nodes"], - title: "BaseGraphResponse", - description: "Base Graph serializer for responses.", + properties: { + edges: { + items: { + '$ref': '#/components/schemas/BaseEdgeResponse' + }, + type: 'array', + title: 'Edges' + }, + nodes: { + items: { + '$ref': '#/components/schemas/BaseNodeResponse' + }, + type: 'array', + title: 'Nodes' + } + }, + type: 'object', + required: ['edges', 'nodes'], + title: 'BaseGraphResponse', + description: 'Base Graph serializer for responses.' } as const; export const $BaseNodeResponse = { - properties: { - id: { - type: "string", - title: "Id", - }, - label: { - type: "string", - title: "Label", - }, - type: { - type: "string", - enum: [ - "join", - "task", - "asset-condition", - "asset", - "asset-alias", - "asset-name-ref", - "asset-uri-ref", - "dag", - "sensor", - "trigger", - ], - title: "Type", - }, - }, - type: "object", - required: ["id", "label", "type"], - title: "BaseNodeResponse", - description: "Base Node serializer for responses.", + properties: { + id: { + type: 'string', + title: 'Id' + }, + label: { + type: 'string', + title: 'Label' + }, + type: { + type: 'string', + enum: ['join', 'task', 'asset-condition', 'asset', 'asset-alias', 'asset-name-ref', 'asset-uri-ref', 'dag', 'sensor', 'trigger'], + title: 'Type' + } + }, + type: 'object', + required: ['id', 'label', 'type'], + title: 'BaseNodeResponse', + description: 'Base Node serializer for responses.' +} as const; + +export const $CalendarTimeRangeCollectionResponse = { + properties: { + total_entries: { + type: 'integer', + title: 'Total Entries' + }, + dag_runs: { + items: { + '$ref': '#/components/schemas/CalendarTimeRangeResponse' + }, + type: 'array', + title: 'Dag Runs' + } + }, + type: 'object', + required: ['total_entries', 'dag_runs'], + title: 'CalendarTimeRangeCollectionResponse', + description: 'Response model for calendar time range results.' 
} as const; -export const $ConfigResponse = { - properties: { - navbar_color: { - type: "string", - title: "Navbar Color", - }, - navbar_text_color: { - type: "string", - title: "Navbar Text Color", - }, - navbar_hover_color: { - type: "string", - title: "Navbar Hover Color", - }, - navbar_text_hover_color: { - type: "string", - title: "Navbar Text Hover Color", - }, - page_size: { - type: "integer", - title: "Page Size", - }, - auto_refresh_interval: { - type: "integer", - title: "Auto Refresh Interval", - }, - hide_paused_dags_by_default: { - type: "boolean", - title: "Hide Paused Dags By Default", - }, - instance_name: { - type: "string", - title: "Instance Name", - }, - instance_name_has_markup: { - type: "boolean", - title: "Instance Name Has Markup", - }, - enable_swagger_ui: { - type: "boolean", - title: "Enable Swagger Ui", - }, - require_confirmation_dag_change: { - type: "boolean", - title: "Require Confirmation Dag Change", - }, - default_wrap: { - type: "boolean", - title: "Default Wrap", - }, - warn_deployment_exposure: { - type: "boolean", - title: "Warn Deployment Exposure", - }, - audit_view_excluded_events: { - type: "string", - title: "Audit View Excluded Events", - }, - audit_view_included_events: { - type: "string", - title: "Audit View Included Events", - }, - test_connection: { - type: "string", - title: "Test Connection", - }, - dashboard_alert: { - items: { - $ref: "#/components/schemas/UIAlert", - }, - type: "array", - title: "Dashboard Alert", - }, - }, - type: "object", - required: [ - "navbar_color", - "navbar_text_color", - "navbar_hover_color", - "navbar_text_hover_color", - "page_size", - "auto_refresh_interval", - "hide_paused_dags_by_default", - "instance_name", - "instance_name_has_markup", - "enable_swagger_ui", - "require_confirmation_dag_change", - "default_wrap", - "warn_deployment_exposure", - "audit_view_excluded_events", - "audit_view_included_events", - "test_connection", - "dashboard_alert", - ], - title: "ConfigResponse", - description: "configuration serializer.", +export const $CalendarTimeRangeResponse = { + properties: { + date: { + type: 'string', + format: 'date-time', + title: 'Date' + }, + state: { + type: 'string', + enum: ['queued', 'running', 'success', 'failed', 'planned'], + title: 'State' + }, + count: { + type: 'integer', + title: 'Count' + } + }, + type: 'object', + required: ['date', 'state', 'count'], + title: 'CalendarTimeRangeResponse', + description: 'Represents a summary of DAG runs for a specific calendar time range.' 
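// --- Illustrative sketch (editor's note, not part of the generated diff) ---
// Aggregating $CalendarTimeRangeResponse entries (shape assumed from the
// schema above) into per-state totals, e.g. to colour a calendar heatmap.
type CalendarState = "queued" | "running" | "success" | "failed" | "planned";

interface CalendarTimeRangeResponse {
  date: string; // ISO date-time
  state: CalendarState;
  count: number;
}

function totalsByState(
  entries: CalendarTimeRangeResponse[],
): Record<CalendarState, number> {
  const totals: Record<CalendarState, number> = {
    queued: 0,
    running: 0,
    success: 0,
    failed: 0,
    planned: 0,
  };
  for (const entry of entries) {
    totals[entry.state] += entry.count;
  }
  return totals;
}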
} as const; -export const $ConnectionHookFieldBehavior = { - properties: { - hidden: { - type: "boolean", - title: "Hidden", - description: "Flag if the form field should be hidden.", - default: false, - }, - title: { - anyOf: [ - { - type: "string", +export const $ConfigResponse = { + properties: { + page_size: { + type: 'integer', + title: 'Page Size' + }, + auto_refresh_interval: { + type: 'integer', + title: 'Auto Refresh Interval' + }, + hide_paused_dags_by_default: { + type: 'boolean', + title: 'Hide Paused Dags By Default' + }, + instance_name: { + type: 'string', + title: 'Instance Name' + }, + enable_swagger_ui: { + type: 'boolean', + title: 'Enable Swagger Ui' + }, + require_confirmation_dag_change: { + type: 'boolean', + title: 'Require Confirmation Dag Change' + }, + default_wrap: { + type: 'boolean', + title: 'Default Wrap' + }, + test_connection: { + type: 'string', + title: 'Test Connection' + }, + dashboard_alert: { + items: { + '$ref': '#/components/schemas/UIAlert' + }, + type: 'array', + title: 'Dashboard Alert' }, - { - type: "null", + show_external_log_redirect: { + type: 'boolean', + title: 'Show External Log Redirect' }, - ], - title: "Title", - description: - "Label / title for the field that should be displayed, if re-labelling is needed. Use `None` to display standard title.", + external_log_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'External Log Name' + } }, - placeholder: { - anyOf: [ - { - type: "string", - }, - { - type: "null", + type: 'object', + required: ['page_size', 'auto_refresh_interval', 'hide_paused_dags_by_default', 'instance_name', 'enable_swagger_ui', 'require_confirmation_dag_change', 'default_wrap', 'test_connection', 'dashboard_alert', 'show_external_log_redirect'], + title: 'ConfigResponse', + description: 'configuration serializer.' +} as const; + +export const $ConnectionHookFieldBehavior = { + properties: { + hidden: { + type: 'boolean', + title: 'Hidden', + description: 'Flag if the form field should be hidden.', + default: false + }, + title: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Title', + description: 'Label / title for the field that should be displayed, if re-labelling is needed. Use `None` to display standard title.' }, - ], - title: "Placeholder", - description: "Placeholder text that should be populated to the form.", + placeholder: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Placeholder', + description: 'Placeholder text that should be populated to the form.' + } }, - }, - type: "object", - title: "ConnectionHookFieldBehavior", - description: "A class to store the behavior of each standard field of a Hook.", + type: 'object', + title: 'ConnectionHookFieldBehavior', + description: 'A class to store the behavior of each standard field of a Hook.' 
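// --- Illustrative sketch (editor's note, not part of the generated diff) ---
// Resolving the label a connection form should render for a standard field,
// per the $ConnectionHookFieldBehavior semantics above: a hidden flag, an
// optional title override where null means "use the standard title", and an
// optional placeholder. The interface is an assumed mirror of the schema.
interface ConnectionHookFieldBehavior {
  hidden?: boolean; // default false
  title?: string | null; // null -> keep the standard field title
  placeholder?: string | null;
}

// Returns null when the field should not be rendered at all.
function fieldLabel(
  standardTitle: string,
  behavior?: ConnectionHookFieldBehavior | null,
): string | null {
  if (behavior?.hidden) return null;
  return behavior?.title ?? standardTitle;
}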
} as const; export const $ConnectionHookMetaData = { - properties: { - connection_type: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Connection Type", - }, - hook_class_name: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Hook Class Name", - }, - default_conn_name: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Default Conn Name", - }, - hook_name: { - type: "string", - title: "Hook Name", - }, - standard_fields: { - anyOf: [ - { - $ref: "#/components/schemas/StandardHookFields", - }, - { - type: "null", - }, - ], - }, - extra_fields: { - anyOf: [ - { - additionalProperties: true, - type: "object", - }, - { - type: "null", - }, - ], - title: "Extra Fields", - }, - }, - type: "object", - required: [ - "connection_type", - "hook_class_name", - "default_conn_name", - "hook_name", - "standard_fields", - "extra_fields", - ], - title: "ConnectionHookMetaData", - description: `Response model for Hook information == Connection type meta data. + properties: { + connection_type: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Connection Type' + }, + hook_class_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Hook Class Name' + }, + default_conn_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Default Conn Name' + }, + hook_name: { + type: 'string', + title: 'Hook Name' + }, + standard_fields: { + anyOf: [ + { + '$ref': '#/components/schemas/StandardHookFields' + }, + { + type: 'null' + } + ] + }, + extra_fields: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Extra Fields' + } + }, + type: 'object', + required: ['connection_type', 'hook_class_name', 'default_conn_name', 'hook_name', 'standard_fields', 'extra_fields'], + title: 'ConnectionHookMetaData', + description: `Response model for Hook information == Connection type meta data. It is used to transfer providers information loaded by providers_manager such that -the API server/Web UI can use this data to render connection form UI.`, +the API server/Web UI can use this data to render connection form UI.` } as const; -export const $DAGRunStates = { - properties: { - queued: { - type: "integer", - title: "Queued", - }, - running: { - type: "integer", - title: "Running", - }, - success: { - type: "integer", - title: "Success", +export const $DAGRunLightResponse = { + properties: { + id: { + type: 'integer', + title: 'Id' + }, + dag_id: { + type: 'string', + title: 'Dag Id' + }, + run_id: { + type: 'string', + title: 'Run Id' + }, + logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Logical Date' + }, + run_after: { + type: 'string', + format: 'date-time', + title: 'Run After' + }, + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' + }, + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' + }, + state: { + '$ref': '#/components/schemas/DagRunState' + } }, - failed: { - type: "integer", - title: "Failed", + type: 'object', + required: ['id', 'dag_id', 'run_id', 'logical_date', 'run_after', 'start_date', 'end_date', 'state'], + title: 'DAGRunLightResponse', + description: 'DAG Run serializer for responses.' 
+} as const; + +export const $DAGRunStates = { + properties: { + queued: { + type: 'integer', + title: 'Queued' + }, + running: { + type: 'integer', + title: 'Running' + }, + success: { + type: 'integer', + title: 'Success' + }, + failed: { + type: 'integer', + title: 'Failed' + } }, - }, - type: "object", - required: ["queued", "running", "success", "failed"], - title: "DAGRunStates", - description: "DAG Run States for responses.", + type: 'object', + required: ['queued', 'running', 'success', 'failed'], + title: 'DAGRunStates', + description: 'DAG Run States for responses.' } as const; export const $DAGRunTypes = { - properties: { - backfill: { - type: "integer", - title: "Backfill", - }, - scheduled: { - type: "integer", - title: "Scheduled", - }, - manual: { - type: "integer", - title: "Manual", - }, - asset_triggered: { - type: "integer", - title: "Asset Triggered", + properties: { + backfill: { + type: 'integer', + title: 'Backfill' + }, + scheduled: { + type: 'integer', + title: 'Scheduled' + }, + manual: { + type: 'integer', + title: 'Manual' + }, + asset_triggered: { + type: 'integer', + title: 'Asset Triggered' + } }, - }, - type: "object", - required: ["backfill", "scheduled", "manual", "asset_triggered"], - title: "DAGRunTypes", - description: "DAG Run Types for responses.", + type: 'object', + required: ['backfill', 'scheduled', 'manual', 'asset_triggered'], + title: 'DAGRunTypes', + description: 'DAG Run Types for responses.' } as const; export const $DAGWithLatestDagRunsCollectionResponse = { - properties: { - total_entries: { - type: "integer", - title: "Total Entries", - }, - dags: { - items: { - $ref: "#/components/schemas/DAGWithLatestDagRunsResponse", - }, - type: "array", - title: "Dags", + properties: { + total_entries: { + type: 'integer', + title: 'Total Entries' + }, + dags: { + items: { + '$ref': '#/components/schemas/DAGWithLatestDagRunsResponse' + }, + type: 'array', + title: 'Dags' + } }, - }, - type: "object", - required: ["total_entries", "dags"], - title: "DAGWithLatestDagRunsCollectionResponse", - description: "DAG with latest dag runs collection response serializer.", + type: 'object', + required: ['total_entries', 'dags'], + title: 'DAGWithLatestDagRunsCollectionResponse', + description: 'DAG with latest dag runs collection response serializer.' 
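// --- Illustrative sketch (editor's note, not part of the generated diff) ---
// In the collection responses above, `total_entries` reports the size of the
// full result set while the item array holds only the current page, so paging
// reduces to an offset comparison. The generic `items` field name is mine —
// in $DAGWithLatestDagRunsCollectionResponse the array is called `dags`.
interface PagedCollection<T> {
  total_entries: number;
  items: T[];
}

function hasMorePages<T>(page: PagedCollection<T>, offset: number): boolean {
  return offset + page.items.length < page.total_entries;
}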
} as const; export const $DAGWithLatestDagRunsResponse = { - properties: { - dag_id: { - type: "string", - title: "Dag Id", - }, - dag_display_name: { - type: "string", - title: "Dag Display Name", - }, - is_paused: { - type: "boolean", - title: "Is Paused", - }, - is_stale: { - type: "boolean", - title: "Is Stale", - }, - last_parsed_time: { - anyOf: [ - { - type: "string", - format: "date-time", + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' }, - { - type: "null", + dag_display_name: { + type: 'string', + title: 'Dag Display Name' }, - ], - title: "Last Parsed Time", - }, - last_expired: { - anyOf: [ - { - type: "string", - format: "date-time", + is_paused: { + type: 'boolean', + title: 'Is Paused' }, - { - type: "null", + is_stale: { + type: 'boolean', + title: 'Is Stale' }, - ], - title: "Last Expired", - }, - bundle_name: { - anyOf: [ - { - type: "string", + last_parsed_time: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Parsed Time' }, - { - type: "null", + last_expired: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Last Expired' }, - ], - title: "Bundle Name", - }, - relative_fileloc: { - anyOf: [ - { - type: "string", + bundle_name: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Name' }, - { - type: "null", + bundle_version: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Bundle Version' }, - ], - title: "Relative Fileloc", - }, - fileloc: { - type: "string", - title: "Fileloc", - }, - description: { - anyOf: [ - { - type: "string", + relative_fileloc: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Relative Fileloc' }, - { - type: "null", + fileloc: { + type: 'string', + title: 'Fileloc' }, - ], - title: "Description", - }, - timetable_summary: { - anyOf: [ - { - type: "string", + description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Description' + }, + timetable_summary: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Timetable Summary' + }, + timetable_description: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Timetable Description' + }, + tags: { + items: { + '$ref': '#/components/schemas/DagTagResponse' + }, + type: 'array', + title: 'Tags' + }, + max_active_tasks: { + type: 'integer', + title: 'Max Active Tasks' + }, + max_active_runs: { + anyOf: [ + { + type: 'integer' + }, + { + type: 'null' + } + ], + title: 'Max Active Runs' + }, + max_consecutive_failed_dag_runs: { + type: 'integer', + title: 'Max Consecutive Failed Dag Runs' + }, + has_task_concurrency_limits: { + type: 'boolean', + title: 'Has Task Concurrency Limits' + }, + has_import_errors: { + type: 'boolean', + title: 'Has Import Errors' + }, + next_dagrun_logical_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Logical Date' + }, + next_dagrun_data_interval_start: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Data Interval Start' + }, + next_dagrun_data_interval_end: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Data Interval End' + }, + next_dagrun_run_after: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Next Dagrun Run After' + }, + owners: { + 
items: { + type: 'string' + }, + type: 'array', + title: 'Owners' + }, + asset_expression: { + anyOf: [ + { + additionalProperties: true, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Asset Expression' }, - { - type: "null", + latest_dag_runs: { + items: { + '$ref': '#/components/schemas/DAGRunResponse' + }, + type: 'array', + title: 'Latest Dag Runs' }, - ], - title: "Timetable Summary", + file_token: { + type: 'string', + title: 'File Token', + description: 'Return file token.', + readOnly: true + } }, - timetable_description: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Timetable Description", - }, - tags: { - items: { - $ref: "#/components/schemas/DagTagResponse", - }, - type: "array", - title: "Tags", - }, - max_active_tasks: { - type: "integer", - title: "Max Active Tasks", - }, - max_active_runs: { - anyOf: [ - { - type: "integer", - }, - { - type: "null", - }, - ], - title: "Max Active Runs", - }, - max_consecutive_failed_dag_runs: { - type: "integer", - title: "Max Consecutive Failed Dag Runs", - }, - has_task_concurrency_limits: { - type: "boolean", - title: "Has Task Concurrency Limits", - }, - has_import_errors: { - type: "boolean", - title: "Has Import Errors", - }, - next_dagrun_logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Logical Date", - }, - next_dagrun_data_interval_start: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Data Interval Start", - }, - next_dagrun_data_interval_end: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Data Interval End", - }, - next_dagrun_run_after: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Next Dagrun Run After", - }, - owners: { - items: { - type: "string", - }, - type: "array", - title: "Owners", - }, - asset_expression: { - anyOf: [ - { - additionalProperties: true, - type: "object", - }, - { - type: "null", - }, - ], - title: "Asset Expression", - }, - latest_dag_runs: { - items: { - $ref: "#/components/schemas/DAGRunResponse", - }, - type: "array", - title: "Latest Dag Runs", - }, - file_token: { - type: "string", - title: "File Token", - description: "Return file token.", - readOnly: true, - }, - }, - type: "object", - required: [ - "dag_id", - "dag_display_name", - "is_paused", - "is_stale", - "last_parsed_time", - "last_expired", - "bundle_name", - "relative_fileloc", - "fileloc", - "description", - "timetable_summary", - "timetable_description", - "tags", - "max_active_tasks", - "max_active_runs", - "max_consecutive_failed_dag_runs", - "has_task_concurrency_limits", - "has_import_errors", - "next_dagrun_logical_date", - "next_dagrun_data_interval_start", - "next_dagrun_data_interval_end", - "next_dagrun_run_after", - "owners", - "asset_expression", - "latest_dag_runs", - "file_token", - ], - title: "DAGWithLatestDagRunsResponse", - description: "DAG with latest dag runs response serializer.", + type: 'object', + required: ['dag_id', 'dag_display_name', 'is_paused', 'is_stale', 'last_parsed_time', 'last_expired', 'bundle_name', 'bundle_version', 'relative_fileloc', 'fileloc', 'description', 'timetable_summary', 'timetable_description', 'tags', 'max_active_tasks', 'max_active_runs', 'max_consecutive_failed_dag_runs', 'has_task_concurrency_limits', 'has_import_errors', 'next_dagrun_logical_date', 
'next_dagrun_data_interval_start', 'next_dagrun_data_interval_end', 'next_dagrun_run_after', 'owners', 'asset_expression', 'latest_dag_runs', 'file_token'], + title: 'DAGWithLatestDagRunsResponse', + description: 'DAG with latest dag runs response serializer.' } as const; -export const $EdgeResponse = { - properties: { - source_id: { - type: "string", - title: "Source Id", - }, - target_id: { - type: "string", - title: "Target Id", - }, - is_setup_teardown: { - anyOf: [ - { - type: "boolean", +export const $DashboardDagStatsResponse = { + properties: { + active_dag_count: { + type: 'integer', + title: 'Active Dag Count' }, - { - type: "null", + failed_dag_count: { + type: 'integer', + title: 'Failed Dag Count' }, - ], - title: "Is Setup Teardown", + running_dag_count: { + type: 'integer', + title: 'Running Dag Count' + }, + queued_dag_count: { + type: 'integer', + title: 'Queued Dag Count' + } }, - label: { - anyOf: [ - { - type: "string", + type: 'object', + required: ['active_dag_count', 'failed_dag_count', 'running_dag_count', 'queued_dag_count'], + title: 'DashboardDagStatsResponse', + description: 'Dashboard DAG Stats serializer for responses.' +} as const; + +export const $EdgeResponse = { + properties: { + source_id: { + type: 'string', + title: 'Source Id' }, - { - type: "null", + target_id: { + type: 'string', + title: 'Target Id' }, - ], - title: "Label", - }, - is_source_asset: { - anyOf: [ - { - type: "boolean", + is_setup_teardown: { + anyOf: [ + { + type: 'boolean' + }, + { + type: 'null' + } + ], + title: 'Is Setup Teardown' }, - { - type: "null", + label: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Label' }, - ], - title: "Is Source Asset", + is_source_asset: { + anyOf: [ + { + type: 'boolean' + }, + { + type: 'null' + } + ], + title: 'Is Source Asset' + } }, - }, - type: "object", - required: ["source_id", "target_id"], - title: "EdgeResponse", - description: "Edge serializer for responses.", + type: 'object', + required: ['source_id', 'target_id'], + title: 'EdgeResponse', + description: 'Edge serializer for responses.' } as const; export const $ExtraMenuItem = { - properties: { - text: { - type: "string", - title: "Text", + properties: { + text: { + type: 'string', + title: 'Text' + }, + href: { + type: 'string', + title: 'Href' + } }, - href: { - type: "string", - title: "Href", + type: 'object', + required: ['text', 'href'], + title: 'ExtraMenuItem' +} as const; + +export const $GridNodeResponse = { + properties: { + id: { + type: 'string', + title: 'Id' + }, + label: { + type: 'string', + title: 'Label' + }, + children: { + anyOf: [ + { + items: { + '$ref': '#/components/schemas/GridNodeResponse' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Children' + }, + is_mapped: { + anyOf: [ + { + type: 'boolean' + }, + { + type: 'null' + } + ], + title: 'Is Mapped' + }, + setup_teardown_type: { + anyOf: [ + { + type: 'string', + enum: ['setup', 'teardown'] + }, + { + type: 'null' + } + ], + title: 'Setup Teardown Type' + } }, - }, - type: "object", - required: ["text", "href"], - title: "ExtraMenuItem", + type: 'object', + required: ['id', 'label', 'is_mapped'], + title: 'GridNodeResponse', + description: 'Base Node serializer for responses.' 
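// --- Illustrative sketch (editor's note, not part of the generated diff) ---
// $GridNodeResponse above is a recursive tree (`children` $ref's the same
// schema), so task groups can be walked with a plain recursive function.
// The interface below is an assumed mirror of that schema.
interface GridNode {
  id: string;
  label: string;
  children?: GridNode[] | null;
  is_mapped: boolean | null;
  setup_teardown_type?: "setup" | "teardown" | null;
}

// Collect the ids of leaf nodes (concrete tasks) in document order.
function leafIds(node: GridNode): string[] {
  if (!node.children || node.children.length === 0) {
    return [node.id];
  }
  return node.children.flatMap(leafIds);
}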
} as const; -export const $GridDAGRunwithTIs = { - properties: { - dag_run_id: { - type: "string", - title: "Dag Run Id", - }, - queued_at: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Queued At", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date", - }, - run_after: { - type: "string", - format: "date-time", - title: "Run After", - }, - state: { - $ref: "#/components/schemas/DagRunState", - }, - run_type: { - $ref: "#/components/schemas/DagRunType", - }, - logical_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Logical Date", - }, - data_interval_start: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Data Interval Start", - }, - data_interval_end: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Data Interval End", - }, - note: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Note", - }, - task_instances: { - items: { - $ref: "#/components/schemas/GridTaskInstanceSummary", - }, - type: "array", - title: "Task Instances", - }, - }, - type: "object", - required: [ - "dag_run_id", - "queued_at", - "start_date", - "end_date", - "run_after", - "state", - "run_type", - "logical_date", - "data_interval_start", - "data_interval_end", - "note", - "task_instances", - ], - title: "GridDAGRunwithTIs", - description: "DAG Run model for the Grid UI.", -} as const; - -export const $GridResponse = { - properties: { - dag_runs: { - items: { - $ref: "#/components/schemas/GridDAGRunwithTIs", - }, - type: "array", - title: "Dag Runs", - }, - structure: { - $ref: "#/components/schemas/StructureDataResponse", - }, - }, - type: "object", - required: ["dag_runs", "structure"], - title: "GridResponse", - description: "Response model for the Grid UI.", -} as const; - -export const $GridTaskInstanceSummary = { - properties: { - task_id: { - type: "string", - title: "Task Id", - }, - try_number: { - type: "integer", - title: "Try Number", - }, - start_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Start Date", - }, - end_date: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "End Date", - }, - queued_dttm: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Queued Dttm", - }, - child_states: { - anyOf: [ - { - additionalProperties: { - type: "integer", - }, - type: "object", - }, - { - type: "null", - }, - ], - title: "Child States", - }, - task_count: { - type: "integer", - title: "Task Count", - }, - state: { - anyOf: [ - { - $ref: "#/components/schemas/TaskInstanceState", - }, - { - type: "null", - }, - ], - }, - note: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Note", - }, - }, - type: "object", - required: [ - "task_id", - "try_number", - "start_date", - "end_date", - "queued_dttm", - "child_states", - "task_count", - "state", - "note", - ], - title: "GridTaskInstanceSummary", - description: "Task Instance Summary model for the Grid UI.", +export const $GridRunsResponse = { + properties: { + dag_id: { + type: 'string', + title: 'Dag Id' + }, + 
run_id: { + type: 'string', + title: 'Run Id' + }, + queued_at: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Queued At' + }, + start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Start Date' + }, + end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'End Date' + }, + run_after: { + type: 'string', + format: 'date-time', + title: 'Run After' + }, + state: { + anyOf: [ + { + '$ref': '#/components/schemas/TaskInstanceState' + }, + { + type: 'null' + } + ] + }, + run_type: { + '$ref': '#/components/schemas/DagRunType' + }, + duration: { + type: 'integer', + title: 'Duration', + readOnly: true + } + }, + type: 'object', + required: ['dag_id', 'run_id', 'queued_at', 'start_date', 'end_date', 'run_after', 'state', 'run_type', 'duration'], + title: 'GridRunsResponse', + description: 'Base Node serializer for responses.' +} as const; + +export const $GridTISummaries = { + properties: { + run_id: { + type: 'string', + title: 'Run Id' + }, + dag_id: { + type: 'string', + title: 'Dag Id' + }, + task_instances: { + items: { + '$ref': '#/components/schemas/LightGridTaskInstanceSummary' + }, + type: 'array', + title: 'Task Instances' + } + }, + type: 'object', + required: ['run_id', 'dag_id', 'task_instances'], + title: 'GridTISummaries', + description: 'DAG Run model for the Grid UI.' } as const; export const $HistoricalMetricDataResponse = { - properties: { - dag_run_types: { - $ref: "#/components/schemas/DAGRunTypes", - }, - dag_run_states: { - $ref: "#/components/schemas/DAGRunStates", + properties: { + dag_run_types: { + '$ref': '#/components/schemas/DAGRunTypes' + }, + dag_run_states: { + '$ref': '#/components/schemas/DAGRunStates' + }, + task_instance_states: { + '$ref': '#/components/schemas/TaskInstanceStateCount' + } }, - task_instance_states: { - $ref: "#/components/schemas/TaskInstanceStateCount", + type: 'object', + required: ['dag_run_types', 'dag_run_states', 'task_instance_states'], + title: 'HistoricalMetricDataResponse', + description: 'Historical Metric Data serializer for responses.' +} as const; + +export const $LightGridTaskInstanceSummary = { + properties: { + task_id: { + type: 'string', + title: 'Task Id' + }, + state: { + anyOf: [ + { + '$ref': '#/components/schemas/TaskInstanceState' + }, + { + type: 'null' + } + ] + }, + child_states: { + anyOf: [ + { + additionalProperties: { + type: 'integer' + }, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Child States' + }, + min_start_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Min Start Date' + }, + max_end_date: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Max End Date' + } }, - }, - type: "object", - required: ["dag_run_types", "dag_run_states", "task_instance_states"], - title: "HistoricalMetricDataResponse", - description: "Historical Metric Data serializer for responses.", + type: 'object', + required: ['task_id', 'state', 'child_states', 'min_start_date', 'max_end_date'], + title: 'LightGridTaskInstanceSummary', + description: 'Task Instance Summary model for the Grid UI.' 
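// --- Illustrative sketch (editor's note, not part of the generated diff) ---
// In $LightGridTaskInstanceSummary above, `child_states` maps a state name to
// the number of child task instances in that state — enough to size and
// colour a grid cell without fetching every child row. Shape assumed from
// the schema.
interface LightGridTISummary {
  task_id: string;
  state: string | null;
  child_states: Record<string, number> | null;
  min_start_date: string | null;
  max_end_date: string | null;
}

// Total number of child task instances behind one grid cell.
function childCount(summary: LightGridTISummary): number {
  if (!summary.child_states) return 0;
  return Object.values(summary.child_states).reduce((a, b) => a + b, 0);
}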
} as const; export const $MenuItem = { - type: "string", - enum: [ - "Assets", - "Audit Log", - "Config", - "Connections", - "Dags", - "Docs", - "Plugins", - "Pools", - "Providers", - "Variables", - "XComs", - ], - title: "MenuItem", - description: "Define all menu items defined in the menu.", + type: 'string', + enum: ['Required Actions', 'Assets', 'Audit Log', 'Config', 'Connections', 'Dags', 'Docs', 'Plugins', 'Pools', 'Providers', 'Variables', 'XComs'], + title: 'MenuItem', + description: 'Define all menu items defined in the menu.' } as const; export const $MenuItemCollectionResponse = { - properties: { - authorized_menu_items: { - items: { - $ref: "#/components/schemas/MenuItem", - }, - type: "array", - title: "Authorized Menu Items", - }, - extra_menu_items: { - items: { - $ref: "#/components/schemas/ExtraMenuItem", - }, - type: "array", - title: "Extra Menu Items", - }, - }, - type: "object", - required: ["authorized_menu_items", "extra_menu_items"], - title: "MenuItemCollectionResponse", - description: "Menu Item Collection serializer for responses.", + properties: { + authorized_menu_items: { + items: { + '$ref': '#/components/schemas/MenuItem' + }, + type: 'array', + title: 'Authorized Menu Items' + }, + extra_menu_items: { + items: { + '$ref': '#/components/schemas/ExtraMenuItem' + }, + type: 'array', + title: 'Extra Menu Items' + } + }, + type: 'object', + required: ['authorized_menu_items', 'extra_menu_items'], + title: 'MenuItemCollectionResponse', + description: 'Menu Item Collection serializer for responses.' } as const; export const $NodeResponse = { - properties: { - id: { - type: "string", - title: "Id", - }, - label: { - type: "string", - title: "Label", - }, - type: { - type: "string", - enum: [ - "join", - "task", - "asset-condition", - "asset", - "asset-alias", - "asset-name-ref", - "asset-uri-ref", - "dag", - "sensor", - "trigger", - ], - title: "Type", - }, - children: { - anyOf: [ - { - items: { - $ref: "#/components/schemas/NodeResponse", - }, - type: "array", - }, - { - type: "null", - }, - ], - title: "Children", - }, - is_mapped: { - anyOf: [ - { - type: "boolean", - }, - { - type: "null", - }, - ], - title: "Is Mapped", - }, - tooltip: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Tooltip", - }, - setup_teardown_type: { - anyOf: [ - { - type: "string", - enum: ["setup", "teardown"], - }, - { - type: "null", - }, - ], - title: "Setup Teardown Type", - }, - operator: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Operator", - }, - asset_condition_type: { - anyOf: [ - { - type: "string", - enum: ["or-gate", "and-gate"], - }, - { - type: "null", - }, - ], - title: "Asset Condition Type", - }, - }, - type: "object", - required: ["id", "label", "type"], - title: "NodeResponse", - description: "Node serializer for responses.", -} as const; - -export const $StandardHookFields = { - properties: { - description: { - anyOf: [ - { - $ref: "#/components/schemas/ConnectionHookFieldBehavior", - }, - { - type: "null", - }, - ], - }, - url_schema: { - anyOf: [ - { - $ref: "#/components/schemas/ConnectionHookFieldBehavior", + properties: { + id: { + type: 'string', + title: 'Id' }, - { - type: "null", + label: { + type: 'string', + title: 'Label' }, - ], - }, - host: { - anyOf: [ - { - $ref: "#/components/schemas/ConnectionHookFieldBehavior", + type: { + type: 'string', + enum: ['join', 'task', 'asset-condition', 'asset', 'asset-alias', 'asset-name-ref', 'asset-uri-ref', 'dag', 'sensor', 'trigger'], + title: 
'Type' }, - { - type: "null", + children: { + anyOf: [ + { + items: { + '$ref': '#/components/schemas/NodeResponse' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Children' }, - ], - }, - port: { - anyOf: [ - { - $ref: "#/components/schemas/ConnectionHookFieldBehavior", + is_mapped: { + anyOf: [ + { + type: 'boolean' + }, + { + type: 'null' + } + ], + title: 'Is Mapped' }, - { - type: "null", + tooltip: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Tooltip' }, - ], - }, - login: { - anyOf: [ - { - $ref: "#/components/schemas/ConnectionHookFieldBehavior", + setup_teardown_type: { + anyOf: [ + { + type: 'string', + enum: ['setup', 'teardown'] + }, + { + type: 'null' + } + ], + title: 'Setup Teardown Type' }, - { - type: "null", + operator: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Operator' }, - ], + asset_condition_type: { + anyOf: [ + { + type: 'string', + enum: ['or-gate', 'and-gate'] + }, + { + type: 'null' + } + ], + title: 'Asset Condition Type' + } }, - password: { - anyOf: [ - { - $ref: "#/components/schemas/ConnectionHookFieldBehavior", - }, - { - type: "null", - }, - ], + type: 'object', + required: ['id', 'label', 'type'], + title: 'NodeResponse', + description: 'Node serializer for responses.' +} as const; + +export const $StandardHookFields = { + properties: { + description: { + anyOf: [ + { + '$ref': '#/components/schemas/ConnectionHookFieldBehavior' + }, + { + type: 'null' + } + ] + }, + url_schema: { + anyOf: [ + { + '$ref': '#/components/schemas/ConnectionHookFieldBehavior' + }, + { + type: 'null' + } + ] + }, + host: { + anyOf: [ + { + '$ref': '#/components/schemas/ConnectionHookFieldBehavior' + }, + { + type: 'null' + } + ] + }, + port: { + anyOf: [ + { + '$ref': '#/components/schemas/ConnectionHookFieldBehavior' + }, + { + type: 'null' + } + ] + }, + login: { + anyOf: [ + { + '$ref': '#/components/schemas/ConnectionHookFieldBehavior' + }, + { + type: 'null' + } + ] + }, + password: { + anyOf: [ + { + '$ref': '#/components/schemas/ConnectionHookFieldBehavior' + }, + { + type: 'null' + } + ] + } }, - }, - type: "object", - required: ["description", "url_schema", "host", "port", "login", "password"], - title: "StandardHookFields", - description: "Standard fields of a Hook that a form will render.", + type: 'object', + required: ['description', 'url_schema', 'host', 'port', 'login', 'password'], + title: 'StandardHookFields', + description: 'Standard fields of a Hook that a form will render.' } as const; export const $StructureDataResponse = { - properties: { - edges: { - items: { - $ref: "#/components/schemas/EdgeResponse", - }, - type: "array", - title: "Edges", - }, - nodes: { - items: { - $ref: "#/components/schemas/NodeResponse", - }, - type: "array", - title: "Nodes", - }, - }, - type: "object", - required: ["edges", "nodes"], - title: "StructureDataResponse", - description: "Structure Data serializer for responses.", + properties: { + edges: { + items: { + '$ref': '#/components/schemas/EdgeResponse' + }, + type: 'array', + title: 'Edges' + }, + nodes: { + items: { + '$ref': '#/components/schemas/NodeResponse' + }, + type: 'array', + title: 'Nodes' + } + }, + type: 'object', + required: ['edges', 'nodes'], + title: 'StructureDataResponse', + description: 'Structure Data serializer for responses.' 
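// --- Illustrative sketch (editor's note, not part of the generated diff) ---
// $StructureDataResponse above ships the graph as flat `edges`/`nodes` lists;
// a client that needs to walk downstream tasks can fold the edges into an
// adjacency map first.
interface Edge {
  source_id: string;
  target_id: string;
}

function downstreamMap(edges: Edge[]): Map<string, string[]> {
  const adjacency = new Map<string, string[]>();
  for (const { source_id, target_id } of edges) {
    const targets = adjacency.get(source_id) ?? [];
    targets.push(target_id);
    adjacency.set(source_id, targets);
  }
  return adjacency;
}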
} as const; export const $TaskInstanceStateCount = { - properties: { - no_status: { - type: "integer", - title: "No Status", - }, - removed: { - type: "integer", - title: "Removed", - }, - scheduled: { - type: "integer", - title: "Scheduled", - }, - queued: { - type: "integer", - title: "Queued", - }, - running: { - type: "integer", - title: "Running", - }, - success: { - type: "integer", - title: "Success", - }, - restarting: { - type: "integer", - title: "Restarting", - }, - failed: { - type: "integer", - title: "Failed", - }, - up_for_retry: { - type: "integer", - title: "Up For Retry", - }, - up_for_reschedule: { - type: "integer", - title: "Up For Reschedule", - }, - upstream_failed: { - type: "integer", - title: "Upstream Failed", - }, - skipped: { - type: "integer", - title: "Skipped", - }, - deferred: { - type: "integer", - title: "Deferred", - }, - }, - type: "object", - required: [ - "no_status", - "removed", - "scheduled", - "queued", - "running", - "success", - "restarting", - "failed", - "up_for_retry", - "up_for_reschedule", - "upstream_failed", - "skipped", - "deferred", - ], - title: "TaskInstanceStateCount", - description: "TaskInstance serializer for responses.", + properties: { + no_status: { + type: 'integer', + title: 'No Status' + }, + removed: { + type: 'integer', + title: 'Removed' + }, + scheduled: { + type: 'integer', + title: 'Scheduled' + }, + queued: { + type: 'integer', + title: 'Queued' + }, + running: { + type: 'integer', + title: 'Running' + }, + success: { + type: 'integer', + title: 'Success' + }, + restarting: { + type: 'integer', + title: 'Restarting' + }, + failed: { + type: 'integer', + title: 'Failed' + }, + up_for_retry: { + type: 'integer', + title: 'Up For Retry' + }, + up_for_reschedule: { + type: 'integer', + title: 'Up For Reschedule' + }, + upstream_failed: { + type: 'integer', + title: 'Upstream Failed' + }, + skipped: { + type: 'integer', + title: 'Skipped' + }, + deferred: { + type: 'integer', + title: 'Deferred' + } + }, + type: 'object', + required: ['no_status', 'removed', 'scheduled', 'queued', 'running', 'success', 'restarting', 'failed', 'up_for_retry', 'up_for_reschedule', 'upstream_failed', 'skipped', 'deferred'], + title: 'TaskInstanceStateCount', + description: 'TaskInstance serializer for responses.' } as const; export const $UIAlert = { - properties: { - text: { - type: "string", - title: "Text", - }, - category: { - type: "string", - enum: ["info", "warning", "error"], - title: "Category", - }, - }, - type: "object", - required: ["text", "category"], - title: "UIAlert", - description: "Optional alert to be shown at the top of the page.", -} as const; + properties: { + text: { + type: 'string', + title: 'Text' + }, + category: { + type: 'string', + enum: ['info', 'warning', 'error'], + title: 'Category' + } + }, + type: 'object', + required: ['text', 'category'], + title: 'UIAlert', + description: 'Optional alert to be shown at the top of the page.' 
+} as const; \ No newline at end of file diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts index 385f8256e7fd9..3ddb842f32238 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts @@ -1,3587 +1,3923 @@ // This file is auto-generated by @hey-api/openapi-ts -import type { CancelablePromise } from "./core/CancelablePromise"; -import { OpenAPI } from "./core/OpenAPI"; -import { request as __request } from "./core/request"; -import type { - GetAssetsData, - GetAssetsResponse, - GetAssetAliasesData, - GetAssetAliasesResponse, - GetAssetAliasData, - GetAssetAliasResponse, - GetAssetEventsData, - GetAssetEventsResponse, - CreateAssetEventData, - CreateAssetEventResponse, - MaterializeAssetData, - MaterializeAssetResponse, - GetAssetQueuedEventsData, - GetAssetQueuedEventsResponse, - DeleteAssetQueuedEventsData, - DeleteAssetQueuedEventsResponse, - GetAssetData, - GetAssetResponse, - GetDagAssetQueuedEventsData, - GetDagAssetQueuedEventsResponse, - DeleteDagAssetQueuedEventsData, - DeleteDagAssetQueuedEventsResponse, - GetDagAssetQueuedEventData, - GetDagAssetQueuedEventResponse, - DeleteDagAssetQueuedEventData, - DeleteDagAssetQueuedEventResponse, - NextRunAssetsData, - NextRunAssetsResponse, - ListBackfillsData, - ListBackfillsResponse, - CreateBackfillData, - CreateBackfillResponse, - GetBackfillData, - GetBackfillResponse, - PauseBackfillData, - PauseBackfillResponse, - UnpauseBackfillData, - UnpauseBackfillResponse, - CancelBackfillData, - CancelBackfillResponse, - CreateBackfillDryRunData, - CreateBackfillDryRunResponse, - ListBackfills1Data, - ListBackfills1Response, - DeleteConnectionData, - DeleteConnectionResponse, - GetConnectionData, - GetConnectionResponse, - PatchConnectionData, - PatchConnectionResponse, - GetConnectionsData, - GetConnectionsResponse, - PostConnectionData, - PostConnectionResponse, - BulkConnectionsData, - BulkConnectionsResponse, - TestConnectionData, - TestConnectionResponse, - CreateDefaultConnectionsResponse, - HookMetaDataResponse, - GetDagRunData, - GetDagRunResponse, - DeleteDagRunData, - DeleteDagRunResponse, - PatchDagRunData, - PatchDagRunResponse, - GetUpstreamAssetEventsData, - GetUpstreamAssetEventsResponse, - ClearDagRunData, - ClearDagRunResponse, - GetDagRunsData, - GetDagRunsResponse, - TriggerDagRunData, - TriggerDagRunResponse, - GetListDagRunsBatchData, - GetListDagRunsBatchResponse, - GetDagSourceData, - GetDagSourceResponse, - GetDagStatsData, - GetDagStatsResponse, - GetDagReportsData, - GetDagReportsResponse, - GetConfigData, - GetConfigResponse, - GetConfigValueData, - GetConfigValueResponse, - GetConfigsResponse, - ListDagWarningsData, - ListDagWarningsResponse, - GetDagsData, - GetDagsResponse, - PatchDagsData, - PatchDagsResponse, - GetDagData, - GetDagResponse, - PatchDagData, - PatchDagResponse, - DeleteDagData, - DeleteDagResponse, - GetDagDetailsData, - GetDagDetailsResponse, - GetDagTagsData, - GetDagTagsResponse, - GetEventLogData, - GetEventLogResponse, - GetEventLogsData, - GetEventLogsResponse, - GetExtraLinksData, - GetExtraLinksResponse, - GetTaskInstanceData, - GetTaskInstanceResponse, - PatchTaskInstanceData, - PatchTaskInstanceResponse, - GetMappedTaskInstancesData, - GetMappedTaskInstancesResponse, - GetTaskInstanceDependenciesData, - GetTaskInstanceDependenciesResponse, - GetTaskInstanceDependencies1Data, - 
GetTaskInstanceDependencies1Response, - GetTaskInstanceTriesData, - GetTaskInstanceTriesResponse, - GetMappedTaskInstanceTriesData, - GetMappedTaskInstanceTriesResponse, - GetMappedTaskInstanceData, - GetMappedTaskInstanceResponse, - PatchTaskInstance1Data, - PatchTaskInstance1Response, - GetTaskInstancesData, - GetTaskInstancesResponse, - GetTaskInstancesBatchData, - GetTaskInstancesBatchResponse, - GetTaskInstanceTryDetailsData, - GetTaskInstanceTryDetailsResponse, - GetMappedTaskInstanceTryDetailsData, - GetMappedTaskInstanceTryDetailsResponse, - PostClearTaskInstancesData, - PostClearTaskInstancesResponse, - PatchTaskInstanceDryRunData, - PatchTaskInstanceDryRunResponse, - PatchTaskInstanceDryRun1Data, - PatchTaskInstanceDryRun1Response, - GetLogData, - GetLogResponse, - GetImportErrorData, - GetImportErrorResponse, - GetImportErrorsData, - GetImportErrorsResponse, - GetJobsData, - GetJobsResponse, - GetPluginsData, - GetPluginsResponse, - DeletePoolData, - DeletePoolResponse, - GetPoolData, - GetPoolResponse, - PatchPoolData, - PatchPoolResponse, - GetPoolsData, - GetPoolsResponse, - PostPoolData, - PostPoolResponse, - BulkPoolsData, - BulkPoolsResponse, - GetProvidersData, - GetProvidersResponse, - GetXcomEntryData, - GetXcomEntryResponse, - UpdateXcomEntryData, - UpdateXcomEntryResponse, - GetXcomEntriesData, - GetXcomEntriesResponse, - CreateXcomEntryData, - CreateXcomEntryResponse, - GetTasksData, - GetTasksResponse, - GetTaskData, - GetTaskResponse, - DeleteVariableData, - DeleteVariableResponse, - GetVariableData, - GetVariableResponse, - PatchVariableData, - PatchVariableResponse, - GetVariablesData, - GetVariablesResponse, - PostVariableData, - PostVariableResponse, - BulkVariablesData, - BulkVariablesResponse, - ReparseDagFileData, - ReparseDagFileResponse, - GetDagVersionData, - GetDagVersionResponse, - GetDagVersionsData, - GetDagVersionsResponse, - GetHealthResponse, - GetVersionResponse, - LoginData, - LoginResponse, - LogoutData, - LogoutResponse, - GetAuthMenusResponse, - RecentDagRunsData, - RecentDagRunsResponse, - GetDependenciesData, - GetDependenciesResponse, - HistoricalMetricsData, - HistoricalMetricsResponse, - StructureDataData, - StructureDataResponse2, - GridDataData, - GridDataResponse, -} from "./types.gen"; -export class AssetService { - /** - * Get Assets - * Get assets. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.namePattern - * @param data.uriPattern - * @param data.dagIds - * @param data.onlyActive - * @param data.orderBy - * @returns AssetCollectionResponse Successful Response - * @throws ApiError - */ - public static getAssets(data: GetAssetsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/assets", - query: { - limit: data.limit, - offset: data.offset, - name_pattern: data.namePattern, - uri_pattern: data.uriPattern, - dag_ids: data.dagIds, - only_active: data.onlyActive, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Asset Aliases - * Get asset aliases. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.namePattern - * @param data.orderBy - * @returns AssetAliasCollectionResponse Successful Response - * @throws ApiError - */ - public static getAssetAliases(data: GetAssetAliasesData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/assets/aliases", - query: { - limit: data.limit, - offset: data.offset, - name_pattern: data.namePattern, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Asset Alias - * Get an asset alias. - * @param data The data for the request. - * @param data.assetAliasId - * @returns unknown Successful Response - * @throws ApiError - */ - public static getAssetAlias(data: GetAssetAliasData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/assets/aliases/{asset_alias_id}", - path: { - asset_alias_id: data.assetAliasId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Asset Events - * Get asset events. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.assetId - * @param data.sourceDagId - * @param data.sourceTaskId - * @param data.sourceRunId - * @param data.sourceMapIndex - * @param data.timestampGte - * @param data.timestampLte - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ - public static getAssetEvents(data: GetAssetEventsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/assets/events", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - asset_id: data.assetId, - source_dag_id: data.sourceDagId, - source_task_id: data.sourceTaskId, - source_run_id: data.sourceRunId, - source_map_index: data.sourceMapIndex, - timestamp_gte: data.timestampGte, - timestamp_lte: data.timestampLte, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Create Asset Event - * Create asset events. - * @param data The data for the request. - * @param data.requestBody - * @returns AssetEventResponse Successful Response - * @throws ApiError - */ - public static createAssetEvent(data: CreateAssetEventData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/assets/events", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Materialize Asset - * Materialize an asset by triggering a DAG run that produces it. - * @param data The data for the request. - * @param data.assetId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ - public static materializeAsset(data: MaterializeAssetData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/assets/{asset_id}/materialize", - path: { - asset_id: data.assetId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Get Asset Queued Events - * Get queued asset events for an asset. - * @param data The data for the request. 
- * @param data.assetId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ - public static getAssetQueuedEvents( - data: GetAssetQueuedEventsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/assets/{asset_id}/queuedEvents", - path: { - asset_id: data.assetId, - }, - query: { - before: data.before, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Delete Asset Queued Events - * Delete queued asset events for an asset. - * @param data The data for the request. - * @param data.assetId - * @param data.before - * @returns void Successful Response - * @throws ApiError - */ - public static deleteAssetQueuedEvents( - data: DeleteAssetQueuedEventsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/assets/{asset_id}/queuedEvents", - path: { - asset_id: data.assetId, - }, - query: { - before: data.before, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Asset - * Get an asset. - * @param data The data for the request. - * @param data.assetId - * @returns AssetResponse Successful Response - * @throws ApiError - */ - public static getAsset(data: GetAssetData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/assets/{asset_id}", - path: { - asset_id: data.assetId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Dag Asset Queued Events - * Get queued asset events for a DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.before - * @returns QueuedEventCollectionResponse Successful Response - * @throws ApiError - */ - public static getDagAssetQueuedEvents( - data: GetDagAssetQueuedEventsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/assets/queuedEvents", - path: { - dag_id: data.dagId, - }, - query: { - before: data.before, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Delete Dag Asset Queued Events - * @param data The data for the request. 
- * @param data.dagId - * @param data.before - * @returns void Successful Response - * @throws ApiError - */ - public static deleteDagAssetQueuedEvents( - data: DeleteDagAssetQueuedEventsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/dags/{dag_id}/assets/queuedEvents", - path: { - dag_id: data.dagId, - }, - query: { - before: data.before, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } +import type { CancelablePromise } from './core/CancelablePromise'; +import { OpenAPI } from './core/OpenAPI'; +import { request as __request } from './core/request'; +import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetLatestRunInfoData, GetLatestRunInfoResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, 
GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, UpdateHitlDetailData, UpdateHitlDetailResponse, GetHitlDetailData, GetHitlDetailResponse, UpdateMappedTiHitlDetailData, UpdateMappedTiHitlDetailResponse, GetMappedTiHitlDetailData, GetMappedTiHitlDetailResponse, GetHitlDetailsData, GetHitlDetailsResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, RefreshData, RefreshResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetCalendarData, GetCalendarResponse } from './types.gen'; - /** - * Get Dag Asset Queued Event - * Get a queued asset event for a DAG. - * @param data The data for the request. 
- * @param data.dagId - * @param data.assetId - * @param data.before - * @returns QueuedEventResponse Successful Response - * @throws ApiError - */ - public static getDagAssetQueuedEvent( - data: GetDagAssetQueuedEventData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents", - path: { - dag_id: data.dagId, - asset_id: data.assetId, - }, - query: { - before: data.before, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Delete Dag Asset Queued Event - * Delete a queued asset event for a DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.assetId - * @param data.before - * @returns void Successful Response - * @throws ApiError - */ - public static deleteDagAssetQueuedEvent( - data: DeleteDagAssetQueuedEventData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents", - path: { - dag_id: data.dagId, - asset_id: data.assetId, - }, - query: { - before: data.before, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Next Run Assets - * @param data The data for the request. - * @param data.dagId - * @returns unknown Successful Response - * @throws ApiError - */ - public static nextRunAssets(data: NextRunAssetsData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/next_run_assets/{dag_id}", - path: { - dag_id: data.dagId, - }, - errors: { - 422: "Validation Error", - }, - }); - } +export class AssetService { + /** + * Get Assets + * Get assets. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.uriPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.dagIds + * @param data.onlyActive + * @param data.orderBy + * @returns AssetCollectionResponse Successful Response + * @throws ApiError + */ + public static getAssets(data: GetAssetsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/assets', + query: { + limit: data.limit, + offset: data.offset, + name_pattern: data.namePattern, + uri_pattern: data.uriPattern, + dag_ids: data.dagIds, + only_active: data.onlyActive, + order_by: data.orderBy + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Asset Aliases + * Get asset aliases. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.namePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
+ * @param data.orderBy + * @returns AssetAliasCollectionResponse Successful Response + * @throws ApiError + */ + public static getAssetAliases(data: GetAssetAliasesData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/assets/aliases', + query: { + limit: data.limit, + offset: data.offset, + name_pattern: data.namePattern, + order_by: data.orderBy + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Asset Alias + * Get an asset alias. + * @param data The data for the request. + * @param data.assetAliasId + * @returns unknown Successful Response + * @throws ApiError + */ + public static getAssetAlias(data: GetAssetAliasData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/assets/aliases/{asset_alias_id}', + path: { + asset_alias_id: data.assetAliasId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Asset Events + * Get asset events. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.assetId + * @param data.sourceDagId + * @param data.sourceTaskId + * @param data.sourceRunId + * @param data.sourceMapIndex + * @param data.timestampGte + * @param data.timestampLte + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ + public static getAssetEvents(data: GetAssetEventsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/assets/events', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + asset_id: data.assetId, + source_dag_id: data.sourceDagId, + source_task_id: data.sourceTaskId, + source_run_id: data.sourceRunId, + source_map_index: data.sourceMapIndex, + timestamp_gte: data.timestampGte, + timestamp_lte: data.timestampLte + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Create Asset Event + * Create asset events. + * @param data The data for the request. + * @param data.requestBody + * @returns AssetEventResponse Successful Response + * @throws ApiError + */ + public static createAssetEvent(data: CreateAssetEventData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/assets/events', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Materialize Asset + * Materialize an asset by triggering a DAG run that produces it. + * @param data The data for the request. + * @param data.assetId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static materializeAsset(data: MaterializeAssetData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/assets/{asset_id}/materialize', + path: { + asset_id: data.assetId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Asset Queued Events + * Get queued asset events for an asset. + * @param data The data for the request. 
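Worth calling out: the regenerated JSDoc now states explicitly that the `*Pattern` filters are SQL LIKE expressions, not regexes. A minimal call-site sketch follows; the base URL, token, and pattern values are illustrative, and `OpenAPI.BASE`/`OpenAPI.TOKEN` are the standard configuration knobs of this style of generated core.

```typescript
import { OpenAPI } from './core/OpenAPI';
import { AssetService } from './services.gen';

// Illustrative endpoint and token; both are assumptions about the deployment.
OpenAPI.BASE = 'http://localhost:8080';
OpenAPI.TOKEN = 'my-api-token';

// `%` matches any run of characters and `_` matches exactly one, so the
// pattern below also matches e.g. `customerX`; regexes are not supported.
const assets = await AssetService.getAssets({
    namePattern: '%customer_%',
    onlyActive: true,
    limit: 50
});
console.log(assets.total_entries);
```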
+ * @param data.assetId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ + public static getAssetQueuedEvents(data: GetAssetQueuedEventsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/assets/{asset_id}/queuedEvents', + path: { + asset_id: data.assetId + }, + query: { + before: data.before + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Delete Asset Queued Events + * Delete queued asset events for an asset. + * @param data The data for the request. + * @param data.assetId + * @param data.before + * @returns void Successful Response + * @throws ApiError + */ + public static deleteAssetQueuedEvents(data: DeleteAssetQueuedEventsData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/assets/{asset_id}/queuedEvents', + path: { + asset_id: data.assetId + }, + query: { + before: data.before + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Asset + * Get an asset. + * @param data The data for the request. + * @param data.assetId + * @returns AssetResponse Successful Response + * @throws ApiError + */ + public static getAsset(data: GetAssetData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/assets/{asset_id}', + path: { + asset_id: data.assetId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Dag Asset Queued Events + * Get queued asset events for a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagAssetQueuedEvents(data: GetDagAssetQueuedEventsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/assets/queuedEvents', + path: { + dag_id: data.dagId + }, + query: { + before: data.before + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Delete Dag Asset Queued Events + * @param data The data for the request. + * @param data.dagId + * @param data.before + * @returns void Successful Response + * @throws ApiError + */ + public static deleteDagAssetQueuedEvents(data: DeleteDagAssetQueuedEventsData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/dags/{dag_id}/assets/queuedEvents', + path: { + dag_id: data.dagId + }, + query: { + before: data.before + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Dag Asset Queued Event + * Get a queued asset event for a DAG. + * @param data The data for the request. 
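Both queued-event DELETE endpoints take an optional `before` bound, which makes time-boxed cleanup straightforward. A sketch, assuming `before` accepts an ISO-8601 timestamp string; check the generated `DeleteAssetQueuedEventsData` type for the exact parameter type.

```typescript
import { AssetService } from './services.gen';

// Drop queued events for asset 42 that are more than one day old. The
// ISO-8601 string for `before` is an assumption about the wire format.
const cutoff = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString();
await AssetService.deleteAssetQueuedEvents({ assetId: 42, before: cutoff });
```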
+ * @param data.dagId + * @param data.assetId + * @param data.before + * @returns QueuedEventResponse Successful Response + * @throws ApiError + */ + public static getDagAssetQueuedEvent(data: GetDagAssetQueuedEventData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents', + path: { + dag_id: data.dagId, + asset_id: data.assetId + }, + query: { + before: data.before + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Delete Dag Asset Queued Event + * Delete a queued asset event for a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.assetId + * @param data.before + * @returns void Successful Response + * @throws ApiError + */ + public static deleteDagAssetQueuedEvent(data: DeleteDagAssetQueuedEventData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents', + path: { + dag_id: data.dagId, + asset_id: data.assetId + }, + query: { + before: data.before + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Next Run Assets + * @param data The data for the request. + * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ + public static nextRunAssets(data: NextRunAssetsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/next_run_assets/{dag_id}', + path: { + dag_id: data.dagId + }, + errors: { + 422: 'Validation Error' + } + }); + } + } export class BackfillService { - /** - * List Backfills - * @param data The data for the request. - * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ - public static listBackfills(data: ListBackfillsData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/backfills", - query: { - dag_id: data.dagId, - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } - - /** - * Create Backfill - * @param data The data for the request. - * @param data.requestBody - * @returns BackfillResponse Successful Response - * @throws ApiError - */ - public static createBackfill(data: CreateBackfillData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/backfills", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Get Backfill - * @param data The data for the request. - * @param data.backfillId - * @returns BackfillResponse Successful Response - * @throws ApiError - */ - public static getBackfill(data: GetBackfillData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/backfills/{backfill_id}", - path: { - backfill_id: data.backfillId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Pause Backfill - * @param data The data for the request. 
- * @param data.backfillId - * @returns BackfillResponse Successful Response - * @throws ApiError - */ - public static pauseBackfill(data: PauseBackfillData): CancelablePromise { - return __request(OpenAPI, { - method: "PUT", - url: "/api/v2/backfills/{backfill_id}/pause", - path: { - backfill_id: data.backfillId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Unpause Backfill - * @param data The data for the request. - * @param data.backfillId - * @returns BackfillResponse Successful Response - * @throws ApiError - */ - public static unpauseBackfill(data: UnpauseBackfillData): CancelablePromise { - return __request(OpenAPI, { - method: "PUT", - url: "/api/v2/backfills/{backfill_id}/unpause", - path: { - backfill_id: data.backfillId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Cancel Backfill - * @param data The data for the request. - * @param data.backfillId - * @returns BackfillResponse Successful Response - * @throws ApiError - */ - public static cancelBackfill(data: CancelBackfillData): CancelablePromise { - return __request(OpenAPI, { - method: "PUT", - url: "/api/v2/backfills/{backfill_id}/cancel", - path: { - backfill_id: data.backfillId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Create Backfill Dry Run - * @param data The data for the request. - * @param data.requestBody - * @returns DryRunBackfillCollectionResponse Successful Response - * @throws ApiError - */ - public static createBackfillDryRun( - data: CreateBackfillDryRunData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/backfills/dry_run", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * List Backfills - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.active - * @returns BackfillCollectionResponse Successful Response - * @throws ApiError - */ - public static listBackfills1(data: ListBackfills1Data = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/backfills", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - dag_id: data.dagId, - active: data.active, - }, - errors: { - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ + public static listBackfills(data: ListBackfillsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/backfills', + query: { + dag_id: data.dagId, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + + /** + * Create Backfill + * @param data The data for the request. 
+ * @param data.requestBody + * @returns BackfillResponse Successful Response + * @throws ApiError + */ + public static createBackfill(data: CreateBackfillData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/backfills', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ + public static getBackfill(data: GetBackfillData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/backfills/{backfill_id}', + path: { + backfill_id: data.backfillId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Pause Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ + public static pauseBackfill(data: PauseBackfillData): CancelablePromise { + return __request(OpenAPI, { + method: 'PUT', + url: '/api/v2/backfills/{backfill_id}/pause', + path: { + backfill_id: data.backfillId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Unpause Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ + public static unpauseBackfill(data: UnpauseBackfillData): CancelablePromise { + return __request(OpenAPI, { + method: 'PUT', + url: '/api/v2/backfills/{backfill_id}/unpause', + path: { + backfill_id: data.backfillId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Cancel Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ + public static cancelBackfill(data: CancelBackfillData): CancelablePromise { + return __request(OpenAPI, { + method: 'PUT', + url: '/api/v2/backfills/{backfill_id}/cancel', + path: { + backfill_id: data.backfillId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Create Backfill Dry Run + * @param data The data for the request. + * @param data.requestBody + * @returns DryRunBackfillCollectionResponse Successful Response + * @throws ApiError + */ + public static createBackfillDryRun(data: CreateBackfillDryRunData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/backfills/dry_run', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * List Backfills Ui + * @param data The data for the request. 
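Since `createBackfillDryRun` takes the same request body as `createBackfill`, a caller can preview the runs a backfill would produce before committing to it. A hedged sketch; the body field names below are assumptions inferred from the endpoint, not taken from the generated types.

```typescript
import { BackfillService } from './services.gen';

// Field names are assumptions; the authoritative shape is
// CreateBackfillData in types.gen.
const requestBody = {
    dag_id: 'example_dag',
    from_date: '2025-01-01T00:00:00Z',
    to_date: '2025-01-08T00:00:00Z'
};

// Preview which runs the backfill would create ...
const preview = await BackfillService.createBackfillDryRun({ requestBody });
console.log(preview);

// ... then create it for real. A 409 Conflict is declared for overlapping
// backfills, so that case is worth handling in a real caller.
const backfill = await BackfillService.createBackfill({ requestBody });
```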
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.active + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ + public static listBackfillsUi(data: ListBackfillsUiData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/backfills', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + dag_id: data.dagId, + active: data.active + }, + errors: { + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class ConnectionService { - /** - * Delete Connection - * Delete a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @returns void Successful Response - * @throws ApiError - */ - public static deleteConnection(data: DeleteConnectionData): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/connections/{connection_id}", - path: { - connection_id: data.connectionId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Connection - * Get a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ - public static getConnection(data: GetConnectionData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/connections/{connection_id}", - path: { - connection_id: data.connectionId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Connection - * Update a connection entry. - * @param data The data for the request. - * @param data.connectionId - * @param data.requestBody - * @param data.updateMask - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ - public static patchConnection(data: PatchConnectionData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/connections/{connection_id}", - path: { - connection_id: data.connectionId, - }, - query: { - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Connections - * Get all connection entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.connectionIdPattern - * @returns ConnectionCollectionResponse Successful Response - * @throws ApiError - */ - public static getConnections(data: GetConnectionsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/connections", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - connection_id_pattern: data.connectionIdPattern, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Post Connection - * Create connection entry. - * @param data The data for the request. 
- * @param data.requestBody - * @returns ConnectionResponse Successful Response - * @throws ApiError - */ - public static postConnection(data: PostConnectionData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/connections", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Bulk Connections - * Bulk create, update, and delete connections. - * @param data The data for the request. - * @param data.requestBody - * @returns BulkResponse Successful Response - * @throws ApiError - */ - public static bulkConnections(data: BulkConnectionsData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/connections", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } - - /** - * Test Connection - * Test an API connection. - * - * This method first creates an in-memory transient conn_id & exports that to an env var, - * as some hook classes tries to find out the `conn` from their __init__ method & errors out if not found. - * It also deletes the conn id env connection after the test. - * @param data The data for the request. - * @param data.requestBody - * @returns ConnectionTestResponse Successful Response - * @throws ApiError - */ - public static testConnection(data: TestConnectionData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/connections/test", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } - - /** - * Create Default Connections - * Create default connections. - * @returns void Successful Response - * @throws ApiError - */ - public static createDefaultConnections(): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/connections/defaults", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - }, - }); - } - - /** - * Hook Meta Data - * Retrieve information about available connection types (hook classes) and their parameters. - * @returns ConnectionHookMetaData Successful Response - * @throws ApiError - */ - public static hookMetaData(): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/connections/hook_meta", - }); - } + /** + * Delete Connection + * Delete a connection entry. + * @param data The data for the request. + * @param data.connectionId + * @returns void Successful Response + * @throws ApiError + */ + public static deleteConnection(data: DeleteConnectionData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/connections/{connection_id}', + path: { + connection_id: data.connectionId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Connection + * Get a connection entry. + * @param data The data for the request. 
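All of these generated methods reject with `ApiError` for the statuses listed in their `errors` map, so callers can branch on `status`. A sketch, assuming the conventional `./core/ApiError` location for this client style.

```typescript
import { ApiError } from './core/ApiError';
import { ConnectionService } from './services.gen';

// Generated methods reject with an ApiError carrying the HTTP status
// from the `errors` map declared on the operation.
try {
    await ConnectionService.deleteConnection({ connectionId: 'my_conn' });
} catch (err) {
    if (err instanceof ApiError && err.status === 404) {
        console.warn('connection did not exist');
    } else {
        throw err;
    }
}
```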
+     * @param data.connectionId
+     * @returns ConnectionResponse Successful Response
+     * @throws ApiError
+     */
+    public static getConnection(data: GetConnectionData): CancelablePromise<GetConnectionResponse> {
+        return __request(OpenAPI, {
+            method: 'GET',
+            url: '/api/v2/connections/{connection_id}',
+            path: {
+                connection_id: data.connectionId
+            },
+            errors: {
+                401: 'Unauthorized',
+                403: 'Forbidden',
+                404: 'Not Found',
+                422: 'Validation Error'
+            }
+        });
+    }
+
+    /**
+     * Patch Connection
+     * Update a connection entry.
+     * @param data The data for the request.
+     * @param data.connectionId
+     * @param data.requestBody
+     * @param data.updateMask
+     * @returns ConnectionResponse Successful Response
+     * @throws ApiError
+     */
+    public static patchConnection(data: PatchConnectionData): CancelablePromise<PatchConnectionResponse> {
+        return __request(OpenAPI, {
+            method: 'PATCH',
+            url: '/api/v2/connections/{connection_id}',
+            path: {
+                connection_id: data.connectionId
+            },
+            query: {
+                update_mask: data.updateMask
+            },
+            body: data.requestBody,
+            mediaType: 'application/json',
+            errors: {
+                400: 'Bad Request',
+                401: 'Unauthorized',
+                403: 'Forbidden',
+                404: 'Not Found',
+                422: 'Validation Error'
+            }
+        });
+    }
+
+    /**
+     * Get Connections
+     * Get all connection entries.
+     * @param data The data for the request.
+     * @param data.limit
+     * @param data.offset
+     * @param data.orderBy
+     * @param data.connectionIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+     * @returns ConnectionCollectionResponse Successful Response
+     * @throws ApiError
+     */
+    public static getConnections(data: GetConnectionsData = {}): CancelablePromise<GetConnectionsResponse> {
+        return __request(OpenAPI, {
+            method: 'GET',
+            url: '/api/v2/connections',
+            query: {
+                limit: data.limit,
+                offset: data.offset,
+                order_by: data.orderBy,
+                connection_id_pattern: data.connectionIdPattern
+            },
+            errors: {
+                401: 'Unauthorized',
+                403: 'Forbidden',
+                404: 'Not Found',
+                422: 'Validation Error'
+            }
+        });
+    }
+
+    /**
+     * Post Connection
+     * Create connection entry.
+     * @param data The data for the request.
+     * @param data.requestBody
+     * @returns ConnectionResponse Successful Response
+     * @throws ApiError
+     */
+    public static postConnection(data: PostConnectionData): CancelablePromise<PostConnectionResponse> {
+        return __request(OpenAPI, {
+            method: 'POST',
+            url: '/api/v2/connections',
+            body: data.requestBody,
+            mediaType: 'application/json',
+            errors: {
+                401: 'Unauthorized',
+                403: 'Forbidden',
+                409: 'Conflict',
+                422: 'Validation Error'
+            }
+        });
+    }
+
+    /**
+     * Bulk Connections
+     * Bulk create, update, and delete connections.
+     * @param data The data for the request.
+     * @param data.requestBody
+     * @returns BulkResponse Successful Response
+     * @throws ApiError
+     */
+    public static bulkConnections(data: BulkConnectionsData): CancelablePromise<BulkConnectionsResponse> {
+        return __request(OpenAPI, {
+            method: 'PATCH',
+            url: '/api/v2/connections',
+            body: data.requestBody,
+            mediaType: 'application/json',
+            errors: {
+                401: 'Unauthorized',
+                403: 'Forbidden',
+                422: 'Validation Error'
+            }
+        });
+    }
+
+    /**
+     * Test Connection
+     * Test an API connection.
+     *
+     * This method first creates an in-memory transient conn_id and exports it to an env var,
+     * as some hook classes try to look up the `conn` in their __init__ method and error out if it is not found.
+     * It also deletes the env var for the conn_id after the test.
+     * @param data The data for the request.
+ * @param data.requestBody + * @returns ConnectionTestResponse Successful Response + * @throws ApiError + */ + public static testConnection(data: TestConnectionData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/connections/test', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + + /** + * Create Default Connections + * Create default connections. + * @returns void Successful Response + * @throws ApiError + */ + public static createDefaultConnections(): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/connections/defaults', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden' + } + }); + } + + /** + * Hook Meta Data + * Retrieve information about available connection types (hook classes) and their parameters. + * @returns ConnectionHookMetaData Successful Response + * @throws ApiError + */ + public static hookMetaData(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/connections/hook_meta' + }); + } + } export class DagRunService { - /** - * Get Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ - public static getDagRun(data: GetDagRunData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Delete Dag Run - * Delete a DAG Run entry. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns void Successful Response - * @throws ApiError - */ - public static deleteDagRun(data: DeleteDagRunData): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Dag Run - * Modify a DAG Run. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.requestBody - * @param data.updateMask - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ - public static patchDagRun(data: PatchDagRunData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - query: { - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Upstream Asset Events - * If dag run is asset-triggered, return the asset events that triggered it. - * @param data The data for the request. 
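The test endpoint round-trips a full connection definition rather than a stored one, which is why the server needs the transient env-var dance described in the docstring above. A sketch; the body and response field names are assumptions based on the Airflow connection schema, and note that recent Airflow versions only serve this endpoint when connection testing is enabled in the server's configuration.

```typescript
import { ConnectionService } from './services.gen';

// Connection fields are illustrative; the response is assumed to carry
// a status flag and a message per ConnectionTestResponse.
const result = await ConnectionService.testConnection({
    requestBody: {
        connection_id: 'scratch_http',
        conn_type: 'http',
        host: 'https://example.com'
    }
});
console.log(result.status, result.message);
```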
- * @param data.dagId - * @param data.dagRunId - * @returns AssetEventCollectionResponse Successful Response - * @throws ApiError - */ - public static getUpstreamAssetEvents( - data: GetUpstreamAssetEventsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamAssetEvents", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Clear Dag Run - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.requestBody - * @returns unknown Successful Response - * @throws ApiError - */ - public static clearDagRun(data: ClearDagRunData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/clear", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Dag Runs - * Get all DAG Runs. - * - * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. - * @param data The data for the request. - * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.runType - * @param data.state - * @param data.orderBy - * @returns DAGRunCollectionResponse Successful Response - * @throws ApiError - */ - public static getDagRuns(data: GetDagRunsData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns", - path: { - dag_id: data.dagId, - }, - query: { - limit: data.limit, - offset: data.offset, - run_after_gte: data.runAfterGte, - run_after_lte: data.runAfterLte, - logical_date_gte: data.logicalDateGte, - logical_date_lte: data.logicalDateLte, - start_date_gte: data.startDateGte, - start_date_lte: data.startDateLte, - end_date_gte: data.endDateGte, - end_date_lte: data.endDateLte, - updated_at_gte: data.updatedAtGte, - updated_at_lte: data.updatedAtLte, - run_type: data.runType, - state: data.state, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Trigger Dag Run - * Trigger a DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.requestBody - * @returns DAGRunResponse Successful Response - * @throws ApiError - */ - public static triggerDagRun(data: TriggerDagRunData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/dags/{dag_id}/dagRuns", - path: { - dag_id: data.dagId, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } + /** + * Get Dag Run + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static getDagRun(data: GetDagRunData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Delete Dag Run + * Delete a DAG Run entry. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns void Successful Response + * @throws ApiError + */ + public static deleteDagRun(data: DeleteDagRunData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Dag Run + * Modify a DAG Run. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @param data.updateMask + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static patchDagRun(data: PatchDagRunData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + query: { + update_mask: data.updateMask + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Upstream Asset Events + * If dag run is asset-triggered, return the asset events that triggered it. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ + public static getUpstreamAssetEvents(data: GetUpstreamAssetEventsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamAssetEvents', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Clear Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @returns unknown Successful Response + * @throws ApiError + */ + public static clearDagRun(data: ClearDagRunData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/clear', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Dag Runs + * Get all DAG Runs. + * + * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. + * @param data The data for the request. 
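`patchDagRun` follows the usual update-mask convention: only the fields named in `updateMask` are applied from the body. A sketch with assumed body field names; verify them against the generated types.

```typescript
import { DagRunService } from './services.gen';

// Only fields listed in updateMask are applied. `note` as a patchable
// field is an assumption, as is the run id format.
await DagRunService.patchDagRun({
    dagId: 'example_dag',
    dagRunId: 'manual__2025-01-01T00:00:00+00:00',
    updateMask: ['note'],
    requestBody: { note: 'reviewed by on-call' }
});
```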
+ * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.runType + * @param data.state + * @param data.orderBy + * @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.triggeringUserNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagRuns(data: GetDagRunsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns', + path: { + dag_id: data.dagId + }, + query: { + limit: data.limit, + offset: data.offset, + run_after_gte: data.runAfterGte, + run_after_lte: data.runAfterLte, + logical_date_gte: data.logicalDateGte, + logical_date_lte: data.logicalDateLte, + start_date_gte: data.startDateGte, + start_date_lte: data.startDateLte, + end_date_gte: data.endDateGte, + end_date_lte: data.endDateLte, + updated_at_gte: data.updatedAtGte, + updated_at_lte: data.updatedAtLte, + run_type: data.runType, + state: data.state, + order_by: data.orderBy, + run_id_pattern: data.runIdPattern, + triggering_user_name_pattern: data.triggeringUserNamePattern + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Trigger Dag Run + * Trigger a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.requestBody + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static triggerDagRun(data: TriggerDagRunData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/dagRuns', + path: { + dag_id: data.dagId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Experimental: Wait for a dag run to complete, and return task results if requested. + * 🚧 This is an experimental endpoint and may change or be removed without notice. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.interval Seconds to wait between dag run state checks + * @param data.result Collect result XCom from task. Can be set multiple times. + * @returns unknown Successful Response + * @throws ApiError + */ + public static waitDagRunUntilFinished(data: WaitDagRunUntilFinishedData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + query: { + interval: data.interval, + result: data.result + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get List Dag Runs Batch + * Get a list of DAG Runs. + * @param data The data for the request. 
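The new `runIdPattern` and `triggeringUserNamePattern` filters follow the same LIKE semantics as the other pattern params. An illustrative query, with made-up values:

```typescript
import { DagRunService } from './services.gen';

// LIKE-style filtering on run id, combined with the existing state and
// ordering params; all values here are illustrative.
const runs = await DagRunService.getDagRuns({
    dagId: 'example_dag',
    runIdPattern: 'manual__%',
    state: ['failed'],
    orderBy: '-run_after',
    limit: 25
});
```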
+ * @param data.dagId + * @param data.requestBody + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ + public static getListDagRunsBatch(data: GetListDagRunsBatchData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/dagRuns/list', + path: { + dag_id: data.dagId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + +} - /** - * Get List Dag Runs Batch - * Get a list of DAG Runs. - * @param data The data for the request. - * @param data.dagId - * @param data.requestBody - * @returns DAGRunCollectionResponse Successful Response - * @throws ApiError - */ - public static getListDagRunsBatch( - data: GetListDagRunsBatchData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/dags/{dag_id}/dagRuns/list", - path: { - dag_id: data.dagId, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } +export class ExperimentalService { + /** + * Experimental: Wait for a dag run to complete, and return task results if requested. + * 🚧 This is an experimental endpoint and may change or be removed without notice. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.interval Seconds to wait between dag run state checks + * @param data.result Collect result XCom from task. Can be set multiple times. + * @returns unknown Successful Response + * @throws ApiError + */ + public static waitDagRunUntilFinished(data: WaitDagRunUntilFinishedData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + query: { + interval: data.interval, + result: data.result + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class DagSourceService { - /** - * Get Dag Source - * Get source code using file token. - * @param data The data for the request. - * @param data.dagId - * @param data.versionNumber - * @param data.accept - * @returns DAGSourceResponse Successful Response - * @throws ApiError - */ - public static getDagSource(data: GetDagSourceData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dagSources/{dag_id}", - path: { - dag_id: data.dagId, - }, - headers: { - accept: data.accept, - }, - query: { - version_number: data.versionNumber, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 406: "Not Acceptable", - 422: "Validation Error", - }, - }); - } + /** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. 
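Note that `waitDagRunUntilFinished` is emitted twice, once on `DagRunService` and once on the new `ExperimentalService`; that is what this generator does when an operation carries two tags, and either entry point hits the same route. A trigger-then-wait sketch; the trigger body is a minimal assumed shape, so see the generated `TriggerDagRunData` for the real one.

```typescript
import { DagRunService, ExperimentalService } from './services.gen';

// Trigger a run, then poll every 5 seconds until it reaches a terminal
// state, collecting the XCom result of one task along the way.
const run = await DagRunService.triggerDagRun({
    dagId: 'example_dag',
    requestBody: { logical_date: null }
});
const outcome = await ExperimentalService.waitDagRunUntilFinished({
    dagId: 'example_dag',
    dagRunId: run.dag_run_id,
    interval: 5,
    result: ['my_task']
});
console.log(outcome);
```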
+ * @param data.dagId + * @param data.versionNumber + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ + public static getDagSource(data: GetDagSourceData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dagSources/{dag_id}', + path: { + dag_id: data.dagId + }, + headers: { + accept: data.accept + }, + query: { + version_number: data.versionNumber + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 406: 'Not Acceptable', + 422: 'Validation Error' + } + }); + } + } export class DagStatsService { - /** - * Get Dag Stats - * Get Dag statistics. - * @param data The data for the request. - * @param data.dagIds - * @returns DagStatsCollectionResponse Successful Response - * @throws ApiError - */ - public static getDagStats(data: GetDagStatsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dagStats", - query: { - dag_ids: data.dagIds, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. + * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagStats(data: GetDagStatsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dagStats', + query: { + dag_ids: data.dagIds + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class DagReportService { - /** - * Get Dag Reports - * Get DAG report. - * @param data The data for the request. - * @param data.subdir - * @returns unknown Successful Response - * @throws ApiError - */ - public static getDagReports(data: GetDagReportsData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dagReports", - query: { - subdir: data.subdir, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Get Dag Reports + * Get DAG report. + * @param data The data for the request. + * @param data.subdir + * @returns unknown Successful Response + * @throws ApiError + */ + public static getDagReports(data: GetDagReportsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dagReports', + query: { + subdir: data.subdir + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class ConfigService { - /** - * Get Config - * @param data The data for the request. - * @param data.section - * @param data.accept - * @returns Config Successful Response - * @throws ApiError - */ - public static getConfig(data: GetConfigData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/config", - headers: { - accept: data.accept, - }, - query: { - section: data.section, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 406: "Not Acceptable", - 422: "Validation Error", - }, - }); - } - - /** - * Get Config Value - * @param data The data for the request. 
- * @param data.section - * @param data.option - * @param data.accept - * @returns Config Successful Response - * @throws ApiError - */ - public static getConfigValue(data: GetConfigValueData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/config/section/{section}/option/{option}", - path: { - section: data.section, - option: data.option, - }, - headers: { - accept: data.accept, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 406: "Not Acceptable", - 422: "Validation Error", - }, - }); - } - - /** - * Get Configs - * Get configs for UI. - * @returns ConfigResponse Successful Response - * @throws ApiError - */ - public static getConfigs(): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/config", - errors: { - 404: "Not Found", - }, - }); - } + /** + * Get Config + * @param data The data for the request. + * @param data.section + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ + public static getConfig(data: GetConfigData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/config', + headers: { + accept: data.accept + }, + query: { + section: data.section + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 406: 'Not Acceptable', + 422: 'Validation Error' + } + }); + } + + /** + * Get Config Value + * @param data The data for the request. + * @param data.section + * @param data.option + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ + public static getConfigValue(data: GetConfigValueData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/config/section/{section}/option/{option}', + path: { + section: data.section, + option: data.option + }, + headers: { + accept: data.accept + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 406: 'Not Acceptable', + 422: 'Validation Error' + } + }); + } + + /** + * Get Configs + * Get configs for UI. + * @returns ConfigResponse Successful Response + * @throws ApiError + */ + public static getConfigs(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/config', + errors: { + 404: 'Not Found' + } + }); + } + } export class DagWarningService { - /** - * List Dag Warnings - * Get a list of DAG warnings. - * @param data The data for the request. - * @param data.dagId - * @param data.warningType - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns DAGWarningCollectionResponse Successful Response - * @throws ApiError - */ - public static listDagWarnings(data: ListDagWarningsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dagWarnings", - query: { - dag_id: data.dagId, - warning_type: data.warningType, - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. 
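Both config endpoints negotiate on the `Accept` header and declare 406 for unsupported values, so `application/json` is the predictable choice from TypeScript. For example:

```typescript
import { ConfigService } from './services.gen';

// `accept` maps onto the HTTP Accept header; the server answers 406 for
// values it will not render.
const value = await ConfigService.getConfigValue({
    section: 'core',
    option: 'parallelism',
    accept: 'application/json'
});
```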
+ * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ + public static listDagWarnings(data: ListDagWarningsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dagWarnings', + query: { + dag_id: data.dagId, + warning_type: data.warningType, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class DagService { - /** - * Get Dags - * Get all DAGs. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @param data.dagRunStartDateGte - * @param data.dagRunStartDateLte - * @param data.dagRunEndDateGte - * @param data.dagRunEndDateLte - * @param data.dagRunState - * @param data.orderBy - * @returns DAGCollectionResponse Successful Response - * @throws ApiError - */ - public static getDags(data: GetDagsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags", - query: { - limit: data.limit, - offset: data.offset, - tags: data.tags, - tags_match_mode: data.tagsMatchMode, - owners: data.owners, - dag_id_pattern: data.dagIdPattern, - dag_display_name_pattern: data.dagDisplayNamePattern, - exclude_stale: data.excludeStale, - paused: data.paused, - last_dag_run_state: data.lastDagRunState, - dag_run_start_date_gte: data.dagRunStartDateGte, - dag_run_start_date_lte: data.dagRunStartDateLte, - dag_run_end_date_gte: data.dagRunEndDateGte, - dag_run_end_date_lte: data.dagRunEndDateLte, - dag_run_state: data.dagRunState, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Dags - * Patch multiple DAGs. - * @param data The data for the request. - * @param data.requestBody - * @param data.updateMask - * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIdPattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @returns DAGCollectionResponse Successful Response - * @throws ApiError - */ - public static patchDags(data: PatchDagsData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags", - query: { - update_mask: data.updateMask, - limit: data.limit, - offset: data.offset, - tags: data.tags, - tags_match_mode: data.tagsMatchMode, - owners: data.owners, - dag_id_pattern: data.dagIdPattern, - exclude_stale: data.excludeStale, - paused: data.paused, - last_dag_run_state: data.lastDagRunState, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Dag - * Get basic information about a DAG. - * @param data The data for the request. 
- * @param data.dagId - * @returns DAGResponse Successful Response - * @throws ApiError - */ - public static getDag(data: GetDagData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}", - path: { - dag_id: data.dagId, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Unprocessable Entity", - }, - }); - } - - /** - * Patch Dag - * Patch the specific DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.requestBody - * @param data.updateMask - * @returns DAGResponse Successful Response - * @throws ApiError - */ - public static patchDag(data: PatchDagData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags/{dag_id}", - path: { - dag_id: data.dagId, - }, - query: { - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Delete Dag - * Delete the specific DAG. - * @param data The data for the request. - * @param data.dagId - * @returns unknown Successful Response - * @throws ApiError - */ - public static deleteDag(data: DeleteDagData): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/dags/{dag_id}", - path: { - dag_id: data.dagId, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Unprocessable Entity", - }, - }); - } - - /** - * Get Dag Details - * Get details of DAG. - * @param data The data for the request. - * @param data.dagId - * @returns DAGDetailsResponse Successful Response - * @throws ApiError - */ - public static getDagDetails(data: GetDagDetailsData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/details", - path: { - dag_id: data.dagId, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Dag Tags - * Get all DAG tags. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.tagNamePattern - * @returns DAGTagCollectionResponse Successful Response - * @throws ApiError - */ - public static getDagTags(data: GetDagTagsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dagTags", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - tag_name_pattern: data.tagNamePattern, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Get Dags + * Get all DAGs. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
+ * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @param data.bundleName + * @param data.bundleVersion + * @param data.dagRunStartDateGte + * @param data.dagRunStartDateLte + * @param data.dagRunEndDateGte + * @param data.dagRunEndDateLte + * @param data.dagRunState + * @param data.orderBy + * @param data.isFavorite + * @returns DAGCollectionResponse Successful Response + * @throws ApiError + */ + public static getDags(data: GetDagsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags', + query: { + limit: data.limit, + offset: data.offset, + tags: data.tags, + tags_match_mode: data.tagsMatchMode, + owners: data.owners, + dag_id_pattern: data.dagIdPattern, + dag_display_name_pattern: data.dagDisplayNamePattern, + exclude_stale: data.excludeStale, + paused: data.paused, + last_dag_run_state: data.lastDagRunState, + bundle_name: data.bundleName, + bundle_version: data.bundleVersion, + dag_run_start_date_gte: data.dagRunStartDateGte, + dag_run_start_date_lte: data.dagRunStartDateLte, + dag_run_end_date_gte: data.dagRunEndDateGte, + dag_run_end_date_lte: data.dagRunEndDateLte, + dag_run_state: data.dagRunState, + order_by: data.orderBy, + is_favorite: data.isFavorite + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Dags + * Patch multiple DAGs. + * @param data The data for the request. + * @param data.requestBody + * @param data.updateMask + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.excludeStale + * @param data.paused + * @returns DAGCollectionResponse Successful Response + * @throws ApiError + */ + public static patchDags(data: PatchDagsData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags', + query: { + update_mask: data.updateMask, + limit: data.limit, + offset: data.offset, + tags: data.tags, + tags_match_mode: data.tagsMatchMode, + owners: data.owners, + dag_id_pattern: data.dagIdPattern, + exclude_stale: data.excludeStale, + paused: data.paused + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ + public static getDag(data: GetDagData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}', + path: { + dag_id: data.dagId + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Unprocessable Entity' + } + }); + } + + /** + * Patch Dag + * Patch the specific DAG. + * @param data The data for the request. 
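+ * @example
+ * // Editor-added sketch: pause a DAG. The `is_paused` body field and the
+ * // update-mask value are assumptions about types not shown in this diff.
+ * await DagService.patchDag({
+ *   dagId: 'example_dag',
+ *   requestBody: { is_paused: true },
+ *   updateMask: ['is_paused']
+ * });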
+ * @param data.dagId + * @param data.requestBody + * @param data.updateMask + * @returns DAGResponse Successful Response + * @throws ApiError + */ + public static patchDag(data: PatchDagData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags/{dag_id}', + path: { + dag_id: data.dagId + }, + query: { + update_mask: data.updateMask + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Delete Dag + * Delete the specific DAG. + * @param data The data for the request. + * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ + public static deleteDag(data: DeleteDagData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/dags/{dag_id}', + path: { + dag_id: data.dagId + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Unprocessable Entity' + } + }); + } + + /** + * Get Dag Details + * Get details of DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGDetailsResponse Successful Response + * @throws ApiError + */ + public static getDagDetails(data: GetDagDetailsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/details', + path: { + dag_id: data.dagId + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Favorite Dag + * Mark the DAG as favorite. + * @param data The data for the request. + * @param data.dagId + * @returns void Successful Response + * @throws ApiError + */ + public static favoriteDag(data: FavoriteDagData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/favorite', + path: { + dag_id: data.dagId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Unfavorite Dag + * Unmark the DAG as favorite. + * @param data The data for the request. + * @param data.dagId + * @returns void Successful Response + * @throws ApiError + */ + public static unfavoriteDag(data: UnfavoriteDagData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/unfavorite', + path: { + dag_id: data.dagId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagTags(data: GetDagTagsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dagTags', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + tag_name_pattern: data.tagNamePattern + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + + /** + * Get Dags + * Get DAGs with recent DagRun. + * @param data The data for the request. 
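+ * @example
+ * // Editor-added sketch: fetch dashboard DAGs, filtering ids with a SQL LIKE
+ * // pattern (`%` / `_` wildcards). All values are placeholders.
+ * const dags = await DagService.getDagsUi({
+ *   dagRunsLimit: 5,
+ *   dagIdPattern: '%customer_%'
+ * });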
+ * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIds + * @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.dagDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @param data.bundleName + * @param data.bundleVersion + * @param data.orderBy + * @param data.isFavorite + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagsUi(data: GetDagsUiData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/dags', + query: { + dag_runs_limit: data.dagRunsLimit, + limit: data.limit, + offset: data.offset, + tags: data.tags, + tags_match_mode: data.tagsMatchMode, + owners: data.owners, + dag_ids: data.dagIds, + dag_id_pattern: data.dagIdPattern, + dag_display_name_pattern: data.dagDisplayNamePattern, + exclude_stale: data.excludeStale, + paused: data.paused, + last_dag_run_state: data.lastDagRunState, + bundle_name: data.bundleName, + bundle_version: data.bundleVersion, + order_by: data.orderBy, + is_favorite: data.isFavorite + }, + errors: { + 422: 'Validation Error' + } + }); + } + + /** + * Get Latest Run Info + * Get latest run. + * @param data The data for the request. + * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ + public static getLatestRunInfo(data: GetLatestRunInfoData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/dags/{dag_id}/latest_run', + path: { + dag_id: data.dagId + }, + errors: { + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class EventLogService { - /** - * Get Event Log - * @param data The data for the request. - * @param data.eventLogId - * @returns EventLogResponse Successful Response - * @throws ApiError - */ - public static getEventLog(data: GetEventLogData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/eventLogs/{event_log_id}", - path: { - event_log_id: data.eventLogId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Event Logs - * Get all Event Logs. - * @param data The data for the request. 
- * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.dagId - * @param data.taskId - * @param data.runId - * @param data.mapIndex - * @param data.tryNumber - * @param data.owner - * @param data.event - * @param data.excludedEvents - * @param data.includedEvents - * @param data.before - * @param data.after - * @returns EventLogCollectionResponse Successful Response - * @throws ApiError - */ - public static getEventLogs(data: GetEventLogsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/eventLogs", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - dag_id: data.dagId, - task_id: data.taskId, - run_id: data.runId, - map_index: data.mapIndex, - try_number: data.tryNumber, - owner: data.owner, - event: data.event, - excluded_events: data.excludedEvents, - included_events: data.includedEvents, - before: data.before, - after: data.after, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Get Event Log + * @param data The data for the request. + * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ + public static getEventLog(data: GetEventLogData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/eventLogs/{event_log_id}', + path: { + event_log_id: data.eventLogId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ + public static getEventLogs(data: GetEventLogsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/eventLogs', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + dag_id: data.dagId, + task_id: data.taskId, + run_id: data.runId, + map_index: data.mapIndex, + try_number: data.tryNumber, + owner: data.owner, + event: data.event, + excluded_events: data.excludedEvents, + included_events: data.includedEvents, + before: data.before, + after: data.after + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class ExtraLinksService { - /** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ - public static getExtraLinks(data: GetExtraLinksData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Extra Links + * Get extra links for task instance. 
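+ * @example
+ * // Editor-added sketch: identifiers are placeholders.
+ * const links = await ExtraLinksService.getExtraLinks({
+ *   dagId: 'example_dag',
+ *   dagRunId: 'manual__2025-01-01T00:00:00+00:00',
+ *   taskId: 'extract'
+ * });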
+ * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ + public static getExtraLinks(data: GetExtraLinksData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class TaskInstanceService { - /** - * Get Extra Links - * Get extra links for task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns ExtraLinkCollectionResponse Successful Response - * @throws ApiError - */ - public static getExtraLinks(data: GetExtraLinksData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task Instance - * Get task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ - public static getTaskInstance(data: GetTaskInstanceData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Task Instance - * Update a task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.requestBody - * @param data.mapIndex - * @param data.updateMask - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ - public static patchTaskInstance(data: PatchTaskInstanceData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - map_index: data.mapIndex, - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Get Mapped Task Instances - * Get list of mapped task instances. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ - public static getMappedTaskInstances( - data: GetMappedTaskInstancesData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - run_after_gte: data.runAfterGte, - run_after_lte: data.runAfterLte, - logical_date_gte: data.logicalDateGte, - logical_date_lte: data.logicalDateLte, - start_date_gte: data.startDateGte, - start_date_lte: data.startDateLte, - end_date_gte: data.endDateGte, - end_date_lte: data.endDateLte, - updated_at_gte: data.updatedAtGte, - updated_at_lte: data.updatedAtLte, - duration_gte: data.durationGte, - duration_lte: data.durationLte, - state: data.state, - pool: data.pool, - queue: data.queue, - executor: data.executor, - version_number: data.versionNumber, - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ - public static getTaskInstanceDependencies( - data: GetTaskInstanceDependenciesData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task Instance Dependencies - * Get dependencies blocking task from getting scheduled. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskDependencyCollectionResponse Successful Response - * @throws ApiError - */ - public static getTaskInstanceDependencies1( - data: GetTaskInstanceDependencies1Data, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task Instance Tries - * Get list of task instances history. 
- * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ - public static getTaskInstanceTries( - data: GetTaskInstanceTriesData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Mapped Task Instance Tries - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceHistoryCollectionResponse Successful Response - * @throws ApiError - */ - public static getMappedTaskInstanceTries( - data: GetMappedTaskInstanceTriesData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Mapped Task Instance - * Get task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ - public static getMappedTaskInstance( - data: GetMappedTaskInstanceData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Task Instance - * Update a task instance. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @param data.requestBody - * @param data.updateMask - * @returns TaskInstanceResponse Successful Response - * @throws ApiError - */ - public static patchTaskInstance1( - data: PatchTaskInstance1Data, - ): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - map_index: data.mapIndex, - }, - query: { - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task Instances - * Get list of task instances. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs - * and DAG runs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.updatedAtGte - * @param data.updatedAtLte - * @param data.durationGte - * @param data.durationLte - * @param data.taskDisplayNamePattern - * @param data.state - * @param data.pool - * @param data.queue - * @param data.executor - * @param data.versionNumber - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ - public static getTaskInstances(data: GetTaskInstancesData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - query: { - task_id: data.taskId, - run_after_gte: data.runAfterGte, - run_after_lte: data.runAfterLte, - logical_date_gte: data.logicalDateGte, - logical_date_lte: data.logicalDateLte, - start_date_gte: data.startDateGte, - start_date_lte: data.startDateLte, - end_date_gte: data.endDateGte, - end_date_lte: data.endDateLte, - updated_at_gte: data.updatedAtGte, - updated_at_lte: data.updatedAtLte, - duration_gte: data.durationGte, - duration_lte: data.durationLte, - task_display_name_pattern: data.taskDisplayNamePattern, - state: data.state, - pool: data.pool, - queue: data.queue, - executor: data.executor, - version_number: data.versionNumber, - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task Instances Batch - * Get list of task instances. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.requestBody - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ - public static getTaskInstancesBatch( - data: GetTaskInstancesBatchData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task Instance Try Details - * Get task instance details by try number. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ - public static getTaskInstanceTryDetails( - data: GetTaskInstanceTryDetailsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - task_try_number: data.taskTryNumber, - }, - query: { - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Mapped Task Instance Try Details - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.taskTryNumber - * @param data.mapIndex - * @returns TaskInstanceHistoryResponse Successful Response - * @throws ApiError - */ - public static getMappedTaskInstanceTryDetails( - data: GetMappedTaskInstanceTryDetailsData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - task_try_number: data.taskTryNumber, - map_index: data.mapIndex, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Post Clear Task Instances - * Clear task instances. - * @param data The data for the request. - * @param data.dagId - * @param data.requestBody - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ - public static postClearTaskInstances( - data: PostClearTaskInstancesData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/dags/{dag_id}/clearTaskInstances", - path: { - dag_id: data.dagId, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Task Instance Dry Run - * Update a task instance dry_run mode. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.mapIndex - * @param data.requestBody - * @param data.updateMask - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ - public static patchTaskInstanceDryRun( - data: PatchTaskInstanceDryRunData, - ): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dry_run", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - map_index: data.mapIndex, - }, - query: { - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Task Instance Dry Run - * Update a task instance dry_run mode. - * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.requestBody - * @param data.mapIndex - * @param data.updateMask - * @returns TaskInstanceCollectionResponse Successful Response - * @throws ApiError - */ - public static patchTaskInstanceDryRun1( - data: PatchTaskInstanceDryRun1Data, - ): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dry_run", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - map_index: data.mapIndex, - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Log - * Get logs for a specific task instance. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.tryNumber - * @param data.fullContent - * @param data.mapIndex - * @param data.token - * @param data.accept - * @returns TaskInstancesLogResponse Successful Response - * @throws ApiError - */ - public static getLog(data: GetLogData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - try_number: data.tryNumber, - }, - headers: { - accept: data.accept, - }, - query: { - full_content: data.fullContent, - map_index: data.mapIndex, - token: data.token, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ + public static getExtraLinks(data: GetExtraLinksData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ + public static getTaskInstance(data: GetTaskInstanceData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Task Instance + * Update a task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.requestBody + * @param data.mapIndex + * @param data.updateMask + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ + public static patchTaskInstance(data: PatchTaskInstanceData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + map_index: data.mapIndex, + update_mask: data.updateMask + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Delete Task Instance + * Delete a task instance. + * @param data The data for the request. 
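+ * @example
+ * // Editor-added sketch: delete one mapped task instance. Identifiers and
+ * // the map index are placeholders.
+ * await TaskInstanceService.deleteTaskInstance({
+ *   dagId: 'example_dag',
+ *   dagRunId: 'manual__2025-01-01T00:00:00+00:00',
+ *   taskId: 'extract',
+ *   mapIndex: 0
+ * });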
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns null Successful Response + * @throws ApiError + */ + public static deleteTaskInstance(data: DeleteTaskInstanceData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Mapped Task Instances + * Get list of mapped task instances. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ + public static getMappedTaskInstances(data: GetMappedTaskInstancesData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + run_after_gte: data.runAfterGte, + run_after_lte: data.runAfterLte, + logical_date_gte: data.logicalDateGte, + logical_date_lte: data.logicalDateLte, + start_date_gte: data.startDateGte, + start_date_lte: data.startDateLte, + end_date_gte: data.endDateGte, + end_date_lte: data.endDateLte, + updated_at_gte: data.updatedAtGte, + updated_at_lte: data.updatedAtLte, + duration_gte: data.durationGte, + duration_lte: data.durationLte, + state: data.state, + pool: data.pool, + queue: data.queue, + executor: data.executor, + version_number: data.versionNumber, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ + public static getTaskInstanceDependenciesByMapIndex(data: GetTaskInstanceDependenciesByMapIndexData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. 
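+ * @example
+ * // Editor-added sketch: inspect why a task is not yet scheduled.
+ * // Identifiers are placeholders.
+ * const deps = await TaskInstanceService.getTaskInstanceDependencies({
+ *   dagId: 'example_dag',
+ *   dagRunId: 'manual__2025-01-01T00:00:00+00:00',
+ *   taskId: 'extract'
+ * });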
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ + public static getTaskInstanceDependencies(data: GetTaskInstanceDependenciesData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ + public static getTaskInstanceTries(data: GetTaskInstanceTriesData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Mapped Task Instance Tries + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ + public static getMappedTaskInstanceTries(data: GetMappedTaskInstanceTriesData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ + public static getMappedTaskInstance(data: GetMappedTaskInstanceData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Task Instance + * Update a task instance. + * @param data The data for the request. 
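+ * @example
+ * // Editor-added sketch: set a mapped task instance's state. The `new_state`
+ * // body field is an assumption about the request-body type.
+ * await TaskInstanceService.patchTaskInstanceByMapIndex({
+ *   dagId: 'example_dag',
+ *   dagRunId: 'manual__2025-01-01T00:00:00+00:00',
+ *   taskId: 'extract',
+ *   mapIndex: 0,
+ *   requestBody: { new_state: 'success' }
+ * });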
+ * @param data.dagId
+ * @param data.dagRunId
+ * @param data.taskId
+ * @param data.mapIndex
+ * @param data.requestBody
+ * @param data.updateMask
+ * @returns TaskInstanceCollectionResponse Successful Response
+ * @throws ApiError
+ */
+ public static patchTaskInstanceByMapIndex(data: PatchTaskInstanceByMapIndexData): CancelablePromise {
+ return __request(OpenAPI, {
+ method: 'PATCH',
+ url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}',
+ path: {
+ dag_id: data.dagId,
+ dag_run_id: data.dagRunId,
+ task_id: data.taskId,
+ map_index: data.mapIndex
+ },
+ query: {
+ update_mask: data.updateMask
+ },
+ body: data.requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 400: 'Bad Request',
+ 401: 'Unauthorized',
+ 403: 'Forbidden',
+ 404: 'Not Found',
+ 409: 'Conflict',
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Get Task Instances
+ * Get list of task instances.
+ *
+ * This endpoint allows specifying `~` as the dag_id or dag_run_id to retrieve Task Instances for all DAGs
+ * and DAG runs.
+ * @param data The data for the request.
+ * @param data.dagId
+ * @param data.dagRunId
+ * @param data.taskId
+ * @param data.runAfterGte
+ * @param data.runAfterLte
+ * @param data.logicalDateGte
+ * @param data.logicalDateLte
+ * @param data.startDateGte
+ * @param data.startDateLte
+ * @param data.endDateGte
+ * @param data.endDateLte
+ * @param data.updatedAtGte
+ * @param data.updatedAtLte
+ * @param data.durationGte
+ * @param data.durationLte
+ * @param data.taskDisplayNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
+ * @param data.state
+ * @param data.pool
+ * @param data.queue
+ * @param data.executor
+ * @param data.versionNumber
+ * @param data.limit
+ * @param data.offset
+ * @param data.orderBy
+ * @returns TaskInstanceCollectionResponse Successful Response
+ * @throws ApiError
+ */
+ public static getTaskInstances(data: GetTaskInstancesData): CancelablePromise {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances',
+ path: {
+ dag_id: data.dagId,
+ dag_run_id: data.dagRunId
+ },
+ query: {
+ task_id: data.taskId,
+ run_after_gte: data.runAfterGte,
+ run_after_lte: data.runAfterLte,
+ logical_date_gte: data.logicalDateGte,
+ logical_date_lte: data.logicalDateLte,
+ start_date_gte: data.startDateGte,
+ start_date_lte: data.startDateLte,
+ end_date_gte: data.endDateGte,
+ end_date_lte: data.endDateLte,
+ updated_at_gte: data.updatedAtGte,
+ updated_at_lte: data.updatedAtLte,
+ duration_gte: data.durationGte,
+ duration_lte: data.durationLte,
+ task_display_name_pattern: data.taskDisplayNamePattern,
+ state: data.state,
+ pool: data.pool,
+ queue: data.queue,
+ executor: data.executor,
+ version_number: data.versionNumber,
+ limit: data.limit,
+ offset: data.offset,
+ order_by: data.orderBy
+ },
+ errors: {
+ 401: 'Unauthorized',
+ 403: 'Forbidden',
+ 404: 'Not Found',
+ 422: 'Validation Error'
+ }
+ });
+ }
+
+ /**
+ * Bulk Task Instances
+ * Bulk update and delete task instances.
+ * @param data The data for the request.
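+ * @example
+ * // Editor-added sketch: values are placeholders, and the body shape is an
+ * // assumption; see the generated request-body type for the exact structure.
+ * const result = await TaskInstanceService.bulkTaskInstances({
+ *   dagId: 'example_dag',
+ *   dagRunId: 'manual__2025-01-01T00:00:00+00:00',
+ *   requestBody: { actions: [] }
+ * });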
+ * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @returns BulkResponse Successful Response + * @throws ApiError + */ + public static bulkTaskInstances(data: BulkTaskInstancesData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + + /** + * Get Task Instances Batch + * Get list of task instances. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ + public static getTaskInstancesBatch(data: GetTaskInstancesBatchData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Task Instance Try Details + * Get task instance details by try number. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ + public static getTaskInstanceTryDetails(data: GetTaskInstanceTryDetailsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + task_try_number: data.taskTryNumber + }, + query: { + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Mapped Task Instance Try Details + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ + public static getMappedTaskInstanceTryDetails(data: GetMappedTaskInstanceTryDetailsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + task_try_number: data.taskTryNumber, + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Post Clear Task Instances + * Clear task instances. + * @param data The data for the request. 
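+ * @example
+ * // Editor-added sketch: preview a clear without applying it. The `dry_run`
+ * // and `task_ids` body fields are assumptions about the request-body type.
+ * const affected = await TaskInstanceService.postClearTaskInstances({
+ *   dagId: 'example_dag',
+ *   requestBody: { dry_run: true, task_ids: ['extract'] }
+ * });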
+ * @param data.dagId + * @param data.requestBody + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ + public static postClearTaskInstances(data: PostClearTaskInstancesData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/clearTaskInstances', + path: { + dag_id: data.dagId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Task Instance Dry Run + * Update a task instance dry_run mode. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @param data.requestBody + * @param data.updateMask + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ + public static patchTaskInstanceDryRunByMapIndex(data: PatchTaskInstanceDryRunByMapIndexData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dry_run', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + query: { + update_mask: data.updateMask + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Task Instance Dry Run + * Update a task instance dry_run mode. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.requestBody + * @param data.mapIndex + * @param data.updateMask + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ + public static patchTaskInstanceDryRun(data: PatchTaskInstanceDryRunData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dry_run', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + map_index: data.mapIndex, + update_mask: data.updateMask + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Log + * Get logs for a specific task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.tryNumber + * @param data.fullContent + * @param data.mapIndex + * @param data.token + * @param data.accept + * @returns TaskInstancesLogResponse Successful Response + * @throws ApiError + */ + public static getLog(data: GetLogData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + try_number: data.tryNumber + }, + headers: { + accept: data.accept + }, + query: { + full_content: data.fullContent, + map_index: data.mapIndex, + token: data.token + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get External Log Url + * Get external log URL for a specific task instance. 
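+ * @example
+ * // Editor-added sketch: identifiers and try number are placeholders.
+ * const res = await TaskInstanceService.getExternalLogUrl({
+ *   dagId: 'example_dag',
+ *   dagRunId: 'manual__2025-01-01T00:00:00+00:00',
+ *   taskId: 'extract',
+ *   tryNumber: 1
+ * });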
+ * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.tryNumber + * @param data.mapIndex + * @returns ExternalLogUrlResponse Successful Response + * @throws ApiError + */ + public static getExternalLogUrl(data: GetExternalLogUrlData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/externalLogUrl/{try_number}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + try_number: data.tryNumber + }, + query: { + map_index: data.mapIndex + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class ImportErrorService { - /** - * Get Import Error - * Get an import error. - * @param data The data for the request. - * @param data.importErrorId - * @returns ImportErrorResponse Successful Response - * @throws ApiError - */ - public static getImportError(data: GetImportErrorData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/importErrors/{import_error_id}", - path: { - import_error_id: data.importErrorId, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Import Errors - * Get all import errors. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @returns ImportErrorCollectionResponse Successful Response - * @throws ApiError - */ - public static getImportErrors(data: GetImportErrorsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/importErrors", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ + public static getImportError(data: GetImportErrorData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/importErrors/{import_error_id}', + path: { + import_error_id: data.importErrorId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ + public static getImportErrors(data: GetImportErrorsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/importErrors', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class JobService { - /** - * Get Jobs - * Get all jobs. - * @param data The data for the request. 
- * @param data.isAlive - * @param data.startDateGte - * @param data.startDateLte - * @param data.endDateGte - * @param data.endDateLte - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.jobState - * @param data.jobType - * @param data.hostname - * @param data.executorClass - * @returns JobCollectionResponse Successful Response - * @throws ApiError - */ - public static getJobs(data: GetJobsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/jobs", - query: { - is_alive: data.isAlive, - start_date_gte: data.startDateGte, - start_date_lte: data.startDateLte, - end_date_gte: data.endDateGte, - end_date_lte: data.endDateLte, - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - job_state: data.jobState, - job_type: data.jobType, - hostname: data.hostname, - executor_class: data.executorClass, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Get Jobs + * Get all jobs. + * @param data The data for the request. + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response + * @throws ApiError + */ + public static getJobs(data: GetJobsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/jobs', + query: { + is_alive: data.isAlive, + start_date_gte: data.startDateGte, + start_date_lte: data.startDateLte, + end_date_gte: data.endDateGte, + end_date_lte: data.endDateLte, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + job_state: data.jobState, + job_type: data.jobType, + hostname: data.hostname, + executor_class: data.executorClass + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class PluginService { - /** - * Get Plugins - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @returns PluginCollectionResponse Successful Response - * @throws ApiError - */ - public static getPlugins(data: GetPluginsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/plugins", - query: { - limit: data.limit, - offset: data.offset, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ + public static getPlugins(data: GetPluginsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/plugins', + query: { + limit: data.limit, + offset: data.offset + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + + /** + * Import Errors + * @returns PluginImportErrorCollectionResponse Successful Response + * @throws ApiError + */ + public static importErrors(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/plugins/importErrors', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden' + } + }); + } + } export class PoolService { - /** - * Delete Pool - * Delete a pool entry. 
- * @param data The data for the request. - * @param data.poolName - * @returns void Successful Response - * @throws ApiError - */ - public static deletePool(data: DeletePoolData): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/pools/{pool_name}", - path: { - pool_name: data.poolName, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Pool - * Get a pool. - * @param data The data for the request. - * @param data.poolName - * @returns PoolResponse Successful Response - * @throws ApiError - */ - public static getPool(data: GetPoolData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/pools/{pool_name}", - path: { - pool_name: data.poolName, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Pool - * Update a Pool. - * @param data The data for the request. - * @param data.poolName - * @param data.requestBody - * @param data.updateMask - * @returns PoolResponse Successful Response - * @throws ApiError - */ - public static patchPool(data: PatchPoolData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/pools/{pool_name}", - path: { - pool_name: data.poolName, - }, - query: { - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Pools - * Get all pools entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.poolNamePattern - * @returns PoolCollectionResponse Successful Response - * @throws ApiError - */ - public static getPools(data: GetPoolsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/pools", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - pool_name_pattern: data.poolNamePattern, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Post Pool - * Create a Pool. - * @param data The data for the request. - * @param data.requestBody - * @returns PoolResponse Successful Response - * @throws ApiError - */ - public static postPool(data: PostPoolData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/pools", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Bulk Pools - * Bulk create, update, and delete pools. - * @param data The data for the request. - * @param data.requestBody - * @returns BulkResponse Successful Response - * @throws ApiError - */ - public static bulkPools(data: BulkPoolsData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/pools", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Delete Pool + * Delete a pool entry. + * @param data The data for the request. 
+ * @param data.poolName + * @returns void Successful Response + * @throws ApiError + */ + public static deletePool(data: DeletePoolData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/pools/{pool_name}', + path: { + pool_name: data.poolName + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ + public static getPool(data: GetPoolData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/pools/{pool_name}', + path: { + pool_name: data.poolName + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Pool + * Update a Pool. + * @param data The data for the request. + * @param data.poolName + * @param data.requestBody + * @param data.updateMask + * @returns PoolResponse Successful Response + * @throws ApiError + */ + public static patchPool(data: PatchPoolData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/pools/{pool_name}', + path: { + pool_name: data.poolName + }, + query: { + update_mask: data.updateMask + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Pools + * Get all pools entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.poolNamePattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ + public static getPools(data: GetPoolsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/pools', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + pool_name_pattern: data.poolNamePattern + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Post Pool + * Create a Pool. + * @param data The data for the request. + * @param data.requestBody + * @returns PoolResponse Successful Response + * @throws ApiError + */ + public static postPool(data: PostPoolData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/pools', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Bulk Pools + * Bulk create, update, and delete pools. + * @param data The data for the request. + * @param data.requestBody + * @returns BulkResponse Successful Response + * @throws ApiError + */ + public static bulkPools(data: BulkPoolsData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/pools', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class ProviderService { - /** - * Get Providers - * Get providers. - * @param data The data for the request. 
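// Usage sketch (illustrative, not generated code): pool_name_pattern is a
// SQL LIKE expression per the parameter doc above, so `%` and `_` are
// wildcards and regular expressions are rejected. Bulk delete entities are
// plain pool names (Array<(string)> in the types hunk further down).
import { PoolService } from 'openapi-gen/requests/services.gen';

async function tidyPools(): Promise<void> {
  // 'etl_%' matches "etl", then exactly one character, then anything.
  const pools = await PoolService.getPools({ poolNamePattern: 'etl_%' });
  console.log(pools);
  // One PATCH carries create/update/delete actions; here a lone delete,
  // skipped silently if the pool is already gone.
  await PoolService.bulkPools({
    requestBody: {
      actions: [{ action: 'delete', entities: ['unused_pool'], action_on_non_existence: 'skip' }],
    },
  });
}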
- * @param data.limit - * @param data.offset - * @returns ProviderCollectionResponse Successful Response - * @throws ApiError - */ - public static getProviders(data: GetProvidersData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/providers", - query: { - limit: data.limit, - offset: data.offset, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ + public static getProviders(data: GetProvidersData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/providers', + query: { + limit: data.limit, + offset: data.offset + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class XcomService { - /** - * Get Xcom Entry - * Get an XCom entry. - * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @param data.dagRunId - * @param data.xcomKey - * @param data.mapIndex - * @param data.deserialize - * @param data.stringify - * @returns unknown Successful Response - * @throws ApiError - */ - public static getXcomEntry(data: GetXcomEntryData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}", - path: { - dag_id: data.dagId, - task_id: data.taskId, - dag_run_id: data.dagRunId, - xcom_key: data.xcomKey, - }, - query: { - map_index: data.mapIndex, - deserialize: data.deserialize, - stringify: data.stringify, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Update Xcom Entry - * Update an existing XCom entry. - * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @param data.dagRunId - * @param data.xcomKey - * @param data.requestBody - * @returns XComResponseNative Successful Response - * @throws ApiError - */ - public static updateXcomEntry(data: UpdateXcomEntryData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}", - path: { - dag_id: data.dagId, - task_id: data.taskId, - dag_run_id: data.dagRunId, - xcom_key: data.xcomKey, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Xcom Entries - * Get all XCom entries. - * - * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. - * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @param data.taskId - * @param data.xcomKey - * @param data.mapIndex - * @param data.limit - * @param data.offset - * @returns XComCollectionResponse Successful Response - * @throws ApiError - */ - public static getXcomEntries(data: GetXcomEntriesData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries", - path: { - dag_id: data.dagId, - dag_run_id: data.dagRunId, - task_id: data.taskId, - }, - query: { - xcom_key: data.xcomKey, - map_index: data.mapIndex, - limit: data.limit, - offset: data.offset, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Create Xcom Entry - * Create an XCom entry. - * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @param data.dagRunId - * @param data.requestBody - * @returns XComResponseNative Successful Response - * @throws ApiError - */ - public static createXcomEntry(data: CreateXcomEntryData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries", - path: { - dag_id: data.dagId, - task_id: data.taskId, - dag_run_id: data.dagRunId, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Xcom Entry + * Get an XCom entry. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.xcomKey + * @param data.mapIndex + * @param data.deserialize + * @param data.stringify + * @returns unknown Successful Response + * @throws ApiError + */ + public static getXcomEntry(data: GetXcomEntryData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}', + path: { + dag_id: data.dagId, + task_id: data.taskId, + dag_run_id: data.dagRunId, + xcom_key: data.xcomKey + }, + query: { + map_index: data.mapIndex, + deserialize: data.deserialize, + stringify: data.stringify + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Update Xcom Entry + * Update an existing XCom entry. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.xcomKey + * @param data.requestBody + * @returns XComResponseNative Successful Response + * @throws ApiError + */ + public static updateXcomEntry(data: UpdateXcomEntryData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}', + path: { + dag_id: data.dagId, + task_id: data.taskId, + dag_run_id: data.dagRunId, + xcom_key: data.xcomKey + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollectionResponse Successful Response + * @throws ApiError + */ + public static getXcomEntries(data: GetXcomEntriesData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + query: { + xcom_key: data.xcomKey, + map_index: data.mapIndex, + limit: data.limit, + offset: data.offset + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Create Xcom Entry + * Create an XCom entry. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.requestBody + * @returns XComResponseNative Successful Response + * @throws ApiError + */ + public static createXcomEntry(data: CreateXcomEntryData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries', + path: { + dag_id: data.dagId, + task_id: data.taskId, + dag_run_id: data.dagRunId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class TaskService { - /** - * Get Tasks - * Get tasks for DAG. - * @param data The data for the request. - * @param data.dagId - * @param data.orderBy - * @returns TaskCollectionResponse Successful Response - * @throws ApiError - */ - public static getTasks(data: GetTasksData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/tasks", - path: { - dag_id: data.dagId, - }, - query: { - order_by: data.orderBy, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Task - * Get simplified representation of a task. - * @param data The data for the request. - * @param data.dagId - * @param data.taskId - * @returns TaskResponse Successful Response - * @throws ApiError - */ - public static getTask(data: GetTaskData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/tasks/{task_id}", - path: { - dag_id: data.dagId, - task_id: data.taskId, - }, - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ + public static getTasks(data: GetTasksData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/tasks', + path: { + dag_id: data.dagId + }, + query: { + order_by: data.orderBy + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. 
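// Usage sketch (illustrative, not generated code): the XCom read path.
// `deserialize` asks the API for the deserialized value (hence the `unknown`
// return type), and `~` is the documented wildcard for dag_id / dag_run_id /
// task_id on the list call. The dag/run/task ids below are placeholders.
import { XcomService } from 'openapi-gen/requests/services.gen';

async function readXcom(): Promise<void> {
  const value: unknown = await XcomService.getXcomEntry({
    dagId: 'example_dag',
    dagRunId: 'manual__2025-01-01T00:00:00+00:00',
    taskId: 'extract',
    xcomKey: 'return_value',
    deserialize: true,
  });
  console.log(value);
  // List XCom entries across every DAG, run, and task:
  const entries = await XcomService.getXcomEntries({ dagId: '~', dagRunId: '~', taskId: '~', limit: 50 });
  console.log(entries);
}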
+ * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ + public static getTask(data: GetTaskData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/tasks/{task_id}', + path: { + dag_id: data.dagId, + task_id: data.taskId + }, + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class VariableService { - /** - * Delete Variable - * Delete a variable entry. - * @param data The data for the request. - * @param data.variableKey - * @returns void Successful Response - * @throws ApiError - */ - public static deleteVariable(data: DeleteVariableData): CancelablePromise { - return __request(OpenAPI, { - method: "DELETE", - url: "/api/v2/variables/{variable_key}", - path: { - variable_key: data.variableKey, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Variable - * Get a variable entry. - * @param data The data for the request. - * @param data.variableKey - * @returns VariableResponse Successful Response - * @throws ApiError - */ - public static getVariable(data: GetVariableData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/variables/{variable_key}", - path: { - variable_key: data.variableKey, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Patch Variable - * Update a variable by key. - * @param data The data for the request. - * @param data.variableKey - * @param data.requestBody - * @param data.updateMask - * @returns VariableResponse Successful Response - * @throws ApiError - */ - public static patchVariable(data: PatchVariableData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/variables/{variable_key}", - path: { - variable_key: data.variableKey, - }, - query: { - update_mask: data.updateMask, - }, - body: data.requestBody, - mediaType: "application/json", - errors: { - 400: "Bad Request", - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } - - /** - * Get Variables - * Get all Variables entries. - * @param data The data for the request. - * @param data.limit - * @param data.offset - * @param data.orderBy - * @param data.variableKeyPattern - * @returns VariableCollectionResponse Successful Response - * @throws ApiError - */ - public static getVariables(data: GetVariablesData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/variables", - query: { - limit: data.limit, - offset: data.offset, - order_by: data.orderBy, - variable_key_pattern: data.variableKeyPattern, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } - - /** - * Post Variable - * Create a variable. - * @param data The data for the request. - * @param data.requestBody - * @returns VariableResponse Successful Response - * @throws ApiError - */ - public static postVariable(data: PostVariableData): CancelablePromise { - return __request(OpenAPI, { - method: "POST", - url: "/api/v2/variables", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 409: "Conflict", - 422: "Validation Error", - }, - }); - } - - /** - * Bulk Variables - * Bulk create, update, and delete variables. 
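// Usage sketch (illustrative, not generated code): TaskService is read-only;
// both calls return 404 for an unknown DAG or task id, per the error tables
// above. The ids and the orderBy value are placeholders.
import { TaskService } from 'openapi-gen/requests/services.gen';

async function inspectTasks(): Promise<void> {
  const tasks = await TaskService.getTasks({ dagId: 'example_dag', orderBy: 'task_id' });
  const one = await TaskService.getTask({ dagId: 'example_dag', taskId: 'extract' });
  console.log(tasks, one);
}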
- * @param data The data for the request. - * @param data.requestBody - * @returns BulkResponse Successful Response - * @throws ApiError - */ - public static bulkVariables(data: BulkVariablesData): CancelablePromise { - return __request(OpenAPI, { - method: "PATCH", - url: "/api/v2/variables", - body: data.requestBody, - mediaType: "application/json", - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 422: "Validation Error", - }, - }); - } + /** + * Delete Variable + * Delete a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns void Successful Response + * @throws ApiError + */ + public static deleteVariable(data: DeleteVariableData): CancelablePromise { + return __request(OpenAPI, { + method: 'DELETE', + url: '/api/v2/variables/{variable_key}', + path: { + variable_key: data.variableKey + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static getVariable(data: GetVariableData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/variables/{variable_key}', + path: { + variable_key: data.variableKey + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Patch Variable + * Update a variable by key. + * @param data The data for the request. + * @param data.variableKey + * @param data.requestBody + * @param data.updateMask + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static patchVariable(data: PatchVariableData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/variables/{variable_key}', + path: { + variable_key: data.variableKey + }, + query: { + update_mask: data.updateMask + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.variableKeyPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ + public static getVariables(data: GetVariablesData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/variables', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + variable_key_pattern: data.variableKeyPattern + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + + /** + * Post Variable + * Create a variable. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static postVariable(data: PostVariableData): CancelablePromise { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/variables', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Bulk Variables + * Bulk create, update, and delete variables. + * @param data The data for the request. + * @param data.requestBody + * @returns BulkResponse Successful Response + * @throws ApiError + */ + public static bulkVariables(data: BulkVariablesData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/variables', + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class DagParsingService { - /** - * Reparse Dag File - * Request re-parsing a DAG file. - * @param data The data for the request. - * @param data.fileToken - * @returns null Successful Response - * @throws ApiError - */ - public static reparseDagFile(data: ReparseDagFileData): CancelablePromise { - return __request(OpenAPI, { - method: "PUT", - url: "/api/v2/parseDagFile/{file_token}", - path: { - file_token: data.fileToken, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Reparse Dag File + * Request re-parsing a DAG file. + * @param data The data for the request. + * @param data.fileToken + * @returns null Successful Response + * @throws ApiError + */ + public static reparseDagFile(data: ReparseDagFileData): CancelablePromise { + return __request(OpenAPI, { + method: 'PUT', + url: '/api/v2/parseDagFile/{file_token}', + path: { + file_token: data.fileToken + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class DagVersionService { - /** - * Get Dag Version - * Get one Dag Version. - * @param data The data for the request. - * @param data.dagId - * @param data.versionNumber - * @returns DagVersionResponse Successful Response - * @throws ApiError - */ - public static getDagVersion(data: GetDagVersionData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagVersions/{version_number}", - path: { - dag_id: data.dagId, - version_number: data.versionNumber, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Dag Version + * Get one Dag Version. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @returns DagVersionResponse Successful Response + * @throws ApiError + */ + public static getDagVersion(data: GetDagVersionData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagVersions/{version_number}', + path: { + dag_id: data.dagId, + version_number: data.versionNumber + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Dag Versions + * Get all DAG Versions. + * + * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. + * @param data The data for the request. 
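// Usage sketch (illustrative, not generated code): one PATCH covers bulk
// create/update/delete of variables. The VariableBody fields (key/value) are
// assumptions not visible in this hunk; delete entities are plain keys, per
// the types further down. The file_token for reparsing is an opaque value
// obtained from the DAG endpoints, so it is taken as a parameter here.
import { VariableService, DagParsingService } from 'openapi-gen/requests/services.gen';

async function syncVariables(fileToken: string): Promise<void> {
  await VariableService.bulkVariables({
    requestBody: {
      actions: [
        { action: 'create', entities: [{ key: 'env', value: 'prod' }], action_on_existence: 'overwrite' },
        { action: 'delete', entities: ['stale_flag'], action_on_non_existence: 'skip' },
      ],
    },
  });
  // Ask the DAG processor to re-parse the owning file afterwards.
  await DagParsingService.reparseDagFile({ fileToken });
}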
+ * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.versionNumber + * @param data.bundleName + * @param data.bundleVersion + * @param data.orderBy + * @returns DAGVersionCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagVersions(data: GetDagVersionsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagVersions', + path: { + dag_id: data.dagId + }, + query: { + limit: data.limit, + offset: data.offset, + version_number: data.versionNumber, + bundle_name: data.bundleName, + bundle_version: data.bundleVersion, + order_by: data.orderBy + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + +} - /** - * Get Dag Versions - * Get all DAG Versions. - * - * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. - * @param data The data for the request. - * @param data.dagId - * @param data.limit - * @param data.offset - * @param data.versionNumber - * @param data.bundleName - * @param data.bundleVersion - * @param data.orderBy - * @returns DAGVersionCollectionResponse Successful Response - * @throws ApiError - */ - public static getDagVersions(data: GetDagVersionsData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/dags/{dag_id}/dagVersions", - path: { - dag_id: data.dagId, - }, - query: { - limit: data.limit, - offset: data.offset, - version_number: data.versionNumber, - bundle_name: data.bundleName, - bundle_version: data.bundleVersion, - order_by: data.orderBy, - }, - errors: { - 401: "Unauthorized", - 403: "Forbidden", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } +export class HumanInTheLoopService { + /** + * Update Hitl Detail + * Update a Human-in-the-loop detail. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.requestBody + * @returns HITLDetailResponse Successful Response + * @throws ApiError + */ + public static updateHitlDetail(data: UpdateHitlDetailData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Hitl Detail + * Get a Human-in-the-loop detail of a specific task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns HITLDetail Successful Response + * @throws ApiError + */ + public static getHitlDetail(data: GetHitlDetailData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Update Mapped Ti Hitl Detail + * Update a Human-in-the-loop detail. + * @param data The data for the request. 
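// Usage sketch (illustrative, not generated code): with `~` as dag_id the
// call spans all DAGs, and the bundle filters narrow the result. The bundle
// name and sort key below are placeholders.
import { DagVersionService } from 'openapi-gen/requests/services.gen';

async function latestVersions(): Promise<void> {
  const versions = await DagVersionService.getDagVersions({
    dagId: '~',
    bundleName: 'dags-folder',
    orderBy: '-version_number',
    limit: 20,
  });
  console.log(versions);
}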
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @param data.requestBody + * @returns HITLDetailResponse Successful Response + * @throws ApiError + */ + public static updateMappedTiHitlDetail(data: UpdateMappedTiHitlDetailData): CancelablePromise { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}/{map_index}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Mapped Ti Hitl Detail + * Get a Human-in-the-loop detail of a specific task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns HITLDetail Successful Response + * @throws ApiError + */ + public static getMappedTiHitlDetail(data: GetMappedTiHitlDetailData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}/{map_index}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Hitl Details + * Get Human-in-the-loop details. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.dagIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.dagRunId + * @param data.taskId + * @param data.taskIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.state + * @param data.responseReceived + * @param data.userId + * @param data.subjectSearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + * @param data.bodySearch SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
+ * @returns HITLDetailCollection Successful Response + * @throws ApiError + */ + public static getHitlDetails(data: GetHitlDetailsData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitlDetails/', + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + dag_id: data.dagId, + dag_id_pattern: data.dagIdPattern, + dag_run_id: data.dagRunId, + task_id: data.taskId, + task_id_pattern: data.taskIdPattern, + state: data.state, + response_received: data.responseReceived, + user_id: data.userId, + subject_search: data.subjectSearch, + body_search: data.bodySearch + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 422: 'Validation Error' + } + }); + } + } export class MonitorService { - /** - * Get Health - * @returns HealthInfoResponse Successful Response - * @throws ApiError - */ - public static getHealth(): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/monitor/health", - }); - } + /** + * Get Health + * @returns HealthInfoResponse Successful Response + * @throws ApiError + */ + public static getHealth(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/monitor/health' + }); + } + } export class VersionService { - /** - * Get Version - * Get version information. - * @returns VersionInfo Successful Response - * @throws ApiError - */ - public static getVersion(): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/version", - }); - } + /** + * Get Version + * Get version information. + * @returns VersionInfo Successful Response + * @throws ApiError + */ + public static getVersion(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/version' + }); + } + } export class LoginService { - /** - * Login - * Redirect to the login URL depending on the AuthManager configured. - * @param data The data for the request. - * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ - public static login(data: LoginData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/auth/login", - query: { - next: data.next, - }, - errors: { - 307: "Temporary Redirect", - 422: "Validation Error", - }, - }); - } - - /** - * Logout - * Logout the user. - * @param data The data for the request. - * @param data.next - * @returns unknown Successful Response - * @throws ApiError - */ - public static logout(data: LogoutData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/api/v2/auth/logout", - query: { - next: data.next, - }, - errors: { - 307: "Temporary Redirect", - 422: "Validation Error", - }, - }); - } + /** + * Login + * Redirect to the login URL depending on the AuthManager configured. + * @param data The data for the request. + * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ + public static login(data: LoginData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/auth/login', + query: { + next: data.next + }, + errors: { + 307: 'Temporary Redirect', + 422: 'Validation Error' + } + }); + } + + /** + * Logout + * Logout the user. + * @param data The data for the request. 
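// Usage sketch (illustrative, not generated code): list open Human-in-the-
// loop requests; dag_id_pattern is a SQL LIKE expression per the parameter
// docs above. Responding is a PATCH on the same path triplet and returns
// 409 Conflict once a response has been recorded; its request-body type is
// defined outside this hunk. The sort key below is illustrative.
import { HumanInTheLoopService, MonitorService, VersionService } from 'openapi-gen/requests/services.gen';

async function pendingApprovals(): Promise<void> {
  const open = await HumanInTheLoopService.getHitlDetails({
    dagIdPattern: 'approval%',
    responseReceived: false,
  });
  console.log(open);
  // Instance health and version, via the unchanged monitor endpoints:
  const health = await MonitorService.getHealth();
  const version = await VersionService.getVersion();
  console.log(health, version);
}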
+ * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ + public static logout(data: LogoutData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/auth/logout', + query: { + next: data.next + }, + errors: { + 307: 'Temporary Redirect', + 422: 'Validation Error' + } + }); + } + + /** + * Refresh + * Refresh the authentication token. + * @param data The data for the request. + * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ + public static refresh(data: RefreshData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/auth/refresh', + query: { + next: data.next + }, + errors: { + 307: 'Temporary Redirect', + 422: 'Validation Error' + } + }); + } + } export class AuthLinksService { - /** - * Get Auth Menus - * @returns MenuItemCollectionResponse Successful Response - * @throws ApiError - */ - public static getAuthMenus(): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/auth/menus", - }); - } -} - -export class DagsService { - /** - * Recent Dag Runs - * Get recent DAG runs. - * @param data The data for the request. - * @param data.dagRunsLimit - * @param data.limit - * @param data.offset - * @param data.tags - * @param data.tagsMatchMode - * @param data.owners - * @param data.dagIds - * @param data.dagIdPattern - * @param data.dagDisplayNamePattern - * @param data.excludeStale - * @param data.paused - * @param data.lastDagRunState - * @returns DAGWithLatestDagRunsCollectionResponse Successful Response - * @throws ApiError - */ - public static recentDagRuns(data: RecentDagRunsData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/dags/recent_dag_runs", - query: { - dag_runs_limit: data.dagRunsLimit, - limit: data.limit, - offset: data.offset, - tags: data.tags, - tags_match_mode: data.tagsMatchMode, - owners: data.owners, - dag_ids: data.dagIds, - dag_id_pattern: data.dagIdPattern, - dag_display_name_pattern: data.dagDisplayNamePattern, - exclude_stale: data.excludeStale, - paused: data.paused, - last_dag_run_state: data.lastDagRunState, - }, - errors: { - 422: "Validation Error", - }, - }); - } + /** + * Get Auth Menus + * @returns MenuItemCollectionResponse Successful Response + * @throws ApiError + */ + public static getAuthMenus(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/auth/menus' + }); + } + } export class DependenciesService { - /** - * Get Dependencies - * Dependencies graph. - * @param data The data for the request. - * @param data.nodeId - * @returns BaseGraphResponse Successful Response - * @throws ApiError - */ - public static getDependencies(data: GetDependenciesData = {}): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/dependencies", - query: { - node_id: data.nodeId, - }, - errors: { - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Dependencies + * Dependencies graph. + * @param data The data for the request. 
+ * @param data.nodeId + * @returns BaseGraphResponse Successful Response + * @throws ApiError + */ + public static getDependencies(data: GetDependenciesData = {}): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/dependencies', + query: { + node_id: data.nodeId + }, + errors: { + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class DashboardService { - /** - * Historical Metrics - * Return cluster activity historical metrics. - * @param data The data for the request. - * @param data.startDate - * @param data.endDate - * @returns HistoricalMetricDataResponse Successful Response - * @throws ApiError - */ - public static historicalMetrics(data: HistoricalMetricsData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/dashboard/historical_metrics_data", - query: { - start_date: data.startDate, - end_date: data.endDate, - }, - errors: { - 400: "Bad Request", - 422: "Validation Error", - }, - }); - } + /** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. + * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ + public static historicalMetrics(data: HistoricalMetricsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/dashboard/historical_metrics_data', + query: { + start_date: data.startDate, + end_date: data.endDate + }, + errors: { + 400: 'Bad Request', + 422: 'Validation Error' + } + }); + } + + /** + * Dag Stats + * Return basic DAG stats with counts of DAGs in various states. + * @returns DashboardDagStatsResponse Successful Response + * @throws ApiError + */ + public static dagStats(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/dashboard/dag_stats' + }); + } + } export class StructureService { - /** - * Structure Data - * Get Structure Data. - * @param data The data for the request. - * @param data.dagId - * @param data.includeUpstream - * @param data.includeDownstream - * @param data.root - * @param data.externalDependencies - * @param data.versionNumber - * @returns StructureDataResponse Successful Response - * @throws ApiError - */ - public static structureData(data: StructureDataData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/structure/structure_data", - query: { - dag_id: data.dagId, - include_upstream: data.includeUpstream, - include_downstream: data.includeDownstream, - root: data.root, - external_dependencies: data.externalDependencies, - version_number: data.versionNumber, - }, - errors: { - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Structure Data + * Get Structure Data. + * @param data The data for the request. 
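// Usage sketch (illustrative, not generated code): the dashboard endpoints.
// historical_metrics_data takes a date window, and dag_stats is new in this
// change and takes no parameters. ISO-8601 date strings are an assumption.
import { DashboardService } from 'openapi-gen/requests/services.gen';

async function dashboard(): Promise<void> {
  const metrics = await DashboardService.historicalMetrics({
    startDate: '2025-01-01T00:00:00Z',
    endDate: '2025-01-31T23:59:59Z',
  });
  const stats = await DashboardService.dagStats();
  console.log(metrics, stats);
}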
+ * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.externalDependencies + * @param data.versionNumber + * @returns StructureDataResponse Successful Response + * @throws ApiError + */ + public static structureData(data: StructureDataData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/structure/structure_data', + query: { + dag_id: data.dagId, + include_upstream: data.includeUpstream, + include_downstream: data.includeDownstream, + root: data.root, + external_dependencies: data.externalDependencies, + version_number: data.versionNumber + }, + errors: { + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } export class GridService { - /** - * Grid Data - * Return grid data. - * @param data The data for the request. - * @param data.dagId - * @param data.includeUpstream - * @param data.includeDownstream - * @param data.root - * @param data.offset - * @param data.runType - * @param data.state - * @param data.limit - * @param data.orderBy - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @returns GridResponse Successful Response - * @throws ApiError - */ - public static gridData(data: GridDataData): CancelablePromise { - return __request(OpenAPI, { - method: "GET", - url: "/ui/grid/{dag_id}", - path: { - dag_id: data.dagId, - }, - query: { - include_upstream: data.includeUpstream, - include_downstream: data.includeDownstream, - root: data.root, - offset: data.offset, - run_type: data.runType, - state: data.state, - limit: data.limit, - order_by: data.orderBy, - run_after_gte: data.runAfterGte, - run_after_lte: data.runAfterLte, - logical_date_gte: data.logicalDateGte, - logical_date_lte: data.logicalDateLte, - }, - errors: { - 400: "Bad Request", - 404: "Not Found", - 422: "Validation Error", - }, - }); - } + /** + * Get Dag Structure + * Return dag structure for grid view. + * @param data The data for the request. + * @param data.dagId + * @param data.offset + * @param data.limit + * @param data.orderBy + * @param data.runAfterGte + * @param data.runAfterLte + * @returns GridNodeResponse Successful Response + * @throws ApiError + */ + public static getDagStructure(data: GetDagStructureData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/grid/structure/{dag_id}', + path: { + dag_id: data.dagId + }, + query: { + offset: data.offset, + limit: data.limit, + order_by: data.orderBy, + run_after_gte: data.runAfterGte, + run_after_lte: data.runAfterLte + }, + errors: { + 400: 'Bad Request', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Grid Runs + * Get info about a run for the grid. + * @param data The data for the request. + * @param data.dagId + * @param data.offset + * @param data.limit + * @param data.orderBy + * @param data.runAfterGte + * @param data.runAfterLte + * @returns GridRunsResponse Successful Response + * @throws ApiError + */ + public static getGridRuns(data: GetGridRunsData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/grid/runs/{dag_id}', + path: { + dag_id: data.dagId + }, + query: { + offset: data.offset, + limit: data.limit, + order_by: data.orderBy, + run_after_gte: data.runAfterGte, + run_after_lte: data.runAfterLte + }, + errors: { + 400: 'Bad Request', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Grid Ti Summaries + * Get states for TIs / "groups" of TIs. 
+ * + * Essentially this is to know what color to put in the squares in the grid. + * + * The tricky part here is that we aggregate the state for groups and mapped tasks. + * + * We don't add all the TIs for mapped TIs -- we only add one entry for the mapped task and + * its state is an aggregate of its TI states. + * + * And for task groups, we add a "task" for that which is not really a task but is just + * an entry that represents the group (so that we can show a filled in box when the group + * is not expanded) and its state is an agg of those within it. + * @param data The data for the request. + * @param data.dagId + * @param data.runId + * @returns GridTISummaries Successful Response + * @throws ApiError + */ + public static getGridTiSummaries(data: GetGridTiSummariesData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/grid/ti_summaries/{dag_id}/{run_id}', + path: { + dag_id: data.dagId, + run_id: data.runId + }, + errors: { + 400: 'Bad Request', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + } + +export class CalendarService { + /** + * Get Calendar + * Get calendar data for a DAG including historical and planned DAG runs. + * @param data The data for the request. + * @param data.dagId + * @param data.granularity + * @param data.logicalDateGte + * @param data.logicalDateLte + * @returns CalendarTimeRangeCollectionResponse Successful Response + * @throws ApiError + */ + public static getCalendar(data: GetCalendarData): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/calendar/{dag_id}', + path: { + dag_id: data.dagId + }, + query: { + granularity: data.granularity, + logical_date_gte: data.logicalDateGte, + logical_date_lte: data.logicalDateLte + }, + errors: { + 422: 'Validation Error' + } + }); + } + +} \ No newline at end of file diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index 70139967f2895..b0f1a6e5d27ac 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -4,157 +4,155 @@ * Serializer for AppBuilder Menu Item responses. */ export type AppBuilderMenuItemResponse = { - name: string; - href?: string | null; - category?: string | null; - [key: string]: unknown | string; + name: string; + href: string; + category?: string | null; + [key: string]: unknown | string; }; /** * Serializer for AppBuilder View responses. */ export type AppBuilderViewResponse = { - name?: string | null; - category?: string | null; - view?: string | null; - label?: string | null; - [key: string]: unknown; + name?: string | null; + category?: string | null; + view?: string | null; + label?: string | null; + [key: string]: unknown; }; /** * Asset alias collection response. */ export type AssetAliasCollectionResponse = { - asset_aliases: Array; - total_entries: number; + asset_aliases: Array; + total_entries: number; }; /** * Asset alias serializer for responses. */ export type AssetAliasResponse = { - id: number; - name: string; - group: string; + id: number; + name: string; + group: string; }; /** * Asset collection response. */ export type AssetCollectionResponse = { - assets: Array; - total_entries: number; + assets: Array; + total_entries: number; }; /** * Asset event collection response. 
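// Usage sketch (illustrative, not generated code): the old single
// GridService.gridData endpoint is replaced by three narrower calls:
// structure, runs, and per-run TI summaries (the aggregated group/mapped
// states described above). The run id would normally come from getGridRuns;
// it is a placeholder here, as are the sort key and calendar granularity.
import { GridService, CalendarService } from 'openapi-gen/requests/services.gen';

async function gridView(dagId: string): Promise<void> {
  const structure = await GridService.getDagStructure({ dagId, limit: 25 });
  const runs = await GridService.getGridRuns({ dagId, limit: 25, orderBy: '-run_after' });
  const summaries = await GridService.getGridTiSummaries({
    dagId,
    runId: 'manual__2025-01-01T00:00:00+00:00',
  });
  console.log(structure, runs, summaries);
  const calendar = await CalendarService.getCalendar({ dagId, granularity: 'daily' });
  console.log(calendar);
}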
*/ export type AssetEventCollectionResponse = { - asset_events: Array; - total_entries: number; + asset_events: Array; + total_entries: number; }; /** * Asset event serializer for responses. */ export type AssetEventResponse = { - id: number; - asset_id: number; - uri?: string | null; - name?: string | null; - group?: string | null; - extra?: { + id: number; + asset_id: number; + uri?: string | null; + name?: string | null; + group?: string | null; + extra?: { [key: string]: unknown; - } | null; - source_task_id?: string | null; - source_dag_id?: string | null; - source_run_id?: string | null; - source_map_index: number; - created_dagruns: Array; - timestamp: string; +} | null; + source_task_id?: string | null; + source_dag_id?: string | null; + source_run_id?: string | null; + source_map_index: number; + created_dagruns: Array; + timestamp: string; }; /** * Asset serializer for responses. */ export type AssetResponse = { - id: number; - name: string; - uri: string; - group: string; - extra?: { + id: number; + name: string; + uri: string; + group: string; + extra?: { [key: string]: unknown; - } | null; - created_at: string; - updated_at: string; - consuming_dags: Array; - producing_tasks: Array; - aliases: Array; +} | null; + created_at: string; + updated_at: string; + scheduled_dags: Array; + producing_tasks: Array; + consuming_tasks: Array; + aliases: Array; + last_asset_event?: LastAssetEventResponse | null; }; /** * Backfill Collection serializer for responses. */ export type BackfillCollectionResponse = { - backfills: Array; - total_entries: number; + backfills: Array; + total_entries: number; }; /** * Object used for create backfill request. */ export type BackfillPostBody = { - dag_id: string; - from_date: string; - to_date: string; - run_backwards?: boolean; - dag_run_conf?: { - [key: string]: unknown; - }; - reprocess_behavior?: ReprocessBehavior; - max_active_runs?: number; + dag_id: string; + from_date: string; + to_date: string; + run_backwards?: boolean; + dag_run_conf?: { + [key: string]: unknown; + }; + reprocess_behavior?: ReprocessBehavior; + max_active_runs?: number; }; /** * Base serializer for Backfill. */ export type BackfillResponse = { - id: number; - dag_id: string; - from_date: string; - to_date: string; - dag_run_conf: { - [key: string]: unknown; - }; - is_paused: boolean; - reprocess_behavior: ReprocessBehavior; - max_active_runs: number; - created_at: string; - completed_at: string | null; - updated_at: string; + id: number; + dag_id: string; + from_date: string; + to_date: string; + dag_run_conf: { + [key: string]: unknown; + }; + is_paused: boolean; + reprocess_behavior: ReprocessBehavior; + max_active_runs: number; + created_at: string; + completed_at: string | null; + updated_at: string; + dag_display_name: string; }; /** * Base info serializer for responses. */ export type BaseInfoResponse = { - status: string | null; + status: string | null; }; -/** - * Bulk Action to be performed on the used model. - */ -export type BulkAction = "create" | "delete" | "update"; - /** * Bulk Action to be taken if the entity does not exist. */ -export type BulkActionNotOnExistence = "fail" | "skip"; +export type BulkActionNotOnExistence = 'fail' | 'skip'; /** * Bulk Action to be taken if the entity already exists or not. */ -export type BulkActionOnExistence = "fail" | "skip" | "overwrite"; +export type BulkActionOnExistence = 'fail' | 'skip' | 'overwrite'; /** * Serializer for individual bulk action responses. 
@@ -164,104 +162,128 @@ export type BulkActionOnExistence = "fail" | "skip" | "overwrite"; * This structure helps users understand which key actions succeeded and which failed. */ export type BulkActionResponse = { - /** - * A list of unique id/key representing successful operations. - */ - success?: Array; - /** - * A list of errors encountered during the operation, each containing details about the issue. - */ - errors?: Array<{ - [key: string]: unknown; - }>; + /** + * A list of unique id/key representing successful operations. + */ + success?: Array<(string)>; + /** + * A list of errors encountered during the operation, each containing details about the issue. + */ + errors?: Array<{ + [key: string]: unknown; + }>; +}; + +export type BulkBody_BulkTaskInstanceBody_ = { + actions: Array<(BulkCreateAction_BulkTaskInstanceBody_ | BulkUpdateAction_BulkTaskInstanceBody_ | BulkDeleteAction_BulkTaskInstanceBody_)>; }; export type BulkBody_ConnectionBody_ = { - actions: Array< - BulkCreateAction_ConnectionBody_ | BulkUpdateAction_ConnectionBody_ | BulkDeleteAction_ConnectionBody_ - >; + actions: Array<(BulkCreateAction_ConnectionBody_ | BulkUpdateAction_ConnectionBody_ | BulkDeleteAction_ConnectionBody_)>; }; export type BulkBody_PoolBody_ = { - actions: Array; + actions: Array<(BulkCreateAction_PoolBody_ | BulkUpdateAction_PoolBody_ | BulkDeleteAction_PoolBody_)>; }; export type BulkBody_VariableBody_ = { - actions: Array< - BulkCreateAction_VariableBody_ | BulkUpdateAction_VariableBody_ | BulkDeleteAction_VariableBody_ - >; + actions: Array<(BulkCreateAction_VariableBody_ | BulkUpdateAction_VariableBody_ | BulkDeleteAction_VariableBody_)>; +}; + +export type BulkCreateAction_BulkTaskInstanceBody_ = { + /** + * The action to be performed on the entities. + */ + action: "create"; + /** + * A list of entities to be created. + */ + entities: Array; + action_on_existence?: BulkActionOnExistence; }; export type BulkCreateAction_ConnectionBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entities to be created. - */ - entities: Array; - action_on_existence?: BulkActionOnExistence; + /** + * The action to be performed on the entities. + */ + action: "create"; + /** + * A list of entities to be created. + */ + entities: Array; + action_on_existence?: BulkActionOnExistence; }; export type BulkCreateAction_PoolBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entities to be created. - */ - entities: Array; - action_on_existence?: BulkActionOnExistence; + /** + * The action to be performed on the entities. + */ + action: "create"; + /** + * A list of entities to be created. + */ + entities: Array; + action_on_existence?: BulkActionOnExistence; }; export type BulkCreateAction_VariableBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entities to be created. - */ - entities: Array; - action_on_existence?: BulkActionOnExistence; + /** + * The action to be performed on the entities. + */ + action: "create"; + /** + * A list of entities to be created. + */ + entities: Array; + action_on_existence?: BulkActionOnExistence; +}; + +export type BulkDeleteAction_BulkTaskInstanceBody_ = { + /** + * The action to be performed on the entities. + */ + action: "delete"; + /** + * A list of entity id/key to be deleted. 
+ */ + entities: Array<(string)>; + action_on_non_existence?: BulkActionNotOnExistence; }; export type BulkDeleteAction_ConnectionBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entity id/key to be deleted. - */ - entities: Array; - action_on_non_existence?: BulkActionNotOnExistence; + /** + * The action to be performed on the entities. + */ + action: "delete"; + /** + * A list of entity id/key to be deleted. + */ + entities: Array<(string)>; + action_on_non_existence?: BulkActionNotOnExistence; }; export type BulkDeleteAction_PoolBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entity id/key to be deleted. - */ - entities: Array; - action_on_non_existence?: BulkActionNotOnExistence; + /** + * The action to be performed on the entities. + */ + action: "delete"; + /** + * A list of entity id/key to be deleted. + */ + entities: Array<(string)>; + action_on_non_existence?: BulkActionNotOnExistence; }; export type BulkDeleteAction_VariableBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entity id/key to be deleted. - */ - entities: Array; - action_on_non_existence?: BulkActionNotOnExistence; + /** + * The action to be performed on the entities. + */ + action: "delete"; + /** + * A list of entity id/key to be deleted. + */ + entities: Array<(string)>; + action_on_non_existence?: BulkActionNotOnExistence; }; /** @@ -272,393 +294,446 @@ export type BulkDeleteAction_VariableBody_ = { * Fields are populated in the response only if the respective action was part of the request, else are set None. */ export type BulkResponse = { - /** - * Details of the bulk create operation, including successful keys and errors. - */ - create?: BulkActionResponse | null; - /** - * Details of the bulk update operation, including successful keys and errors. - */ - update?: BulkActionResponse | null; - /** - * Details of the bulk delete operation, including successful keys and errors. - */ - delete?: BulkActionResponse | null; + /** + * Details of the bulk create operation, including successful keys and errors. + */ + create?: BulkActionResponse | null; + /** + * Details of the bulk update operation, including successful keys and errors. + */ + update?: BulkActionResponse | null; + /** + * Details of the bulk delete operation, including successful keys and errors. + */ + delete?: BulkActionResponse | null; +}; + +/** + * Request body for bulk update, and delete task instances. + */ +export type BulkTaskInstanceBody = { + new_state?: TaskInstanceState | null; + note?: string | null; + include_upstream?: boolean; + include_downstream?: boolean; + include_future?: boolean; + include_past?: boolean; + task_id: string; + map_index?: number | null; +}; + +export type BulkUpdateAction_BulkTaskInstanceBody_ = { + /** + * The action to be performed on the entities. + */ + action: "update"; + /** + * A list of entities to be updated. + */ + entities: Array; + action_on_non_existence?: BulkActionNotOnExistence; }; export type BulkUpdateAction_ConnectionBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entities to be updated. - */ - entities: Array; - action_on_non_existence?: BulkActionNotOnExistence; + /** + * The action to be performed on the entities. + */ + action: "update"; + /** + * A list of entities to be updated. 
+ */ + entities: Array; + action_on_non_existence?: BulkActionNotOnExistence; }; export type BulkUpdateAction_PoolBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entities to be updated. - */ - entities: Array; - action_on_non_existence?: BulkActionNotOnExistence; + /** + * The action to be performed on the entities. + */ + action: "update"; + /** + * A list of entities to be updated. + */ + entities: Array; + action_on_non_existence?: BulkActionNotOnExistence; }; export type BulkUpdateAction_VariableBody_ = { - /** - * The action to be performed on the entities. - */ - action: BulkAction; - /** - * A list of entities to be updated. - */ - entities: Array; - action_on_non_existence?: BulkActionNotOnExistence; + /** + * The action to be performed on the entities. + */ + action: "update"; + /** + * A list of entities to be updated. + */ + entities: Array; + action_on_non_existence?: BulkActionNotOnExistence; }; /** * Request body for Clear Task Instances endpoint. */ export type ClearTaskInstancesBody = { - dry_run?: boolean; - start_date?: string | null; - end_date?: string | null; - only_failed?: boolean; - only_running?: boolean; - reset_dag_runs?: boolean; - task_ids?: Array | null; - dag_run_id?: string | null; - include_upstream?: boolean; - include_downstream?: boolean; - include_future?: boolean; - include_past?: boolean; + dry_run?: boolean; + start_date?: string | null; + end_date?: string | null; + only_failed?: boolean; + only_running?: boolean; + reset_dag_runs?: boolean; + task_ids?: Array<(string | [ + string, + number +])> | null; + dag_run_id?: string | null; + include_upstream?: boolean; + include_downstream?: boolean; + include_future?: boolean; + include_past?: boolean; + /** + * (Experimental) Run on the latest bundle version of the dag after clearing the task instances. + */ + run_on_latest_version?: boolean; }; /** * List of config sections with their options. */ export type Config = { - sections: Array; + sections: Array; }; /** * Config option. */ export type ConfigOption = { - key: string; - value: string | [string, string]; + key: string; + value: string | [ + string, + string +]; }; /** * Config Section Schema. */ export type ConfigSection = { - name: string; - options: Array; + name: string; + options: Array; }; /** * Connection Serializer for requests body. */ export type ConnectionBody = { - connection_id: string; - conn_type: string; - description?: string | null; - host?: string | null; - login?: string | null; - schema?: string | null; - port?: number | null; - password?: string | null; - extra?: string | null; + connection_id: string; + conn_type: string; + description?: string | null; + host?: string | null; + login?: string | null; + schema?: string | null; + port?: number | null; + password?: string | null; + extra?: string | null; }; /** * Connection Collection serializer for responses. */ export type ConnectionCollectionResponse = { - connections: Array; - total_entries: number; + connections: Array; + total_entries: number; }; /** * Connection serializer for responses. 
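 *
 * Illustrative sketch (dag/task ids hypothetical): in ClearTaskInstancesBody
 * above, plain task ids are strings, while mapped task instances are
 * addressed as [task_id, map_index] tuples.
 * @example
 * const clearBody: ClearTaskInstancesBody = {
 *   dry_run: true,
 *   dag_run_id: "manual__2025-01-01T00:00:00+00:00",
 *   task_ids: ["extract", ["transform", 3]],
 * };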
*/ export type ConnectionResponse = { - connection_id: string; - conn_type: string; - description: string | null; - host: string | null; - login: string | null; - schema: string | null; - port: number | null; - password: string | null; - extra: string | null; + connection_id: string; + conn_type: string; + description: string | null; + host: string | null; + login: string | null; + schema: string | null; + port: number | null; + password: string | null; + extra: string | null; }; /** * Connection Test serializer for responses. */ export type ConnectionTestResponse = { - status: boolean; - message: string; + status: boolean; + message: string; }; /** * Create asset events request. */ export type CreateAssetEventsBody = { - asset_id: number; - extra?: { - [key: string]: unknown; - }; + asset_id: number; + extra?: { + [key: string]: unknown; + }; }; /** * DAG Collection serializer for responses. */ export type DAGCollectionResponse = { - dags: Array; - total_entries: number; + dags: Array; + total_entries: number; }; /** * Specific serializer for DAG Details responses. */ export type DAGDetailsResponse = { - dag_id: string; - dag_display_name: string; - is_paused: boolean; - is_stale: boolean; - last_parsed_time: string | null; - last_expired: string | null; - bundle_name: string | null; - relative_fileloc: string | null; - fileloc: string; - description: string | null; - timetable_summary: string | null; - timetable_description: string | null; - tags: Array; - max_active_tasks: number; - max_active_runs: number | null; - max_consecutive_failed_dag_runs: number; - has_task_concurrency_limits: boolean; - has_import_errors: boolean; - next_dagrun_logical_date: string | null; - next_dagrun_data_interval_start: string | null; - next_dagrun_data_interval_end: string | null; - next_dagrun_run_after: string | null; - owners: Array; - catchup: boolean; - dag_run_timeout: string | null; - asset_expression: { + dag_id: string; + dag_display_name: string; + is_paused: boolean; + is_stale: boolean; + last_parsed_time: string | null; + last_expired: string | null; + bundle_name: string | null; + bundle_version: string | null; + relative_fileloc: string | null; + fileloc: string; + description: string | null; + timetable_summary: string | null; + timetable_description: string | null; + tags: Array; + max_active_tasks: number; + max_active_runs: number | null; + max_consecutive_failed_dag_runs: number; + has_task_concurrency_limits: boolean; + has_import_errors: boolean; + next_dagrun_logical_date: string | null; + next_dagrun_data_interval_start: string | null; + next_dagrun_data_interval_end: string | null; + next_dagrun_run_after: string | null; + owners: Array<(string)>; + catchup: boolean; + dag_run_timeout: string | null; + asset_expression: { + [key: string]: unknown; +} | null; + doc_md: string | null; + start_date: string | null; + end_date: string | null; + is_paused_upon_creation: boolean | null; + params: { [key: string]: unknown; - } | null; - doc_md: string | null; - start_date: string | null; - end_date: string | null; - is_paused_upon_creation: boolean | null; - params: { +} | null; + render_template_as_native_obj: boolean; + template_search_path: Array<(string)> | null; + timezone: string | null; + last_parsed: string | null; + default_args: { [key: string]: unknown; - } | null; - render_template_as_native_obj: boolean; - template_search_path: Array | null; - timezone: string | null; - last_parsed: string | null; - /** - * Return file token. 
- */ - readonly file_token: string; - /** - * Return max_active_tasks as concurrency. - */ - readonly concurrency: number; - /** - * Return the latest DagVersion. - */ - readonly latest_dag_version: DagVersionResponse | null; +} | null; + owner_links?: { + [key: string]: (string); +} | null; + /** + * Return file token. + */ + readonly file_token: string; + /** + * Return max_active_tasks as concurrency. + */ + readonly concurrency: number; + /** + * Return the latest DagVersion. + */ + readonly latest_dag_version: DagVersionResponse | null; }; /** * Dag Serializer for updatable bodies. */ export type DAGPatchBody = { - is_paused: boolean; + is_paused: boolean; }; /** * DAG serializer for responses. */ export type DAGResponse = { - dag_id: string; - dag_display_name: string; - is_paused: boolean; - is_stale: boolean; - last_parsed_time: string | null; - last_expired: string | null; - bundle_name: string | null; - relative_fileloc: string | null; - fileloc: string; - description: string | null; - timetable_summary: string | null; - timetable_description: string | null; - tags: Array; - max_active_tasks: number; - max_active_runs: number | null; - max_consecutive_failed_dag_runs: number; - has_task_concurrency_limits: boolean; - has_import_errors: boolean; - next_dagrun_logical_date: string | null; - next_dagrun_data_interval_start: string | null; - next_dagrun_data_interval_end: string | null; - next_dagrun_run_after: string | null; - owners: Array; - /** - * Return file token. - */ - readonly file_token: string; + dag_id: string; + dag_display_name: string; + is_paused: boolean; + is_stale: boolean; + last_parsed_time: string | null; + last_expired: string | null; + bundle_name: string | null; + bundle_version: string | null; + relative_fileloc: string | null; + fileloc: string; + description: string | null; + timetable_summary: string | null; + timetable_description: string | null; + tags: Array; + max_active_tasks: number; + max_active_runs: number | null; + max_consecutive_failed_dag_runs: number; + has_task_concurrency_limits: boolean; + has_import_errors: boolean; + next_dagrun_logical_date: string | null; + next_dagrun_data_interval_start: string | null; + next_dagrun_data_interval_end: string | null; + next_dagrun_run_after: string | null; + owners: Array<(string)>; + /** + * Return file token. + */ + readonly file_token: string; }; /** * DAG Run serializer for clear endpoint body. */ export type DAGRunClearBody = { - dry_run?: boolean; - only_failed?: boolean; + dry_run?: boolean; + only_failed?: boolean; + /** + * (Experimental) Run on the latest bundle version of the Dag after clearing the Dag Run. + */ + run_on_latest_version?: boolean; }; /** * DAG Run Collection serializer for responses. */ export type DAGRunCollectionResponse = { - dag_runs: Array; - total_entries: number; + dag_runs: Array; + total_entries: number; }; /** * DAG Run Serializer for PATCH requests. */ export type DAGRunPatchBody = { - state?: DAGRunPatchStates | null; - note?: string | null; + state?: DAGRunPatchStates | null; + note?: string | null; }; /** * Enum for DAG Run states when updating a DAG Run. */ -export type DAGRunPatchStates = "queued" | "success" | "failed"; +export type DAGRunPatchStates = 'queued' | 'success' | 'failed'; /** * DAG Run serializer for responses. 
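 *
 * Illustrative sketch (note text hypothetical): DAGRunPatchStates above is the
 * narrowed set of states a client may request on PATCH, distinct from the
 * full DagRunState union used in responses.
 * @example
 * const patch: DAGRunPatchBody = { state: "failed", note: "marked failed after manual review" };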
*/ export type DAGRunResponse = { - dag_run_id: string; - dag_id: string; - logical_date: string | null; - queued_at: string | null; - start_date: string | null; - end_date: string | null; - data_interval_start: string | null; - data_interval_end: string | null; - run_after: string; - last_scheduling_decision: string | null; - run_type: DagRunType; - state: DagRunState; - triggered_by: DagRunTriggeredByType | null; - conf: { + dag_run_id: string; + dag_id: string; + logical_date: string | null; + queued_at: string | null; + start_date: string | null; + end_date: string | null; + duration: number | null; + data_interval_start: string | null; + data_interval_end: string | null; + run_after: string; + last_scheduling_decision: string | null; + run_type: DagRunType; + state: DagRunState; + triggered_by: DagRunTriggeredByType | null; + triggering_user_name: string | null; + conf: { [key: string]: unknown; - }; - note: string | null; - dag_versions: Array; +} | null; + note: string | null; + dag_versions: Array; + bundle_version: string | null; + dag_display_name: string; }; /** * List DAG Runs body for batch endpoint. */ export type DAGRunsBatchBody = { - order_by?: string | null; - page_offset?: number; - page_limit?: number; - dag_ids?: Array | null; - states?: Array | null; - run_after_gte?: string | null; - run_after_lte?: string | null; - logical_date_gte?: string | null; - logical_date_lte?: string | null; - start_date_gte?: string | null; - start_date_lte?: string | null; - end_date_gte?: string | null; - end_date_lte?: string | null; + order_by?: string | null; + page_offset?: number; + page_limit?: number; + dag_ids?: Array<(string)> | null; + states?: Array<(DagRunState | null)> | null; + run_after_gte?: string | null; + run_after_lte?: string | null; + logical_date_gte?: string | null; + logical_date_lte?: string | null; + start_date_gte?: string | null; + start_date_lte?: string | null; + end_date_gte?: string | null; + end_date_lte?: string | null; }; /** * DAG Source serializer for responses. */ export type DAGSourceResponse = { - content: string | null; - dag_id: string; - version_number: number | null; + content: string | null; + dag_id: string; + version_number: number | null; + dag_display_name: string; }; /** * DAG Tags Collection serializer for responses. */ export type DAGTagCollectionResponse = { - tags: Array; - total_entries: number; + tags: Array<(string)>; + total_entries: number; }; /** * DAG Version Collection serializer for responses. */ export type DAGVersionCollectionResponse = { - dag_versions: Array; - total_entries: number; + dag_versions: Array; + total_entries: number; }; /** * DAG warning collection serializer for responses. */ export type DAGWarningCollectionResponse = { - dag_warnings: Array; - total_entries: number; + dag_warnings: Array; + total_entries: number; }; /** * DAG Warning serializer for responses. */ export type DAGWarningResponse = { - dag_id: string; - warning_type: DagWarningType; - message: string; - timestamp: string; + dag_id: string; + warning_type: DagWarningType; + message: string; + timestamp: string; }; /** * DagProcessor info serializer for responses. */ export type DagProcessorInfoResponse = { - status: string | null; - latest_dag_processor_heartbeat: string | null; + status: string | null; + latest_dag_processor_heartbeat: string | null; }; /** * DAGRun serializer for asset responses. 
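 *
 * Illustrative sketch (dates hypothetical): a batch filter built from
 * DAGRunsBatchBody above; unset fields are simply omitted.
 * @example
 * const query: DAGRunsBatchBody = {
 *   states: ["failed"],
 *   start_date_gte: "2025-01-01T00:00:00Z",
 *   page_limit: 50,
 *   order_by: "-run_after",
 * };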
*/ export type DagRunAssetReference = { - run_id: string; - dag_id: string; - logical_date: string | null; - start_date: string; - end_date: string | null; - state: string; - data_interval_start: string | null; - data_interval_end: string | null; + run_id: string; + dag_id: string; + logical_date: string | null; + start_date: string; + end_date: string | null; + state: string; + data_interval_start: string | null; + data_interval_end: string | null; }; /** @@ -668,78 +743,72 @@ export type DagRunAssetReference = { * so please ensure that their values always match the ones with the * same name in TaskInstanceState. */ -export type DagRunState = "queued" | "running" | "success" | "failed"; +export type DagRunState = 'queued' | 'running' | 'success' | 'failed'; /** * Class with TriggeredBy types for DagRun. */ -export type DagRunTriggeredByType = - | "cli" - | "operator" - | "rest_api" - | "ui" - | "test" - | "timetable" - | "asset" - | "backfill"; +export type DagRunTriggeredByType = 'cli' | 'operator' | 'rest_api' | 'ui' | 'test' | 'timetable' | 'asset' | 'backfill'; /** * Class with DagRun types. */ -export type DagRunType = "backfill" | "scheduled" | "manual" | "asset_triggered"; +export type DagRunType = 'backfill' | 'scheduled' | 'manual' | 'asset_triggered'; /** * DAG schedule reference serializer for assets. */ export type DagScheduleAssetReference = { - dag_id: string; - created_at: string; - updated_at: string; + dag_id: string; + created_at: string; + updated_at: string; }; /** * DAG Stats Collection serializer for responses. */ export type DagStatsCollectionResponse = { - dags: Array; - total_entries: number; + dags: Array; + total_entries: number; }; /** * DAG Stats serializer for responses. */ export type DagStatsResponse = { - dag_id: string; - stats: Array; + dag_id: string; + dag_display_name: string; + stats: Array; }; /** * DagStatsState serializer for responses. */ export type DagStatsStateResponse = { - state: DagRunState; - count: number; + state: DagRunState; + count: number; }; /** * DAG Tag serializer for responses. */ export type DagTagResponse = { - name: string; - dag_id: string; + name: string; + dag_id: string; }; /** * Dag Version serializer for responses. */ export type DagVersionResponse = { - id: string; - version_number: number; - dag_id: string; - bundle_name: string | null; - bundle_version: string | null; - created_at: string; - readonly bundle_url: string | null; + id: string; + version_number: number; + dag_id: string; + bundle_name: string | null; + bundle_version: string | null; + created_at: string; + dag_display_name: string; + readonly bundle_url: string | null; }; /** @@ -748,392 +817,520 @@ export type DagVersionResponse = { * This is the set of allowable values for the ``warning_type`` field * in the DagWarning model. */ -export type DagWarningType = "asset conflict" | "non-existent pool"; +export type DagWarningType = 'asset conflict' | 'non-existent pool'; /** * Backfill collection serializer for responses in dry-run mode. */ export type DryRunBackfillCollectionResponse = { - backfills: Array; - total_entries: number; + backfills: Array; + total_entries: number; }; /** * Backfill serializer for responses in dry-run mode. */ export type DryRunBackfillResponse = { - logical_date: string; + logical_date: string; }; /** * Event Log Collection Response. */ export type EventLogCollectionResponse = { - event_logs: Array; - total_entries: number; + event_logs: Array; + total_entries: number; }; /** * Event Log Response. 
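 *
 * Illustrative sketch: exhaustively mapping the DagRunState union defined
 * above; the `never` assignment makes this switch fail to compile if a new
 * state is ever added to the union.
 * @example
 * const label = (s: DagRunState): string => {
 *   switch (s) {
 *     case "queued": return "Queued";
 *     case "running": return "Running";
 *     case "success": return "Success";
 *     case "failed": return "Failed";
 *     default: { const unreachable: never = s; return unreachable; }
 *   }
 * };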
*/ export type EventLogResponse = { - event_log_id: number; - when: string; - dag_id: string | null; - task_id: string | null; - run_id: string | null; - map_index: number | null; - try_number: number | null; - event: string; - logical_date: string | null; - owner: string | null; - extra: string | null; + event_log_id: number; + when: string; + dag_id: string | null; + task_id: string | null; + run_id: string | null; + map_index: number | null; + try_number: number | null; + event: string; + logical_date: string | null; + owner: string | null; + extra: string | null; + dag_display_name?: string | null; }; +/** + * Response for the external log URL endpoint. + */ +export type ExternalLogUrlResponse = { + url: string; +}; + +/** + * Serializer for External View Plugin responses. + */ +export type ExternalViewResponse = { + name: string; + icon?: string | null; + icon_dark_mode?: string | null; + url_route?: string | null; + category?: string | null; + href: string; + destination?: 'nav' | 'dag' | 'dag_run' | 'task' | 'task_instance'; + [key: string]: unknown | string; +}; + +export type destination = 'nav' | 'dag' | 'dag_run' | 'task' | 'task_instance'; + /** * Extra Links Response. */ export type ExtraLinkCollectionResponse = { - extra_links: { - [key: string]: string | null; - }; - total_entries: number; + extra_links: { + [key: string]: (string | null); + }; + total_entries: number; }; /** * Serializer for Plugin FastAPI App responses. */ export type FastAPIAppResponse = { - app: string; - url_prefix: string; - name: string; - [key: string]: unknown | string; + app: string; + url_prefix: string; + name: string; + [key: string]: unknown | string; }; /** * Serializer for Plugin FastAPI root middleware responses. */ export type FastAPIRootMiddlewareResponse = { - middleware: string; - name: string; - [key: string]: unknown | string; + middleware: string; + name: string; + [key: string]: unknown | string; +}; + +/** + * Schema for Human-in-the-loop detail. + */ +export type HITLDetail = { + task_instance: TaskInstanceResponse; + options: Array<(string)>; + subject: string; + body?: string | null; + defaults?: Array<(string)> | null; + multiple?: boolean; + params?: { + [key: string]: unknown; + }; + user_id?: string | null; + response_at?: string | null; + chosen_options?: Array<(string)> | null; + params_input?: { + [key: string]: unknown; + }; + response_received?: boolean; +}; + +/** + * Schema for a collection of Human-in-the-loop details. + */ +export type HITLDetailCollection = { + hitl_details: Array; + total_entries: number; +}; + +/** + * Response of updating a Human-in-the-loop detail. + */ +export type HITLDetailResponse = { + user_id: string; + response_at: string; + chosen_options: Array<(string)>; + params_input?: { + [key: string]: unknown; + }; }; /** * HTTPException Model used for error response. */ export type HTTPExceptionResponse = { - detail: - | string - | { - [key: string]: unknown; - }; + detail: string | { + [key: string]: unknown; +}; }; export type HTTPValidationError = { - detail?: Array; + detail?: Array; }; /** * Health serializer for responses. */ export type HealthInfoResponse = { - metadatabase: BaseInfoResponse; - scheduler: SchedulerInfoResponse; - triggerer: TriggererInfoResponse; - dag_processor?: DagProcessorInfoResponse | null; + metadatabase: BaseInfoResponse; + scheduler: SchedulerInfoResponse; + triggerer: TriggererInfoResponse; + dag_processor?: DagProcessorInfoResponse | null; }; /** * Import Error Collection Response. 
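 *
 * Illustrative sketch (assumes a HITLDetail value as defined above): chosen
 * options are only meaningful once response_received is true.
 * @example
 * const chosenOptions = (d: HITLDetail): string[] =>
 *   d.response_received ? (d.chosen_options ?? []) : [];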
*/ export type ImportErrorCollectionResponse = { - import_errors: Array; - total_entries: number; + import_errors: Array; + total_entries: number; }; /** * Import Error Response. */ export type ImportErrorResponse = { - import_error_id: number; - timestamp: string; - filename: string; - bundle_name: string | null; - stack_trace: string; + import_error_id: number; + timestamp: string; + filename: string; + bundle_name: string | null; + stack_trace: string; }; /** * Job Collection Response. */ export type JobCollectionResponse = { - jobs: Array; - total_entries: number; + jobs: Array; + total_entries: number; }; /** * Job serializer for responses. */ export type JobResponse = { - id: number; - dag_id: string | null; - state: string | null; - job_type: string | null; - start_date: string | null; - end_date: string | null; - latest_heartbeat: string | null; - executor_class: string | null; - hostname: string | null; - unixname: string | null; + id: number; + dag_id: string | null; + state: string | null; + job_type: string | null; + start_date: string | null; + end_date: string | null; + latest_heartbeat: string | null; + executor_class: string | null; + hostname: string | null; + unixname: string | null; + dag_display_name?: string | null; +}; + +export type JsonValue = unknown; + +/** + * Last asset event response serializer. + */ +export type LastAssetEventResponse = { + id?: number | null; + timestamp?: string | null; }; /** * Request body for Clear Task Instances endpoint. */ export type PatchTaskInstanceBody = { - new_state?: TaskInstanceState | null; - note?: string | null; - include_upstream?: boolean; - include_downstream?: boolean; - include_future?: boolean; - include_past?: boolean; + new_state?: TaskInstanceState | null; + note?: string | null; + include_upstream?: boolean; + include_downstream?: boolean; + include_future?: boolean; + include_past?: boolean; }; /** * Plugin Collection serializer. */ export type PluginCollectionResponse = { - plugins: Array; - total_entries: number; + plugins: Array; + total_entries: number; +}; + +/** + * Plugin Import Error Collection serializer. + */ +export type PluginImportErrorCollectionResponse = { + import_errors: Array; + total_entries: number; +}; + +/** + * Plugin Import Error serializer for responses. + */ +export type PluginImportErrorResponse = { + source: string; + error: string; }; /** * Plugin serializer. */ export type PluginResponse = { - name: string; - macros: Array; - flask_blueprints: Array; - fastapi_apps: Array; - fastapi_root_middlewares: Array; - appbuilder_views: Array; - appbuilder_menu_items: Array; - global_operator_extra_links: Array; - operator_extra_links: Array; - source: string; - listeners: Array; - timetables: Array; + name: string; + macros: Array<(string)>; + flask_blueprints: Array<(string)>; + fastapi_apps: Array; + fastapi_root_middlewares: Array; + /** + * Aggregate all external views. Both 'external_views' and 'appbuilder_menu_items' are included here. + */ + external_views: Array; + react_apps: Array; + appbuilder_views: Array; + /** + * @deprecated + */ + appbuilder_menu_items: Array; + global_operator_extra_links: Array<(string)>; + operator_extra_links: Array<(string)>; + source: string; + listeners: Array<(string)>; + timetables: Array<(string)>; }; /** * Pool serializer for post bodies. 
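 *
 * Illustrative sketch (note text hypothetical): force-setting a task instance
 * state with PatchTaskInstanceBody above, cascading to downstream instances.
 * @example
 * const tiPatch: PatchTaskInstanceBody = {
 *   new_state: "skipped",
 *   include_downstream: true,
 *   note: "superseded by hotfix run",
 * };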
*/ export type PoolBody = { - name: string; - slots: number; - description?: string | null; - include_deferred?: boolean; + name: string; + slots: number; + description?: string | null; + include_deferred?: boolean; }; /** * Pool Collection serializer for responses. */ export type PoolCollectionResponse = { - pools: Array; - total_entries: number; + pools: Array; + total_entries: number; }; /** * Pool serializer for patch bodies. */ export type PoolPatchBody = { - pool?: string | null; - slots?: number | null; - description?: string | null; - include_deferred?: boolean | null; + pool?: string | null; + slots?: number | null; + description?: string | null; + include_deferred?: boolean | null; }; /** * Pool serializer for responses. */ export type PoolResponse = { - name: string; - slots: number; - description: string | null; - include_deferred: boolean; - occupied_slots: number; - running_slots: number; - queued_slots: number; - scheduled_slots: number; - open_slots: number; - deferred_slots: number; + name: string; + slots: number; + description: string | null; + include_deferred: boolean; + occupied_slots: number; + running_slots: number; + queued_slots: number; + scheduled_slots: number; + open_slots: number; + deferred_slots: number; }; /** * Provider Collection serializer for responses. */ export type ProviderCollectionResponse = { - providers: Array; - total_entries: number; + providers: Array; + total_entries: number; }; /** * Provider serializer for responses. */ export type ProviderResponse = { - package_name: string; - description: string; - version: string; + package_name: string; + description: string; + version: string; }; /** * Queued Event Collection serializer for responses. */ export type QueuedEventCollectionResponse = { - queued_events: Array; - total_entries: number; + queued_events: Array; + total_entries: number; }; /** * Queued Event serializer for responses.. */ export type QueuedEventResponse = { - dag_id: string; - asset_id: number; - created_at: string; + dag_id: string; + asset_id: number; + created_at: string; + dag_display_name: string; }; +/** + * Serializer for React App Plugin responses. + */ +export type ReactAppResponse = { + name: string; + icon?: string | null; + icon_dark_mode?: string | null; + url_route?: string | null; + category?: string | null; + bundle_url: string; + destination?: 'nav' | 'dag' | 'dag_run' | 'task' | 'task_instance' | 'dashboard'; + [key: string]: unknown | string; +}; + +export type destination2 = 'nav' | 'dag' | 'dag_run' | 'task' | 'task_instance' | 'dashboard'; + /** * Internal enum for setting reprocess behavior in a backfill. * * :meta private: */ -export type ReprocessBehavior = "failed" | "completed" | "none"; +export type ReprocessBehavior = 'failed' | 'completed' | 'none'; /** * Scheduler info serializer for responses. */ export type SchedulerInfoResponse = { - status: string | null; - latest_scheduler_heartbeat: string | null; + status: string | null; + latest_scheduler_heartbeat: string | null; }; /** * An individual log message. */ export type StructuredLogMessage = { - timestamp?: string; - event: string; - [key: string]: unknown | string; + timestamp?: string; + event: string; + [key: string]: unknown | string; }; /** * Task collection serializer for responses. */ export type TaskCollectionResponse = { - tasks: Array; - total_entries: number; + tasks: Array; + total_entries: number; }; /** * Task scheduling dependencies collection serializer for responses. 
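 *
 * Illustrative sketch (pool name hypothetical): note the asymmetry in the
 * generated pool bodies above: PoolBody names the pool via `name`, while
 * PoolPatchBody uses `pool` for the same field.
 * @example
 * const createPool: PoolBody = { name: "etl", slots: 16, include_deferred: true };
 * const shrinkPool: PoolPatchBody = { slots: 8 };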
*/ export type TaskDependencyCollectionResponse = { - dependencies: Array; + dependencies: Array; }; /** * Task Dependency serializer for responses. */ export type TaskDependencyResponse = { - name: string; - reason: string; + name: string; + reason: string; +}; + +/** + * Task inlet reference serializer for assets. + */ +export type TaskInletAssetReference = { + dag_id: string; + task_id: string; + created_at: string; + updated_at: string; }; /** * Task Instance Collection serializer for responses. */ export type TaskInstanceCollectionResponse = { - task_instances: Array; - total_entries: number; + task_instances: Array; + total_entries: number; }; /** * TaskInstanceHistory Collection serializer for responses. */ export type TaskInstanceHistoryCollectionResponse = { - task_instances: Array; - total_entries: number; + task_instances: Array; + total_entries: number; }; /** * TaskInstanceHistory serializer for responses. */ export type TaskInstanceHistoryResponse = { - task_id: string; - dag_id: string; - dag_run_id: string; - map_index: number; - start_date: string | null; - end_date: string | null; - duration: number | null; - state: TaskInstanceState | null; - try_number: number; - max_tries: number; - task_display_name: string; - hostname: string | null; - unixname: string | null; - pool: string; - pool_slots: number; - queue: string | null; - priority_weight: number | null; - operator: string | null; - queued_when: string | null; - scheduled_when: string | null; - pid: number | null; - executor: string | null; - executor_config: string; - dag_version: DagVersionResponse | null; + task_id: string; + dag_id: string; + dag_run_id: string; + map_index: number; + start_date: string | null; + end_date: string | null; + duration: number | null; + state: TaskInstanceState | null; + try_number: number; + max_tries: number; + task_display_name: string; + dag_display_name: string; + hostname: string | null; + unixname: string | null; + pool: string; + pool_slots: number; + queue: string | null; + priority_weight: number | null; + operator: string | null; + queued_when: string | null; + scheduled_when: string | null; + pid: number | null; + executor: string | null; + executor_config: string; + dag_version: DagVersionResponse | null; }; /** * TaskInstance serializer for responses. 
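 *
 * Illustrative sketch (interpreting max_tries minus try_number as the
 * remaining retry budget of a TaskInstanceHistoryResponse row above):
 * @example
 * const retriesLeft = (ti: TaskInstanceHistoryResponse): number =>
 *   Math.max(0, ti.max_tries - ti.try_number);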
*/ export type TaskInstanceResponse = { - id: string; - task_id: string; - dag_id: string; - dag_run_id: string; - map_index: number; - logical_date: string | null; - run_after: string; - start_date: string | null; - end_date: string | null; - duration: number | null; - state: TaskInstanceState | null; - try_number: number; - max_tries: number; - task_display_name: string; - hostname: string | null; - unixname: string | null; - pool: string; - pool_slots: number; - queue: string | null; - priority_weight: number | null; - operator: string | null; - queued_when: string | null; - scheduled_when: string | null; - pid: number | null; - executor: string | null; - executor_config: string; - note: string | null; - rendered_map_index: string | null; - rendered_fields?: { - [key: string]: unknown; - }; - trigger: TriggerResponse | null; - triggerer_job: JobResponse | null; - dag_version: DagVersionResponse | null; + id: string; + task_id: string; + dag_id: string; + dag_run_id: string; + map_index: number; + logical_date: string | null; + run_after: string; + start_date: string | null; + end_date: string | null; + duration: number | null; + state: TaskInstanceState | null; + try_number: number; + max_tries: number; + task_display_name: string; + dag_display_name: string; + hostname: string | null; + unixname: string | null; + pool: string; + pool_slots: number; + queue: string | null; + priority_weight: number | null; + operator: string | null; + queued_when: string | null; + scheduled_when: string | null; + pid: number | null; + executor: string | null; + executor_config: string; + note: string | null; + rendered_map_index: string | null; + rendered_fields?: { + [key: string]: unknown; + }; + trigger: TriggerResponse | null; + triggerer_job: JobResponse | null; + dag_version: DagVersionResponse | null; }; /** @@ -1141,341 +1338,335 @@ export type TaskInstanceResponse = { * * Note that None is also allowed, so always use this in a type hint with Optional. */ -export type TaskInstanceState = - | "removed" - | "scheduled" - | "queued" - | "running" - | "success" - | "restarting" - | "failed" - | "up_for_retry" - | "up_for_reschedule" - | "upstream_failed" - | "skipped" - | "deferred"; +export type TaskInstanceState = 'removed' | 'scheduled' | 'queued' | 'running' | 'success' | 'restarting' | 'failed' | 'up_for_retry' | 'up_for_reschedule' | 'upstream_failed' | 'skipped' | 'deferred'; /** * Task Instance body for get batch. 
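 *
 * Illustrative sketch (the choice of terminal states is an assumption, not
 * part of the generated client): a predicate over the TaskInstanceState
 * union above; `satisfies` keeps the list checked against the union without
 * widening its element type.
 * @example
 * const TERMINAL = ["success", "failed", "skipped",
 *   "upstream_failed", "removed"] as const satisfies readonly TaskInstanceState[];
 * const isTerminal = (s: TaskInstanceState | null): boolean =>
 *   s !== null && (TERMINAL as readonly string[]).includes(s);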
*/ export type TaskInstancesBatchBody = { - dag_ids?: Array | null; - dag_run_ids?: Array | null; - task_ids?: Array | null; - state?: Array | null; - run_after_gte?: string | null; - run_after_lte?: string | null; - logical_date_gte?: string | null; - logical_date_lte?: string | null; - start_date_gte?: string | null; - start_date_lte?: string | null; - end_date_gte?: string | null; - end_date_lte?: string | null; - duration_gte?: number | null; - duration_lte?: number | null; - pool?: Array | null; - queue?: Array | null; - executor?: Array | null; - page_offset?: number; - page_limit?: number; - order_by?: string | null; + dag_ids?: Array<(string)> | null; + dag_run_ids?: Array<(string)> | null; + task_ids?: Array<(string)> | null; + state?: Array<(TaskInstanceState | null)> | null; + run_after_gte?: string | null; + run_after_lte?: string | null; + logical_date_gte?: string | null; + logical_date_lte?: string | null; + start_date_gte?: string | null; + start_date_lte?: string | null; + end_date_gte?: string | null; + end_date_lte?: string | null; + duration_gte?: number | null; + duration_lte?: number | null; + pool?: Array<(string)> | null; + queue?: Array<(string)> | null; + executor?: Array<(string)> | null; + page_offset?: number; + page_limit?: number; + order_by?: string | null; }; /** * Log serializer for responses. */ export type TaskInstancesLogResponse = { - content: Array | Array; - continuation_token: string | null; + content: Array | Array<(string)>; + continuation_token: string | null; }; /** * Task outlet reference serializer for assets. */ export type TaskOutletAssetReference = { - dag_id: string; - task_id: string; - created_at: string; - updated_at: string; + dag_id: string; + task_id: string; + created_at: string; + updated_at: string; }; /** * Task serializer for responses. */ export type TaskResponse = { - task_id: string | null; - task_display_name: string | null; - owner: string | null; - start_date: string | null; - end_date: string | null; - trigger_rule: string | null; - depends_on_past: boolean; - wait_for_downstream: boolean; - retries: number | null; - queue: string | null; - pool: string | null; - pool_slots: number | null; - execution_timeout: TimeDelta | null; - retry_delay: TimeDelta | null; - retry_exponential_backoff: boolean; - priority_weight: number | null; - weight_rule: string | null; - ui_color: string | null; - ui_fgcolor: string | null; - template_fields: Array | null; - downstream_task_ids: Array | null; - doc_md: string | null; - operator_name: string | null; - params: { + task_id: string | null; + task_display_name: string | null; + owner: string | null; + start_date: string | null; + end_date: string | null; + trigger_rule: string | null; + depends_on_past: boolean; + wait_for_downstream: boolean; + retries: number | null; + queue: string | null; + pool: string | null; + pool_slots: number | null; + execution_timeout: TimeDelta | null; + retry_delay: TimeDelta | null; + retry_exponential_backoff: boolean; + priority_weight: number | null; + weight_rule: string | null; + ui_color: string | null; + ui_fgcolor: string | null; + template_fields: Array<(string)> | null; + downstream_task_ids: Array<(string)> | null; + doc_md: string | null; + operator_name: string | null; + params: { [key: string]: unknown; - } | null; - class_ref: { +} | null; + class_ref: { [key: string]: unknown; - } | null; - is_mapped: boolean | null; - /** - * Extract and return extra_links. 
- */ - readonly extra_links: Array; +} | null; + is_mapped: boolean | null; + /** + * Extract and return extra_links. + */ + readonly extra_links: Array<(string)>; }; /** * TimeDelta can be used to interact with datetime.timedelta objects. */ export type TimeDelta = { - __type?: string; - days: number; - seconds: number; - microseconds: number; + __type?: string; + days: number; + seconds: number; + microseconds: number; }; /** * Trigger DAG Run Serializer for POST body. */ export type TriggerDAGRunPostBody = { - dag_run_id?: string | null; - data_interval_start?: string | null; - data_interval_end?: string | null; - logical_date: string | null; - run_after?: string | null; - conf?: { + dag_run_id?: string | null; + data_interval_start?: string | null; + data_interval_end?: string | null; + logical_date: string | null; + run_after?: string | null; + conf?: { [key: string]: unknown; - }; - note?: string | null; +} | null; + note?: string | null; }; /** * Trigger serializer for responses. */ export type TriggerResponse = { - id: number; - classpath: string; - kwargs: string; - created_date: string; - triggerer_id: number | null; + id: number; + classpath: string; + kwargs: string; + created_date: string; + triggerer_id: number | null; }; /** * Triggerer info serializer for responses. */ export type TriggererInfoResponse = { - status: string | null; - latest_triggerer_heartbeat: string | null; + status: string | null; + latest_triggerer_heartbeat: string | null; +}; + +/** + * Schema for updating the content of a Human-in-the-loop detail. + */ +export type UpdateHITLDetailPayload = { + chosen_options: Array<(string)>; + params_input?: { + [key: string]: unknown; + }; }; export type ValidationError = { - loc: Array; - msg: string; - type: string; + loc: Array<(string | number)>; + msg: string; + type: string; }; /** * Variable serializer for bodies. */ export type VariableBody = { - key: string; - value: string; - description?: string | null; + key: string; + value: JsonValue; + description?: string | null; }; /** * Variable Collection serializer for responses. */ export type VariableCollectionResponse = { - variables: Array; - total_entries: number; + variables: Array; + total_entries: number; }; /** * Variable serializer for responses. */ export type VariableResponse = { - key: string; - value: string; - description: string | null; - is_encrypted: boolean; + key: string; + value: string; + description: string | null; + is_encrypted: boolean; }; /** * Version information serializer for responses. */ export type VersionInfo = { - version: string; - git_version: string | null; + version: string; + git_version: string | null; }; /** * XCom Collection serializer for responses. */ export type XComCollectionResponse = { - xcom_entries: Array; - total_entries: number; + xcom_entries: Array; + total_entries: number; }; /** * Payload serializer for creating an XCom entry. */ export type XComCreateBody = { - key: string; - value: unknown; - map_index?: number; + key: string; + value: unknown; + map_index?: number; }; /** * Serializer for a xcom item. */ export type XComResponse = { - key: string; - timestamp: string; - logical_date: string | null; - map_index: number; - task_id: string; - dag_id: string; - run_id: string; + key: string; + timestamp: string; + logical_date: string | null; + map_index: number; + task_id: string; + dag_id: string; + run_id: string; + dag_display_name: string; }; /** * XCom response serializer with native return type. 
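 *
 * Illustrative sketch (conf/note hypothetical): in TriggerDAGRunPostBody
 * above, logical_date is nullable but not optional, so pass null explicitly
 * to let the server assign it.
 * @example
 * const trigger: TriggerDAGRunPostBody = {
 *   logical_date: null,
 *   conf: { source: "adhoc" },
 *   note: "triggered from a maintenance script",
 * };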
*/ export type XComResponseNative = { - key: string; - timestamp: string; - logical_date: string | null; - map_index: number; - task_id: string; - dag_id: string; - run_id: string; - value: unknown; + key: string; + timestamp: string; + logical_date: string | null; + map_index: number; + task_id: string; + dag_id: string; + run_id: string; + dag_display_name: string; + value: unknown; }; /** * XCom response serializer with string return type. */ export type XComResponseString = { - key: string; - timestamp: string; - logical_date: string | null; - map_index: number; - task_id: string; - dag_id: string; - run_id: string; - value: string | null; + key: string; + timestamp: string; + logical_date: string | null; + map_index: number; + task_id: string; + dag_id: string; + run_id: string; + dag_display_name: string; + value: string | null; }; /** * Payload serializer for updating an XCom entry. */ export type XComUpdateBody = { - value: unknown; - map_index?: number; + value: unknown; + map_index?: number; }; /** * Base Edge serializer for responses. */ export type BaseEdgeResponse = { - source_id: string; - target_id: string; + source_id: string; + target_id: string; }; /** * Base Graph serializer for responses. */ export type BaseGraphResponse = { - edges: Array; - nodes: Array; + edges: Array; + nodes: Array; }; /** * Base Node serializer for responses. */ export type BaseNodeResponse = { - id: string; - label: string; - type: - | "join" - | "task" - | "asset-condition" - | "asset" - | "asset-alias" - | "asset-name-ref" - | "asset-uri-ref" - | "dag" - | "sensor" - | "trigger"; -}; - -export type type = - | "join" - | "task" - | "asset-condition" - | "asset" - | "asset-alias" - | "asset-name-ref" - | "asset-uri-ref" - | "dag" - | "sensor" - | "trigger"; + id: string; + label: string; + type: 'join' | 'task' | 'asset-condition' | 'asset' | 'asset-alias' | 'asset-name-ref' | 'asset-uri-ref' | 'dag' | 'sensor' | 'trigger'; +}; + +export type type = 'join' | 'task' | 'asset-condition' | 'asset' | 'asset-alias' | 'asset-name-ref' | 'asset-uri-ref' | 'dag' | 'sensor' | 'trigger'; + +/** + * Response model for calendar time range results. + */ +export type CalendarTimeRangeCollectionResponse = { + total_entries: number; + dag_runs: Array; +}; + +/** + * Represents a summary of DAG runs for a specific calendar time range. + */ +export type CalendarTimeRangeResponse = { + date: string; + state: 'queued' | 'running' | 'success' | 'failed' | 'planned'; + count: number; +}; + +export type state = 'queued' | 'running' | 'success' | 'failed' | 'planned'; /** * configuration serializer. 
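 *
 * Illustrative sketch (assumes dag_runs holds CalendarTimeRangeResponse
 * entries, as the calendar types above indicate): totalling calendar counts
 * per state.
 * @example
 * const totals = (res: CalendarTimeRangeCollectionResponse): Record<string, number> =>
 *   res.dag_runs.reduce<Record<string, number>>((acc, r) => {
 *     acc[r.state] = (acc[r.state] ?? 0) + r.count;
 *     return acc;
 *   }, {});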
*/ export type ConfigResponse = { - navbar_color: string; - navbar_text_color: string; - navbar_hover_color: string; - navbar_text_hover_color: string; - page_size: number; - auto_refresh_interval: number; - hide_paused_dags_by_default: boolean; - instance_name: string; - instance_name_has_markup: boolean; - enable_swagger_ui: boolean; - require_confirmation_dag_change: boolean; - default_wrap: boolean; - warn_deployment_exposure: boolean; - audit_view_excluded_events: string; - audit_view_included_events: string; - test_connection: string; - dashboard_alert: Array; + page_size: number; + auto_refresh_interval: number; + hide_paused_dags_by_default: boolean; + instance_name: string; + enable_swagger_ui: boolean; + require_confirmation_dag_change: boolean; + default_wrap: boolean; + test_connection: string; + dashboard_alert: Array; + show_external_log_redirect: boolean; + external_log_name?: string | null; }; /** * A class to store the behavior of each standard field of a Hook. */ export type ConnectionHookFieldBehavior = { - /** - * Flag if the form field should be hidden. - */ - hidden?: boolean; - /** - * Label / title for the field that should be displayed, if re-labelling is needed. Use `None` to display standard title. - */ - title?: string | null; - /** - * Placeholder text that should be populated to the form. - */ - placeholder?: string | null; + /** + * Flag if the form field should be hidden. + */ + hidden?: boolean; + /** + * Label / title for the field that should be displayed, if re-labelling is needed. Use `None` to display standard title. + */ + title?: string | null; + /** + * Placeholder text that should be populated to the form. + */ + placeholder?: string | null; }; /** @@ -1485,457 +1676,478 @@ export type ConnectionHookFieldBehavior = { * the API server/Web UI can use this data to render connection form UI. */ export type ConnectionHookMetaData = { - connection_type: string | null; - hook_class_name: string | null; - default_conn_name: string | null; - hook_name: string; - standard_fields: StandardHookFields | null; - extra_fields: { + connection_type: string | null; + hook_class_name: string | null; + default_conn_name: string | null; + hook_name: string; + standard_fields: StandardHookFields | null; + extra_fields: { [key: string]: unknown; - } | null; +} | null; +}; + +/** + * DAG Run serializer for responses. + */ +export type DAGRunLightResponse = { + id: number; + dag_id: string; + run_id: string; + logical_date: string | null; + run_after: string; + start_date: string | null; + end_date: string | null; + state: DagRunState; }; /** * DAG Run States for responses. */ export type DAGRunStates = { - queued: number; - running: number; - success: number; - failed: number; + queued: number; + running: number; + success: number; + failed: number; }; /** * DAG Run Types for responses. */ export type DAGRunTypes = { - backfill: number; - scheduled: number; - manual: number; - asset_triggered: number; + backfill: number; + scheduled: number; + manual: number; + asset_triggered: number; }; /** * DAG with latest dag runs collection response serializer. */ export type DAGWithLatestDagRunsCollectionResponse = { - total_entries: number; - dags: Array; + total_entries: number; + dags: Array; }; /** * DAG with latest dag runs response serializer. 
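 *
 * Illustrative sketch: total run count across the four DAGRunStates buckets
 * above.
 * @example
 * const totalRuns = (s: DAGRunStates): number =>
 *   s.queued + s.running + s.success + s.failed;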
*/ export type DAGWithLatestDagRunsResponse = { - dag_id: string; - dag_display_name: string; - is_paused: boolean; - is_stale: boolean; - last_parsed_time: string | null; - last_expired: string | null; - bundle_name: string | null; - relative_fileloc: string | null; - fileloc: string; - description: string | null; - timetable_summary: string | null; - timetable_description: string | null; - tags: Array; - max_active_tasks: number; - max_active_runs: number | null; - max_consecutive_failed_dag_runs: number; - has_task_concurrency_limits: boolean; - has_import_errors: boolean; - next_dagrun_logical_date: string | null; - next_dagrun_data_interval_start: string | null; - next_dagrun_data_interval_end: string | null; - next_dagrun_run_after: string | null; - owners: Array; - asset_expression: { + dag_id: string; + dag_display_name: string; + is_paused: boolean; + is_stale: boolean; + last_parsed_time: string | null; + last_expired: string | null; + bundle_name: string | null; + bundle_version: string | null; + relative_fileloc: string | null; + fileloc: string; + description: string | null; + timetable_summary: string | null; + timetable_description: string | null; + tags: Array; + max_active_tasks: number; + max_active_runs: number | null; + max_consecutive_failed_dag_runs: number; + has_task_concurrency_limits: boolean; + has_import_errors: boolean; + next_dagrun_logical_date: string | null; + next_dagrun_data_interval_start: string | null; + next_dagrun_data_interval_end: string | null; + next_dagrun_run_after: string | null; + owners: Array<(string)>; + asset_expression: { [key: string]: unknown; - } | null; - latest_dag_runs: Array; - /** - * Return file token. - */ - readonly file_token: string; +} | null; + latest_dag_runs: Array; + /** + * Return file token. + */ + readonly file_token: string; +}; + +/** + * Dashboard DAG Stats serializer for responses. + */ +export type DashboardDagStatsResponse = { + active_dag_count: number; + failed_dag_count: number; + running_dag_count: number; + queued_dag_count: number; }; /** * Edge serializer for responses. */ export type EdgeResponse = { - source_id: string; - target_id: string; - is_setup_teardown?: boolean | null; - label?: string | null; - is_source_asset?: boolean | null; + source_id: string; + target_id: string; + is_setup_teardown?: boolean | null; + label?: string | null; + is_source_asset?: boolean | null; }; export type ExtraMenuItem = { - text: string; - href: string; + text: string; + href: string; }; /** - * DAG Run model for the Grid UI. + * Base Node serializer for responses. */ -export type GridDAGRunwithTIs = { - dag_run_id: string; - queued_at: string | null; - start_date: string | null; - end_date: string | null; - run_after: string; - state: DagRunState; - run_type: DagRunType; - logical_date: string | null; - data_interval_start: string | null; - data_interval_end: string | null; - note: string | null; - task_instances: Array; +export type GridNodeResponse = { + id: string; + label: string; + children?: Array | null; + is_mapped: boolean | null; + setup_teardown_type?: 'setup' | 'teardown' | null; }; /** - * Response model for the Grid UI. + * Base Node serializer for responses. 
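 *
 * Illustrative sketch (assumes children holds nested GridNodeResponse nodes,
 * per the grid node type above): flattening the recursive grid tree into a
 * list of ids.
 * @example
 * const collectIds = (node: GridNodeResponse): string[] =>
 *   [node.id, ...(node.children ?? []).flatMap(collectIds)];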
*/ -export type GridResponse = { - dag_runs: Array; - structure: StructureDataResponse; +export type GridRunsResponse = { + dag_id: string; + run_id: string; + queued_at: string | null; + start_date: string | null; + end_date: string | null; + run_after: string; + state: TaskInstanceState | null; + run_type: DagRunType; + readonly duration: number; }; /** - * Task Instance Summary model for the Grid UI. + * DAG Run model for the Grid UI. */ -export type GridTaskInstanceSummary = { - task_id: string; - try_number: number; - start_date: string | null; - end_date: string | null; - queued_dttm: string | null; - child_states: { - [key: string]: number; - } | null; - task_count: number; - state: TaskInstanceState | null; - note: string | null; +export type GridTISummaries = { + run_id: string; + dag_id: string; + task_instances: Array; }; /** * Historical Metric Data serializer for responses. */ export type HistoricalMetricDataResponse = { - dag_run_types: DAGRunTypes; - dag_run_states: DAGRunStates; - task_instance_states: TaskInstanceStateCount; + dag_run_types: DAGRunTypes; + dag_run_states: DAGRunStates; + task_instance_states: TaskInstanceStateCount; +}; + +/** + * Task Instance Summary model for the Grid UI. + */ +export type LightGridTaskInstanceSummary = { + task_id: string; + state: TaskInstanceState | null; + child_states: { + [key: string]: (number); +} | null; + min_start_date: string | null; + max_end_date: string | null; }; /** * Define all menu items defined in the menu. */ -export type MenuItem = - | "Assets" - | "Audit Log" - | "Config" - | "Connections" - | "Dags" - | "Docs" - | "Plugins" - | "Pools" - | "Providers" - | "Variables" - | "XComs"; +export type MenuItem = 'Required Actions' | 'Assets' | 'Audit Log' | 'Config' | 'Connections' | 'Dags' | 'Docs' | 'Plugins' | 'Pools' | 'Providers' | 'Variables' | 'XComs'; /** * Menu Item Collection serializer for responses. */ export type MenuItemCollectionResponse = { - authorized_menu_items: Array; - extra_menu_items: Array; + authorized_menu_items: Array; + extra_menu_items: Array; }; /** * Node serializer for responses. */ export type NodeResponse = { - id: string; - label: string; - type: - | "join" - | "task" - | "asset-condition" - | "asset" - | "asset-alias" - | "asset-name-ref" - | "asset-uri-ref" - | "dag" - | "sensor" - | "trigger"; - children?: Array | null; - is_mapped?: boolean | null; - tooltip?: string | null; - setup_teardown_type?: "setup" | "teardown" | null; - operator?: string | null; - asset_condition_type?: "or-gate" | "and-gate" | null; + id: string; + label: string; + type: 'join' | 'task' | 'asset-condition' | 'asset' | 'asset-alias' | 'asset-name-ref' | 'asset-uri-ref' | 'dag' | 'sensor' | 'trigger'; + children?: Array | null; + is_mapped?: boolean | null; + tooltip?: string | null; + setup_teardown_type?: 'setup' | 'teardown' | null; + operator?: string | null; + asset_condition_type?: 'or-gate' | 'and-gate' | null; }; /** * Standard fields of a Hook that a form will render. 
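 *
 * Illustrative sketch: picking the most frequent child state from a
 * LightGridTaskInstanceSummary above.
 * @example
 * const dominantState = (t: LightGridTaskInstanceSummary): string | null => {
 *   const entries = Object.entries(t.child_states ?? {});
 *   entries.sort((a, b) => b[1] - a[1]);
 *   return entries.length > 0 ? entries[0][0] : null;
 * };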
*/ export type StandardHookFields = { - description: ConnectionHookFieldBehavior | null; - url_schema: ConnectionHookFieldBehavior | null; - host: ConnectionHookFieldBehavior | null; - port: ConnectionHookFieldBehavior | null; - login: ConnectionHookFieldBehavior | null; - password: ConnectionHookFieldBehavior | null; + description: ConnectionHookFieldBehavior | null; + url_schema: ConnectionHookFieldBehavior | null; + host: ConnectionHookFieldBehavior | null; + port: ConnectionHookFieldBehavior | null; + login: ConnectionHookFieldBehavior | null; + password: ConnectionHookFieldBehavior | null; }; /** * Structure Data serializer for responses. */ export type StructureDataResponse = { - edges: Array; - nodes: Array; + edges: Array; + nodes: Array; }; /** * TaskInstance serializer for responses. */ export type TaskInstanceStateCount = { - no_status: number; - removed: number; - scheduled: number; - queued: number; - running: number; - success: number; - restarting: number; - failed: number; - up_for_retry: number; - up_for_reschedule: number; - upstream_failed: number; - skipped: number; - deferred: number; + no_status: number; + removed: number; + scheduled: number; + queued: number; + running: number; + success: number; + restarting: number; + failed: number; + up_for_retry: number; + up_for_reschedule: number; + upstream_failed: number; + skipped: number; + deferred: number; }; /** * Optional alert to be shown at the top of the page. */ export type UIAlert = { - text: string; - category: "info" | "warning" | "error"; + text: string; + category: 'info' | 'warning' | 'error'; }; -export type category = "info" | "warning" | "error"; +export type category = 'info' | 'warning' | 'error'; export type GetAssetsData = { - dagIds?: Array; - limit?: number; - namePattern?: string | null; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string | null; + dagIds?: Array<(string)>; + limit?: number; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + namePattern?: string | null; + offset?: number; + onlyActive?: boolean; + orderBy?: Array<(string)>; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + uriPattern?: string | null; }; export type GetAssetsResponse = AssetCollectionResponse; export type GetAssetAliasesData = { - limit?: number; - namePattern?: string | null; - offset?: number; - orderBy?: string; + limit?: number; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
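 *
 * Illustrative sketch (pattern hypothetical): the *Pattern query params use
 * SQL LIKE semantics as documented above, so match "contains customer" with
 * % wildcards rather than a regular expression.
 * @example
 * const params: GetAssetsData = {
 *   namePattern: "%customer%",
 *   orderBy: ["name"],
 *   limit: 25,
 * };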
+ */ + namePattern?: string | null; + offset?: number; + orderBy?: Array<(string)>; }; export type GetAssetAliasesResponse = AssetAliasCollectionResponse; export type GetAssetAliasData = { - assetAliasId: number; + assetAliasId: number; }; export type GetAssetAliasResponse = unknown; export type GetAssetEventsData = { - assetId?: number | null; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string | null; - sourceMapIndex?: number | null; - sourceRunId?: string | null; - sourceTaskId?: string | null; - timestampGte?: string | null; - timestampLte?: string | null; + assetId?: number | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + sourceDagId?: string | null; + sourceMapIndex?: number | null; + sourceRunId?: string | null; + sourceTaskId?: string | null; + timestampGte?: string | null; + timestampLte?: string | null; }; export type GetAssetEventsResponse = AssetEventCollectionResponse; export type CreateAssetEventData = { - requestBody: CreateAssetEventsBody; + requestBody: CreateAssetEventsBody; }; export type CreateAssetEventResponse = AssetEventResponse; export type MaterializeAssetData = { - assetId: number; + assetId: number; }; export type MaterializeAssetResponse = DAGRunResponse; export type GetAssetQueuedEventsData = { - assetId: number; - before?: string | null; + assetId: number; + before?: string | null; }; export type GetAssetQueuedEventsResponse = QueuedEventCollectionResponse; export type DeleteAssetQueuedEventsData = { - assetId: number; - before?: string | null; + assetId: number; + before?: string | null; }; export type DeleteAssetQueuedEventsResponse = void; export type GetAssetData = { - assetId: number; + assetId: number; }; export type GetAssetResponse = AssetResponse; export type GetDagAssetQueuedEventsData = { - before?: string | null; - dagId: string; + before?: string | null; + dagId: string; }; export type GetDagAssetQueuedEventsResponse = QueuedEventCollectionResponse; export type DeleteDagAssetQueuedEventsData = { - before?: string | null; - dagId: string; + before?: string | null; + dagId: string; }; export type DeleteDagAssetQueuedEventsResponse = void; export type GetDagAssetQueuedEventData = { - assetId: number; - before?: string | null; - dagId: string; + assetId: number; + before?: string | null; + dagId: string; }; export type GetDagAssetQueuedEventResponse = QueuedEventResponse; export type DeleteDagAssetQueuedEventData = { - assetId: number; - before?: string | null; - dagId: string; + assetId: number; + before?: string | null; + dagId: string; }; export type DeleteDagAssetQueuedEventResponse = void; export type NextRunAssetsData = { - dagId: string; + dagId: string; }; export type NextRunAssetsResponse = { - [key: string]: unknown; + [key: string]: unknown; }; export type ListBackfillsData = { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; }; export type ListBackfillsResponse = BackfillCollectionResponse; export type CreateBackfillData = { - requestBody: BackfillPostBody; + requestBody: BackfillPostBody; }; export type CreateBackfillResponse = BackfillResponse; export type GetBackfillData = { - backfillId: string; + backfillId: number; }; export type GetBackfillResponse = BackfillResponse; export type PauseBackfillData = { - backfillId: unknown; + backfillId: number; }; export type PauseBackfillResponse = BackfillResponse; export type UnpauseBackfillData = { - backfillId: unknown; + 
backfillId: number; }; export type UnpauseBackfillResponse = BackfillResponse; export type CancelBackfillData = { - backfillId: unknown; + backfillId: number; }; export type CancelBackfillResponse = BackfillResponse; export type CreateBackfillDryRunData = { - requestBody: BackfillPostBody; + requestBody: BackfillPostBody; }; export type CreateBackfillDryRunResponse = DryRunBackfillCollectionResponse; -export type ListBackfills1Data = { - active?: boolean | null; - dagId?: string | null; - limit?: number; - offset?: number; - orderBy?: string; +export type ListBackfillsUiData = { + active?: boolean | null; + dagId?: string | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; }; -export type ListBackfills1Response = BackfillCollectionResponse; +export type ListBackfillsUiResponse = BackfillCollectionResponse; export type DeleteConnectionData = { - connectionId: string; + connectionId: string; }; export type DeleteConnectionResponse = void; export type GetConnectionData = { - connectionId: string; + connectionId: string; }; export type GetConnectionResponse = ConnectionResponse; export type PatchConnectionData = { - connectionId: string; - requestBody: ConnectionBody; - updateMask?: Array | null; + connectionId: string; + requestBody: ConnectionBody; + updateMask?: Array<(string)> | null; }; export type PatchConnectionResponse = ConnectionResponse; export type GetConnectionsData = { - connectionIdPattern?: string | null; - limit?: number; - offset?: number; - orderBy?: string; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + connectionIdPattern?: string | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; }; export type GetConnectionsResponse = ConnectionCollectionResponse; export type PostConnectionData = { - requestBody: ConnectionBody; + requestBody: ConnectionBody; }; export type PostConnectionResponse = ConnectionResponse; export type BulkConnectionsData = { - requestBody: BulkBody_ConnectionBody_; + requestBody: BulkBody_ConnectionBody_; }; export type BulkConnectionsResponse = BulkResponse; export type TestConnectionData = { - requestBody: ConnectionBody; + requestBody: ConnectionBody; }; export type TestConnectionResponse = ConnectionTestResponse; @@ -1945,109 +2157,132 @@ export type CreateDefaultConnectionsResponse = void; export type HookMetaDataResponse = Array; export type GetDagRunData = { - dagId: string; - dagRunId: string; + dagId: string; + dagRunId: string; }; export type GetDagRunResponse = DAGRunResponse; export type DeleteDagRunData = { - dagId: string; - dagRunId: string; + dagId: string; + dagRunId: string; }; export type DeleteDagRunResponse = void; export type PatchDagRunData = { - dagId: string; - dagRunId: string; - requestBody: DAGRunPatchBody; - updateMask?: Array | null; + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: Array<(string)> | null; }; export type PatchDagRunResponse = DAGRunResponse; export type GetUpstreamAssetEventsData = { - dagId: string; - dagRunId: string; + dagId: string; + dagRunId: string; }; export type GetUpstreamAssetEventsResponse = AssetEventCollectionResponse; export type ClearDagRunData = { - dagId: string; - dagRunId: string; - requestBody: DAGRunClearBody; + dagId: string; + dagRunId: string; + requestBody: DAGRunClearBody; }; export type ClearDagRunResponse = TaskInstanceCollectionResponse | DAGRunResponse; export type GetDagRunsData = { - dagId: string; - endDateGte?: string | 
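
The backfill operations above tighten `backfillId` from `unknown` (and `string` on the read path) to `number`, so callers coming from string route params must parse before building the request. A sketch; the client method name is hypothetical:

```ts
// backfillId is now a number across get/pause/unpause/cancel.
const pause: PauseBackfillData = { backfillId: Number("42") };
// await pauseBackfill(pause); // hypothetical client call -> BackfillResponse
```
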
null; - endDateLte?: string | null; - limit?: number; - logicalDateGte?: string | null; - logicalDateLte?: string | null; - offset?: number; - orderBy?: string; - runAfterGte?: string | null; - runAfterLte?: string | null; - runType?: Array; - startDateGte?: string | null; - startDateLte?: string | null; - state?: Array; - updatedAtGte?: string | null; - updatedAtLte?: string | null; + dagId: string; + endDateGte?: string | null; + endDateLte?: string | null; + limit?: number; + logicalDateGte?: string | null; + logicalDateLte?: string | null; + offset?: number; + orderBy?: Array<(string)>; + runAfterGte?: string | null; + runAfterLte?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + runIdPattern?: string | null; + runType?: Array<(string)>; + startDateGte?: string | null; + startDateLte?: string | null; + state?: Array<(string)>; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + triggeringUserNamePattern?: string | null; + updatedAtGte?: string | null; + updatedAtLte?: string | null; }; export type GetDagRunsResponse = DAGRunCollectionResponse; export type TriggerDagRunData = { - dagId: unknown; - requestBody: TriggerDAGRunPostBody; + dagId: unknown; + requestBody: TriggerDAGRunPostBody; }; export type TriggerDagRunResponse = DAGRunResponse; +export type WaitDagRunUntilFinishedData = { + dagId: string; + dagRunId: string; + /** + * Seconds to wait between dag run state checks + */ + interval: number; + /** + * Collect result XCom from task. Can be set multiple times. + */ + result?: Array<(string)> | null; +}; + +export type WaitDagRunUntilFinishedResponse = unknown; + export type GetListDagRunsBatchData = { - dagId: "~"; - requestBody: DAGRunsBatchBody; + dagId: "~"; + requestBody: DAGRunsBatchBody; }; export type GetListDagRunsBatchResponse = DAGRunCollectionResponse; export type GetDagSourceData = { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number | null; + accept?: 'application/json' | 'text/plain' | '*/*'; + dagId: string; + versionNumber?: number | null; }; export type GetDagSourceResponse = DAGSourceResponse; export type GetDagStatsData = { - dagIds?: Array; + dagIds?: Array<(string)>; }; export type GetDagStatsResponse = DagStatsCollectionResponse; export type GetDagReportsData = { - subdir: string; + subdir: string; }; export type GetDagReportsResponse = unknown; export type GetConfigData = { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string | null; + accept?: 'application/json' | 'text/plain' | '*/*'; + section?: string | null; }; export type GetConfigResponse = Config; export type GetConfigValueData = { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; + accept?: 'application/json' | 'text/plain' | '*/*'; + option: string; + section: string; }; export type GetConfigValueResponse = Config; @@ -2055,3433 +2290,4031 @@ export type GetConfigValueResponse = Config; export type GetConfigsResponse = ConfigResponse; export type ListDagWarningsData = { - dagId?: string | null; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType | null; + dagId?: string | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + warningType?: DagWarningType | null; }; export type ListDagWarningsResponse = DAGWarningCollectionResponse; export type GetDagsData = { - 
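
`WaitDagRunUntilFinishedData` introduces a blocking-poll endpoint; its path, `/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait`, appears in the `$OpenApiTs` map later in this hunk. A sketch of calling it directly — the `fetch` wrapper, base URL, and task id are assumptions; only the parameter shape is generated:

```ts
async function waitForRun(dagId: string, dagRunId: string): Promise<unknown> {
  const params = new URLSearchParams({ interval: "5" }); // seconds between state checks
  params.append("result", "load_task"); // repeatable: collect this task's XCom (hypothetical id)
  const res = await fetch(`/api/v2/dags/${dagId}/dagRuns/${dagRunId}/wait?${params}`);
  return res.json(); // WaitDagRunUntilFinishedResponse is typed as unknown
}
```
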
dagDisplayNamePattern?: string | null; - dagIdPattern?: string | null; - dagRunEndDateGte?: string | null; - dagRunEndDateLte?: string | null; - dagRunStartDateGte?: string | null; - dagRunStartDateLte?: string | null; - dagRunState?: Array; - excludeStale?: boolean; - lastDagRunState?: DagRunState | null; - limit?: number; - offset?: number; - orderBy?: string; - owners?: Array; - paused?: boolean | null; - tags?: Array; - tagsMatchMode?: "any" | "all" | null; + bundleName?: string | null; + bundleVersion?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + dagDisplayNamePattern?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + dagIdPattern?: string | null; + dagRunEndDateGte?: string | null; + dagRunEndDateLte?: string | null; + dagRunStartDateGte?: string | null; + dagRunStartDateLte?: string | null; + dagRunState?: Array<(string)>; + excludeStale?: boolean; + isFavorite?: boolean | null; + lastDagRunState?: DagRunState | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + owners?: Array<(string)>; + paused?: boolean | null; + tags?: Array<(string)>; + tagsMatchMode?: 'any' | 'all' | null; }; export type GetDagsResponse = DAGCollectionResponse; export type PatchDagsData = { - dagIdPattern?: string | null; - excludeStale?: boolean; - lastDagRunState?: DagRunState | null; - limit?: number; - offset?: number; - owners?: Array; - paused?: boolean | null; - requestBody: DAGPatchBody; - tags?: Array; - tagsMatchMode?: "any" | "all" | null; - updateMask?: Array | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + dagIdPattern?: string | null; + excludeStale?: boolean; + limit?: number; + offset?: number; + owners?: Array<(string)>; + paused?: boolean | null; + requestBody: DAGPatchBody; + tags?: Array<(string)>; + tagsMatchMode?: 'any' | 'all' | null; + updateMask?: Array<(string)> | null; }; export type PatchDagsResponse = DAGCollectionResponse; export type GetDagData = { - dagId: string; + dagId: string; }; export type GetDagResponse = DAGResponse; export type PatchDagData = { - dagId: string; - requestBody: DAGPatchBody; - updateMask?: Array | null; + dagId: string; + requestBody: DAGPatchBody; + updateMask?: Array<(string)> | null; }; export type PatchDagResponse = DAGResponse; export type DeleteDagData = { - dagId: string; + dagId: string; }; export type DeleteDagResponse = unknown; export type GetDagDetailsData = { - dagId: string; + dagId: string; }; export type GetDagDetailsResponse = DAGDetailsResponse; +export type FavoriteDagData = { + dagId: string; +}; + +export type FavoriteDagResponse = void; + +export type UnfavoriteDagData = { + dagId: string; +}; + +export type UnfavoriteDagResponse = void; + export type GetDagTagsData = { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + tagNamePattern?: string | null; }; export type GetDagTagsResponse = DAGTagCollectionResponse; +export type GetDagsUiData = { + bundleName?: string | null; + bundleVersion?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). 
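
`FavoriteDagData` / `UnfavoriteDagData` carry only a `dagId`, and the new `isFavorite` filter on `GetDagsData` closes the loop. A sketch; the client function name is hypothetical:

```ts
const fav: FavoriteDagData = { dagId: "example_dag" }; // hypothetical DAG id
// await favoriteDag(fav);   // FavoriteDagResponse is void
const onlyFavourites: GetDagsData = { isFavorite: true, limit: 25 };
```
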
Regular expressions are **not** supported. + */ + dagDisplayNamePattern?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + dagIdPattern?: string | null; + dagIds?: Array<(string)> | null; + dagRunsLimit?: number; + excludeStale?: boolean; + isFavorite?: boolean | null; + lastDagRunState?: DagRunState | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + owners?: Array<(string)>; + paused?: boolean | null; + tags?: Array<(string)>; + tagsMatchMode?: 'any' | 'all' | null; +}; + +export type GetDagsUiResponse = DAGWithLatestDagRunsCollectionResponse; + +export type GetLatestRunInfoData = { + dagId: string; +}; + +export type GetLatestRunInfoResponse = DAGRunLightResponse | null; + export type GetEventLogData = { - eventLogId: number; + eventLogId: number; }; export type GetEventLogResponse = EventLogResponse; export type GetEventLogsData = { - after?: string | null; - before?: string | null; - dagId?: string | null; - event?: string | null; - excludedEvents?: Array | null; - includedEvents?: Array | null; - limit?: number; - mapIndex?: number | null; - offset?: number; - orderBy?: string; - owner?: string | null; - runId?: string | null; - taskId?: string | null; - tryNumber?: number | null; + after?: string | null; + before?: string | null; + dagId?: string | null; + event?: string | null; + excludedEvents?: Array<(string)> | null; + includedEvents?: Array<(string)> | null; + limit?: number; + mapIndex?: number | null; + offset?: number; + orderBy?: Array<(string)>; + owner?: string | null; + runId?: string | null; + taskId?: string | null; + tryNumber?: number | null; }; export type GetEventLogsResponse = EventLogCollectionResponse; export type GetExtraLinksData = { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; }; export type GetExtraLinksResponse = ExtraLinkCollectionResponse; export type GetTaskInstanceData = { - dagId: string; - dagRunId: string; - taskId: string; + dagId: string; + dagRunId: string; + taskId: string; }; export type GetTaskInstanceResponse = TaskInstanceResponse; export type PatchTaskInstanceData = { - dagId: string; - dagRunId: string; - mapIndex?: number; - requestBody: PatchTaskInstanceBody; - taskId: string; - updateMask?: Array | null; + dagId: string; + dagRunId: string; + mapIndex?: number | null; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: Array<(string)> | null; }; -export type PatchTaskInstanceResponse = TaskInstanceResponse; +export type PatchTaskInstanceResponse = TaskInstanceCollectionResponse; + +export type DeleteTaskInstanceData = { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; +}; + +export type DeleteTaskInstanceResponse = null; export type GetMappedTaskInstancesData = { - dagId: string; - dagRunId: string; - durationGte?: number | null; - durationLte?: number | null; - endDateGte?: string | null; - endDateLte?: string | null; - executor?: Array; - limit?: number; - logicalDateGte?: string | null; - logicalDateLte?: string | null; - offset?: number; - orderBy?: string; - pool?: Array; - queue?: Array; - runAfterGte?: string | null; - runAfterLte?: string | null; - startDateGte?: string | null; - startDateLte?: string | null; - state?: Array; - taskId: string; - updatedAtGte?: string | null; - updatedAtLte?: string | null; - versionNumber?: Array; + dagId: string; + dagRunId: 
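
Note the behavioural change above: `PatchTaskInstanceResponse` is now `TaskInstanceCollectionResponse`, and `mapIndex` becomes nullable. A sketch of the request side; `PatchTaskInstanceBody`'s fields are not part of this hunk, so the body stays schematic and the `updateMask` value is assumed:

```ts
const patch: PatchTaskInstanceData = {
  dagId: "example_dag",
  dagRunId: "manual__2025-01-01",
  taskId: "transform",
  mapIndex: null, // now number | null; null targeting the unmapped TI is assumed semantics
  requestBody: {} as PatchTaskInstanceBody, // schema not shown in this hunk
  updateMask: ["state"], // assumed mask value
};
// The 200 response is now a collection, even when a single TI is patched.
```
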
string; + durationGte?: number | null; + durationLte?: number | null; + endDateGte?: string | null; + endDateLte?: string | null; + executor?: Array<(string)>; + limit?: number; + logicalDateGte?: string | null; + logicalDateLte?: string | null; + offset?: number; + orderBy?: Array<(string)>; + pool?: Array<(string)>; + queue?: Array<(string)>; + runAfterGte?: string | null; + runAfterLte?: string | null; + startDateGte?: string | null; + startDateLte?: string | null; + state?: Array<(string)>; + taskId: string; + updatedAtGte?: string | null; + updatedAtLte?: string | null; + versionNumber?: Array<(number)>; }; export type GetMappedTaskInstancesResponse = TaskInstanceCollectionResponse; -export type GetTaskInstanceDependenciesData = { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; +export type GetTaskInstanceDependenciesByMapIndexData = { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; }; -export type GetTaskInstanceDependenciesResponse = TaskDependencyCollectionResponse; +export type GetTaskInstanceDependenciesByMapIndexResponse = TaskDependencyCollectionResponse; -export type GetTaskInstanceDependencies1Data = { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; +export type GetTaskInstanceDependenciesData = { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; }; -export type GetTaskInstanceDependencies1Response = TaskDependencyCollectionResponse; +export type GetTaskInstanceDependenciesResponse = TaskDependencyCollectionResponse; export type GetTaskInstanceTriesData = { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; }; export type GetTaskInstanceTriesResponse = TaskInstanceHistoryCollectionResponse; export type GetMappedTaskInstanceTriesData = { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; }; export type GetMappedTaskInstanceTriesResponse = TaskInstanceHistoryCollectionResponse; export type GetMappedTaskInstanceData = { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; }; export type GetMappedTaskInstanceResponse = TaskInstanceResponse; -export type PatchTaskInstance1Data = { - dagId: string; - dagRunId: string; - mapIndex: number; - requestBody: PatchTaskInstanceBody; - taskId: string; - updateMask?: Array | null; +export type PatchTaskInstanceByMapIndexData = { + dagId: string; + dagRunId: string; + mapIndex: number | null; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: Array<(string)> | null; }; -export type PatchTaskInstance1Response = TaskInstanceResponse; +export type PatchTaskInstanceByMapIndexResponse = TaskInstanceCollectionResponse; export type GetTaskInstancesData = { - dagId: string; - dagRunId: string; - durationGte?: number | null; - durationLte?: number | null; - endDateGte?: string | null; - endDateLte?: string | null; - executor?: Array; - limit?: number; - logicalDateGte?: string | null; - logicalDateLte?: string | null; - offset?: number; - orderBy?: string; - pool?: Array; - queue?: Array; - runAfterGte?: string | null; - runAfterLte?: string | null; - startDateGte?: string | null; - startDateLte?: string | null; - state?: Array; - taskDisplayNamePattern?: string | null; - taskId?: string | null; - updatedAtGte?: string | null; - 
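
The auto-numbered `...Dependencies1` / `...Instance1` duplicates are renamed to explicit `...ByMapIndex` operations: the by-map-index variant requires `mapIndex`, the plain one leaves it optional. A sketch of the two request shapes, with hypothetical ids:

```ts
const mapped: GetTaskInstanceDependenciesByMapIndexData = {
  dagId: "example_dag",
  dagRunId: "manual__2025-01-01",
  taskId: "mapped_task",
  mapIndex: 3, // required on the by-map-index operation
};
const plain: GetTaskInstanceDependenciesData = {
  dagId: "example_dag",
  dagRunId: "manual__2025-01-01",
  taskId: "plain_task", // mapIndex omitted: optional on this operation
};
```
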
updatedAtLte?: string | null; - versionNumber?: Array; + dagId: string; + dagRunId: string; + durationGte?: number | null; + durationLte?: number | null; + endDateGte?: string | null; + endDateLte?: string | null; + executor?: Array<(string)>; + limit?: number; + logicalDateGte?: string | null; + logicalDateLte?: string | null; + offset?: number; + orderBy?: Array<(string)>; + pool?: Array<(string)>; + queue?: Array<(string)>; + runAfterGte?: string | null; + runAfterLte?: string | null; + startDateGte?: string | null; + startDateLte?: string | null; + state?: Array<(string)>; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + taskDisplayNamePattern?: string | null; + taskId?: string | null; + updatedAtGte?: string | null; + updatedAtLte?: string | null; + versionNumber?: Array<(number)>; }; export type GetTaskInstancesResponse = TaskInstanceCollectionResponse; +export type BulkTaskInstancesData = { + dagId: string; + dagRunId: string; + requestBody: BulkBody_BulkTaskInstanceBody_; +}; + +export type BulkTaskInstancesResponse = BulkResponse; + export type GetTaskInstancesBatchData = { - dagId: "~"; - dagRunId: "~"; - requestBody: TaskInstancesBatchBody; + dagId: "~"; + dagRunId: "~"; + requestBody: TaskInstancesBatchBody; }; export type GetTaskInstancesBatchResponse = TaskInstanceCollectionResponse; export type GetTaskInstanceTryDetailsData = { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; }; export type GetTaskInstanceTryDetailsResponse = TaskInstanceHistoryResponse; export type GetMappedTaskInstanceTryDetailsData = { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; }; export type GetMappedTaskInstanceTryDetailsResponse = TaskInstanceHistoryResponse; export type PostClearTaskInstancesData = { - dagId: string; - requestBody: ClearTaskInstancesBody; + dagId: string; + requestBody: ClearTaskInstancesBody; }; export type PostClearTaskInstancesResponse = TaskInstanceCollectionResponse; -export type PatchTaskInstanceDryRunData = { - dagId: string; - dagRunId: string; - mapIndex: number; - requestBody: PatchTaskInstanceBody; - taskId: string; - updateMask?: Array | null; +export type PatchTaskInstanceDryRunByMapIndexData = { + dagId: string; + dagRunId: string; + mapIndex: number | null; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: Array<(string)> | null; }; -export type PatchTaskInstanceDryRunResponse = TaskInstanceCollectionResponse; +export type PatchTaskInstanceDryRunByMapIndexResponse = TaskInstanceCollectionResponse; -export type PatchTaskInstanceDryRun1Data = { - dagId: string; - dagRunId: string; - mapIndex?: number; - requestBody: PatchTaskInstanceBody; - taskId: string; - updateMask?: Array | null; +export type PatchTaskInstanceDryRunData = { + dagId: string; + dagRunId: string; + mapIndex?: number | null; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: Array<(string)> | null; }; -export type PatchTaskInstanceDryRun1Response = TaskInstanceCollectionResponse; +export type PatchTaskInstanceDryRunResponse = TaskInstanceCollectionResponse; export type GetLogData = { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - dagRunId: string; - 
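
`GetTaskInstancesBatchData` pins both path parameters to the literal type `"~"` — the only value the batch route accepts, which the API treats as a wildcard spanning all DAGs and runs (an interpretation; the hunk only shows the literal type). The batch body schema is not in this hunk:

```ts
const batch: GetTaskInstancesBatchData = {
  dagId: "~",    // literal "~" is the sole legal value
  dagRunId: "~",
  requestBody: {} as TaskInstancesBatchBody, // schema not shown in this hunk
};
```
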
fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string | null; - tryNumber: number; + accept?: 'application/json' | 'application/x-ndjson' | '*/*'; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string | null; + tryNumber: number; }; export type GetLogResponse = TaskInstancesLogResponse; +export type GetExternalLogUrlData = { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + tryNumber: number; +}; + +export type GetExternalLogUrlResponse = ExternalLogUrlResponse; + export type GetImportErrorData = { - importErrorId: number; + importErrorId: number; }; export type GetImportErrorResponse = ImportErrorResponse; export type GetImportErrorsData = { - limit?: number; - offset?: number; - orderBy?: string; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; }; export type GetImportErrorsResponse = ImportErrorCollectionResponse; export type GetJobsData = { - endDateGte?: string | null; - endDateLte?: string | null; - executorClass?: string | null; - hostname?: string | null; - isAlive?: boolean | null; - jobState?: string | null; - jobType?: string | null; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string | null; - startDateLte?: string | null; + endDateGte?: string | null; + endDateLte?: string | null; + executorClass?: string | null; + hostname?: string | null; + isAlive?: boolean | null; + jobState?: string | null; + jobType?: string | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + startDateGte?: string | null; + startDateLte?: string | null; }; export type GetJobsResponse = JobCollectionResponse; export type GetPluginsData = { - limit?: number; - offset?: number; + limit?: number; + offset?: number; }; export type GetPluginsResponse = PluginCollectionResponse; +export type ImportErrorsResponse = PluginImportErrorCollectionResponse; + export type DeletePoolData = { - poolName: string; + poolName: string; }; export type DeletePoolResponse = void; export type GetPoolData = { - poolName: string; + poolName: string; }; export type GetPoolResponse = PoolResponse; export type PatchPoolData = { - poolName: string; - requestBody: PoolPatchBody; - updateMask?: Array | null; + poolName: string; + requestBody: PoolPatchBody; + updateMask?: Array<(string)> | null; }; export type PatchPoolResponse = PoolResponse; export type GetPoolsData = { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
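
`GetLogData` swaps `text/plain` for `application/x-ndjson` in its accept union — one JSON record per line, which suits incremental log delivery — and the new `GetExternalLogUrlData` resolves a URL for externally stored logs (`ExternalLogUrlResponse`). A sketch of a log request; the streaming remark is an inference from the media type, not stated in the hunk:

```ts
const logQuery: GetLogData = {
  accept: "application/x-ndjson", // newline-delimited JSON, one log record per line
  dagId: "example_dag",
  dagRunId: "manual__2025-01-01",
  taskId: "transform",
  tryNumber: 1,
};
```
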
+ */ + poolNamePattern?: string | null; }; export type GetPoolsResponse = PoolCollectionResponse; export type PostPoolData = { - requestBody: PoolBody; + requestBody: PoolBody; }; export type PostPoolResponse = PoolResponse; export type BulkPoolsData = { - requestBody: BulkBody_PoolBody_; + requestBody: BulkBody_PoolBody_; }; export type BulkPoolsResponse = BulkResponse; export type GetProvidersData = { - limit?: number; - offset?: number; + limit?: number; + offset?: number; }; export type GetProvidersResponse = ProviderCollectionResponse; export type GetXcomEntryData = { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; }; export type GetXcomEntryResponse = XComResponseNative | XComResponseString; export type UpdateXcomEntryData = { - dagId: string; - dagRunId: string; - requestBody: XComUpdateBody; - taskId: string; - xcomKey: string; + dagId: string; + dagRunId: string; + requestBody: XComUpdateBody; + taskId: string; + xcomKey: string; }; export type UpdateXcomEntryResponse = XComResponseNative; export type GetXcomEntriesData = { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number | null; - offset?: number; - taskId: string; - xcomKey?: string | null; + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number | null; + offset?: number; + taskId: string; + xcomKey?: string | null; }; export type GetXcomEntriesResponse = XComCollectionResponse; export type CreateXcomEntryData = { - dagId: string; - dagRunId: string; - requestBody: XComCreateBody; - taskId: string; + dagId: string; + dagRunId: string; + requestBody: XComCreateBody; + taskId: string; }; export type CreateXcomEntryResponse = XComResponseNative; export type GetTasksData = { - dagId: string; - orderBy?: string; + dagId: string; + orderBy?: string; }; export type GetTasksResponse = TaskCollectionResponse; export type GetTaskData = { - dagId: string; - taskId: unknown; + dagId: string; + taskId: unknown; }; export type GetTaskResponse = TaskResponse; export type DeleteVariableData = { - variableKey: string; + variableKey: string; }; export type DeleteVariableResponse = void; export type GetVariableData = { - variableKey: string; + variableKey: string; }; export type GetVariableResponse = VariableResponse; export type PatchVariableData = { - requestBody: VariableBody; - updateMask?: Array | null; - variableKey: string; + requestBody: VariableBody; + updateMask?: Array<(string)> | null; + variableKey: string; }; export type PatchVariableResponse = VariableResponse; export type GetVariablesData = { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string | null; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
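
For XCom reads, the `deserialize` / `stringify` flags steer which member of the `XComResponseNative | XComResponseString` union comes back — an inference from the response types; the flags' exact server-side behaviour is not described in this hunk:

```ts
const xcom: GetXcomEntryData = {
  dagId: "example_dag",
  dagRunId: "manual__2025-01-01",
  taskId: "transform",
  xcomKey: "return_value",
  deserialize: true, // request the native (deserialized) value, per the response union
};
```
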
+ */ + variableKeyPattern?: string | null; }; export type GetVariablesResponse = VariableCollectionResponse; export type PostVariableData = { - requestBody: VariableBody; + requestBody: VariableBody; }; export type PostVariableResponse = VariableResponse; export type BulkVariablesData = { - requestBody: BulkBody_VariableBody_; + requestBody: BulkBody_VariableBody_; }; export type BulkVariablesResponse = BulkResponse; export type ReparseDagFileData = { - fileToken: string; + fileToken: string; }; export type ReparseDagFileResponse = null; export type GetDagVersionData = { - dagId: string; - versionNumber: number; + dagId: string; + versionNumber: number; }; export type GetDagVersionResponse = DagVersionResponse; export type GetDagVersionsData = { - bundleName?: string; - bundleVersion?: string | null; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; + bundleName?: string; + bundleVersion?: string | null; + dagId: string; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + versionNumber?: number; }; export type GetDagVersionsResponse = DAGVersionCollectionResponse; +export type UpdateHitlDetailData = { + dagId: string; + dagRunId: string; + requestBody: UpdateHITLDetailPayload; + taskId: string; +}; + +export type UpdateHitlDetailResponse = HITLDetailResponse; + +export type GetHitlDetailData = { + dagId: string; + dagRunId: string; + taskId: string; +}; + +export type GetHitlDetailResponse = HITLDetail; + +export type UpdateMappedTiHitlDetailData = { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: UpdateHITLDetailPayload; + taskId: string; +}; + +export type UpdateMappedTiHitlDetailResponse = HITLDetailResponse; + +export type GetMappedTiHitlDetailData = { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}; + +export type GetMappedTiHitlDetailResponse = HITLDetail; + +export type GetHitlDetailsData = { + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + bodySearch?: string | null; + dagId?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + dagIdPattern?: string | null; + dagRunId?: string; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + responseReceived?: boolean | null; + state?: Array<(string)>; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + subjectSearch?: string | null; + taskId?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
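
The new Human-in-the-Loop (HITL) operations pair a read (`GetHitlDetail*`) with an update (`UpdateHitlDetail*`) per task instance, plus mapped-index variants. A sketch of an update; `UpdateHITLDetailPayload` is not defined in this hunk, and the task id is hypothetical:

```ts
const hitlUpdate: UpdateHitlDetailData = {
  dagId: "example_dag",
  dagRunId: "manual__2025-01-01",
  taskId: "approval_gate", // hypothetical HITL task
  requestBody: {} as UpdateHITLDetailPayload, // payload schema not shown in this hunk
};
// await updateHitlDetail(hitlUpdate); // hypothetical client call -> HITLDetailResponse
```
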
+ */ + taskIdPattern?: string | null; + userId?: Array<(string)>; +}; + +export type GetHitlDetailsResponse = HITLDetailCollection; + export type GetHealthResponse = HealthInfoResponse; export type GetVersionResponse = VersionInfo; export type LoginData = { - next?: string | null; + next?: string | null; }; export type LoginResponse = unknown; export type LogoutData = { - next?: string | null; + next?: string | null; }; export type LogoutResponse = unknown; -export type GetAuthMenusResponse = MenuItemCollectionResponse; - -export type RecentDagRunsData = { - dagDisplayNamePattern?: string | null; - dagIdPattern?: string | null; - dagIds?: Array | null; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState | null; - limit?: number; - offset?: number; - owners?: Array; - paused?: boolean | null; - tags?: Array; - tagsMatchMode?: "any" | "all" | null; +export type RefreshData = { + next?: string | null; }; -export type RecentDagRunsResponse = DAGWithLatestDagRunsCollectionResponse; +export type RefreshResponse = unknown; + +export type GetAuthMenusResponse = MenuItemCollectionResponse; export type GetDependenciesData = { - nodeId?: string | null; + nodeId?: string | null; }; export type GetDependenciesResponse = BaseGraphResponse; export type HistoricalMetricsData = { - endDate?: string | null; - startDate: string; + endDate?: string | null; + startDate: string; }; export type HistoricalMetricsResponse = HistoricalMetricDataResponse; +export type DagStatsResponse2 = DashboardDagStatsResponse; + export type StructureDataData = { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string | null; - versionNumber?: number | null; + dagId: string; + externalDependencies?: boolean; + includeDownstream?: boolean; + includeUpstream?: boolean; + root?: string | null; + versionNumber?: number | null; }; export type StructureDataResponse2 = StructureDataResponse; -export type GridDataData = { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string | null; - logicalDateLte?: string | null; - offset?: number; - orderBy?: string; - root?: string | null; - runAfterGte?: string | null; - runAfterLte?: string | null; - runType?: Array; - state?: Array; +export type GetDagStructureData = { + dagId: string; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + runAfterGte?: string | null; + runAfterLte?: string | null; +}; + +export type GetDagStructureResponse = Array; + +export type GetGridRunsData = { + dagId: string; + limit?: number; + offset?: number; + orderBy?: Array<(string)>; + runAfterGte?: string | null; + runAfterLte?: string | null; +}; + +export type GetGridRunsResponse = Array; + +export type GetGridTiSummariesData = { + dagId: string; + runId: string; +}; + +export type GetGridTiSummariesResponse = GridTISummaries; + +export type GetCalendarData = { + dagId: string; + granularity?: 'hourly' | 'daily'; + logicalDateGte?: string | null; + logicalDateLte?: string | null; }; -export type GridDataResponse = GridResponse; +export type GetCalendarResponse = CalendarTimeRangeCollectionResponse; export type $OpenApiTs = { - "/api/v2/assets": { - get: { - req: GetAssetsData; - res: { - /** - * Successful Response - */ - 200: AssetCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * 
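
The monolithic `GridDataData` operation is split into narrower UI queries — structure, runs, per-run TI summaries — plus a calendar view with an explicit `granularity`. A sketch of the two most common request shapes; the descending-sort prefix is an assumed convention:

```ts
const gridRuns: GetGridRunsData = {
  dagId: "example_dag",
  orderBy: ["-run_after"], // "-" prefix for descending is an assumed convention
  limit: 25,
};
const calendar: GetCalendarData = {
  dagId: "example_dag",
  granularity: "daily", // union of 'hourly' | 'daily'
};
```
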
Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/assets/aliases": { - get: { - req: GetAssetAliasesData; - res: { - /** - * Successful Response - */ - 200: AssetAliasCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/assets/aliases/{asset_alias_id}": { - get: { - req: GetAssetAliasData; - res: { - /** - * Successful Response - */ - 200: unknown; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/assets/events": { - get: { - req: GetAssetEventsData; - res: { - /** - * Successful Response - */ - 200: AssetEventCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - post: { - req: CreateAssetEventData; - res: { - /** - * Successful Response - */ - 200: AssetEventResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/assets/{asset_id}/materialize": { - post: { - req: MaterializeAssetData; - res: { - /** - * Successful Response - */ - 200: DAGRunResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/assets/{asset_id}/queuedEvents": { - get: { - req: GetAssetQueuedEventsData; - res: { - /** - * Successful Response - */ - 200: QueuedEventCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - delete: { - req: DeleteAssetQueuedEventsData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/assets/{asset_id}": { - get: { - req: GetAssetData; - res: { - /** - * Successful Response - */ - 200: AssetResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/dags/{dag_id}/assets/queuedEvents": { - get: { - req: GetDagAssetQueuedEventsData; - res: { - /** - * Successful Response - */ - 200: QueuedEventCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: 
HTTPValidationError; - }; - }; - delete: { - req: DeleteDagAssetQueuedEventsData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents": { - get: { - req: GetDagAssetQueuedEventData; - res: { - /** - * Successful Response - */ - 200: QueuedEventResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - delete: { - req: DeleteDagAssetQueuedEventData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; - "/ui/next_run_assets/{dag_id}": { - get: { - req: NextRunAssetsData; - res: { - /** - * Successful Response - */ - 200: { - [key: string]: unknown; + '/api/v2/assets': { + get: { + req: GetAssetsData; + res: { + /** + * Successful Response + */ + 200: AssetCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; }; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; }; - }; - "/api/v2/backfills": { - get: { - req: ListBackfillsData; - res: { - /** - * Successful Response - */ - 200: BackfillCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/assets/aliases': { + get: { + req: GetAssetAliasesData; + res: { + /** + * Successful Response + */ + 200: AssetAliasCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - post: { - req: CreateBackfillData; - res: { - /** - * Successful Response - */ - 200: BackfillResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/assets/aliases/{asset_alias_id}': { + get: { + req: GetAssetAliasData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/backfills/{backfill_id}": { - get: { - req: GetBackfillData; - res: { - /** - * Successful Response - */ - 200: BackfillResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden 
- */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/assets/events': { + get: { + req: GetAssetEventsData; + res: { + /** + * Successful Response + */ + 200: AssetEventCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: CreateAssetEventData; + res: { + /** + * Successful Response + */ + 200: AssetEventResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/backfills/{backfill_id}/pause": { - put: { - req: PauseBackfillData; - res: { - /** - * Successful Response - */ - 200: BackfillResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/assets/{asset_id}/materialize': { + post: { + req: MaterializeAssetData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/backfills/{backfill_id}/unpause": { - put: { - req: UnpauseBackfillData; - res: { - /** - * Successful Response - */ - 200: BackfillResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/assets/{asset_id}/queuedEvents': { + get: { + req: GetAssetQueuedEventsData; + res: { + /** + * Successful Response + */ + 200: QueuedEventCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteAssetQueuedEventsData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/backfills/{backfill_id}/cancel": { - put: { - req: CancelBackfillData; - res: { - /** - * Successful Response - */ - 200: BackfillResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/assets/{asset_id}': { + get: { + req: GetAssetData; + res: { + /** + * Successful Response + */ + 200: AssetResponse; + /** 
+ * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/backfills/dry_run": { - post: { - req: CreateBackfillDryRunData; - res: { - /** - * Successful Response - */ - 200: DryRunBackfillCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/assets/queuedEvents': { + get: { + req: GetDagAssetQueuedEventsData; + res: { + /** + * Successful Response + */ + 200: QueuedEventCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteDagAssetQueuedEventsData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/ui/backfills": { - get: { - req: ListBackfills1Data; - res: { - /** - * Successful Response - */ - 200: BackfillCollectionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/assets/{asset_id}/queuedEvents': { + get: { + req: GetDagAssetQueuedEventData; + res: { + /** + * Successful Response + */ + 200: QueuedEventResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteDagAssetQueuedEventData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/connections/{connection_id}": { - delete: { - req: DeleteConnectionData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/next_run_assets/{dag_id}': { + get: { + req: NextRunAssetsData; + res: { + /** + * Successful Response + */ + 200: { + [key: string]: unknown; + }; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - get: { - req: GetConnectionData; - res: { - /** - * Successful Response - */ - 200: ConnectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + 
'/api/v2/backfills': { + get: { + req: ListBackfillsData; + res: { + /** + * Successful Response + */ + 200: BackfillCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: CreateBackfillData; + res: { + /** + * Successful Response + */ + 200: BackfillResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: PatchConnectionData; - res: { - /** - * Successful Response - */ - 200: ConnectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/backfills/{backfill_id}': { + get: { + req: GetBackfillData; + res: { + /** + * Successful Response + */ + 200: BackfillResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/connections": { - get: { - req: GetConnectionsData; - res: { - /** - * Successful Response - */ - 200: ConnectionCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/backfills/{backfill_id}/pause': { + put: { + req: PauseBackfillData; + res: { + /** + * Successful Response + */ + 200: BackfillResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - post: { - req: PostConnectionData; - res: { - /** - * Successful Response - */ - 201: ConnectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/backfills/{backfill_id}/unpause': { + put: { + req: UnpauseBackfillData; + res: { + /** + * Successful Response + */ + 200: BackfillResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: BulkConnectionsData; - res: { - /** - * Successful Response - */ - 200: BulkResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/backfills/{backfill_id}/cancel': { + put: { + req: CancelBackfillData; + res: { + /** + * Successful Response + */ + 200: BackfillResponse; + 
/** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/connections/test": { - post: { - req: TestConnectionData; - res: { - /** - * Successful Response - */ - 200: ConnectionTestResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/backfills/dry_run': { + post: { + req: CreateBackfillDryRunData; + res: { + /** + * Successful Response + */ + 200: DryRunBackfillCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/connections/defaults": { - post: { - res: { - /** - * Successful Response - */ - 204: void; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - }; + '/ui/backfills': { + get: { + req: ListBackfillsUiData; + res: { + /** + * Successful Response + */ + 200: BackfillCollectionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/ui/connections/hook_meta": { - get: { - res: { - /** - * Successful Response - */ - 200: Array; - }; + '/api/v2/connections/{connection_id}': { + delete: { + req: DeleteConnectionData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetConnectionData; + res: { + /** + * Successful Response + */ + 200: ConnectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchConnectionData; + res: { + /** + * Successful Response + */ + 200: ConnectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}": { - get: { - req: GetDagRunData; - res: { - /** - * Successful Response - */ - 200: DAGRunResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/connections': { + get: { + req: GetConnectionsData; + res: { + /** + * Successful Response + */ + 200: ConnectionCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: 
HTTPValidationError; + }; + }; + post: { + req: PostConnectionData; + res: { + /** + * Successful Response + */ + 201: ConnectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: BulkConnectionsData; + res: { + /** + * Successful Response + */ + 200: BulkResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - delete: { - req: DeleteDagRunData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/connections/test': { + post: { + req: TestConnectionData; + res: { + /** + * Successful Response + */ + 200: ConnectionTestResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: PatchDagRunData; - res: { - /** - * Successful Response - */ - 200: DAGRunResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/connections/defaults': { + post: { + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamAssetEvents": { - get: { - req: GetUpstreamAssetEventsData; - res: { - /** - * Successful Response - */ - 200: AssetEventCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/connections/hook_meta': { + get: { + res: { + /** + * Successful Response + */ + 200: Array; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/clear": { - post: { - req: ClearDagRunData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceCollectionResponse | DAGRunResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}': { + get: { + req: GetDagRunData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteDagRunData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: 
HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchDagRunData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns": { - get: { - req: GetDagRunsData; - res: { - /** - * Successful Response - */ - 200: DAGRunCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamAssetEvents': { + get: { + req: GetUpstreamAssetEventsData; + res: { + /** + * Successful Response + */ + 200: AssetEventCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - post: { - req: TriggerDagRunData; - res: { - /** - * Successful Response - */ - 200: DAGRunResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/clear': { + post: { + req: ClearDagRunData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse | DAGRunResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/list": { - post: { - req: GetListDagRunsBatchData; - res: { - /** - * Successful Response - */ - 200: DAGRunCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns': { + get: { + req: GetDagRunsData; + res: { + /** + * Successful Response + */ + 200: DAGRunCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: TriggerDagRunData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - 
"/api/v2/dagSources/{dag_id}": { - get: { - req: GetDagSourceData; - res: { - /** - * Successful Response - */ - 200: DAGSourceResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Not Acceptable - */ - 406: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait': { + get: { + req: WaitDagRunUntilFinishedData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dagStats": { - get: { - req: GetDagStatsData; - res: { - /** - * Successful Response - */ - 200: DagStatsCollectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/list': { + post: { + req: GetListDagRunsBatchData; + res: { + /** + * Successful Response + */ + 200: DAGRunCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dagReports": { - get: { - req: GetDagReportsData; - res: { - /** - * Successful Response - */ - 200: unknown; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dagSources/{dag_id}': { + get: { + req: GetDagSourceData; + res: { + /** + * Successful Response + */ + 200: DAGSourceResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Not Acceptable + */ + 406: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/config": { - get: { - req: GetConfigData; - res: { - /** - * Successful Response - */ - 200: Config; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Not Acceptable - */ - 406: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dagStats': { + get: { + req: GetDagStatsData; + res: { + /** + * Successful Response + */ + 200: DagStatsCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/config/section/{section}/option/{option}": { - get: { - req: GetConfigValueData; - res: { - /** - * Successful 
Response - */ - 200: Config; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Not Acceptable - */ - 406: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dagReports': { + get: { + req: GetDagReportsData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/ui/config": { - get: { - res: { - /** - * Successful Response - */ - 200: ConfigResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - }; + '/api/v2/config': { + get: { + req: GetConfigData; + res: { + /** + * Successful Response + */ + 200: Config; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Not Acceptable + */ + 406: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dagWarnings": { - get: { - req: ListDagWarningsData; - res: { - /** - * Successful Response - */ - 200: DAGWarningCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/config/section/{section}/option/{option}': { + get: { + req: GetConfigValueData; + res: { + /** + * Successful Response + */ + 200: Config; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Not Acceptable + */ + 406: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags": { - get: { - req: GetDagsData; - res: { - /** - * Successful Response - */ - 200: DAGCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/config': { + get: { + res: { + /** + * Successful Response + */ + 200: ConfigResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + }; + }; }; - patch: { - req: PatchDagsData; - res: { - /** - * Successful Response - */ - 200: DAGCollectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dagWarnings': { + get: { + req: ListDagWarningsData; + res: { + /** + * Successful Response + */ + 200: DAGWarningCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}": { - get: { - req: GetDagData; - res: { - /** - * Successful Response - */ - 200: DAGResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ 
- 404: HTTPExceptionResponse; - /** - * Unprocessable Entity - */ - 422: HTTPExceptionResponse; - }; + '/api/v2/dags': { + get: { + req: GetDagsData; + res: { + /** + * Successful Response + */ + 200: DAGCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchDagsData; + res: { + /** + * Successful Response + */ + 200: DAGCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: PatchDagData; - res: { - /** - * Successful Response - */ - 200: DAGResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}': { + get: { + req: GetDagData; + res: { + /** + * Successful Response + */ + 200: DAGResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Unprocessable Entity + */ + 422: HTTPExceptionResponse; + }; + }; + patch: { + req: PatchDagData; + res: { + /** + * Successful Response + */ + 200: DAGResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteDagData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Unprocessable Entity + */ + 422: HTTPExceptionResponse; + }; + }; }; - delete: { - req: DeleteDagData; - res: { - /** - * Successful Response - */ - 200: unknown; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Unprocessable Entity - */ - 422: HTTPExceptionResponse; - }; + '/api/v2/dags/{dag_id}/details': { + get: { + req: GetDagDetailsData; + res: { + /** + * Successful Response + */ + 200: DAGDetailsResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/details": { - get: { - req: GetDagDetailsData; - res: { - /** - * Successful Response - */ - 200: DAGDetailsResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: 
HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/favorite': { + post: { + req: FavoriteDagData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dagTags": { - get: { - req: GetDagTagsData; - res: { - /** - * Successful Response - */ - 200: DAGTagCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/unfavorite': { + post: { + req: UnfavoriteDagData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/eventLogs/{event_log_id}": { - get: { - req: GetEventLogData; - res: { - /** - * Successful Response - */ - 200: EventLogResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dagTags': { + get: { + req: GetDagTagsData; + res: { + /** + * Successful Response + */ + 200: DAGTagCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/eventLogs": { - get: { - req: GetEventLogsData; - res: { - /** - * Successful Response - */ - 200: EventLogCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/dags': { + get: { + req: GetDagsUiData; + res: { + /** + * Successful Response + */ + 200: DAGWithLatestDagRunsCollectionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links": { - get: { - req: GetExtraLinksData; - res: { - /** - * Successful Response - */ - 200: ExtraLinkCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/dags/{dag_id}/latest_run': { + get: { + req: GetLatestRunInfoData; + res: { + /** + * Successful Response + */ + 200: DAGRunLightResponse | null; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}": { - get: { - req: GetTaskInstanceData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: 
HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/eventLogs/{event_log_id}': { + get: { + req: GetEventLogData; + res: { + /** + * Successful Response + */ + 200: EventLogResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: PatchTaskInstanceData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/eventLogs': { + get: { + req: GetEventLogsData; + res: { + /** + * Successful Response + */ + 200: EventLogCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped": { - get: { - req: GetMappedTaskInstancesData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/links': { + get: { + req: GetExtraLinksData; + res: { + /** + * Successful Response + */ + 200: ExtraLinkCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies": { - get: { - req: GetTaskInstanceDependenciesData; - res: { - /** - * Successful Response - */ - 200: TaskDependencyCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}': { + get: { + req: GetTaskInstanceData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchTaskInstanceData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteTaskInstanceData; + res: 
{ + /** + * Successful Response + */ + 200: null; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies": { - get: { - req: GetTaskInstanceDependencies1Data; - res: { - /** - * Successful Response - */ - 200: TaskDependencyCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/listMapped': { + get: { + req: GetMappedTaskInstancesData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries": { - get: { - req: GetTaskInstanceTriesData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceHistoryCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dependencies': { + get: { + req: GetTaskInstanceDependenciesByMapIndexData; + res: { + /** + * Successful Response + */ + 200: TaskDependencyCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries": { - get: { - req: GetMappedTaskInstanceTriesData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceHistoryCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dependencies': { + get: { + req: GetTaskInstanceDependenciesData; + res: { + /** + * Successful Response + */ + 200: TaskDependencyCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}": { - get: { - req: GetMappedTaskInstanceData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + 
'/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries': { + get: { + req: GetTaskInstanceTriesData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceHistoryCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: PatchTaskInstance1Data; - res: { - /** - * Successful Response - */ - 200: TaskInstanceResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries': { + get: { + req: GetMappedTaskInstanceTriesData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceHistoryCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances": { - get: { - req: GetTaskInstancesData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}': { + get: { + req: GetMappedTaskInstanceData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchTaskInstanceByMapIndexData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list": { - post: { - req: GetTaskInstancesBatchData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances': { + get: { + req: GetTaskInstancesData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 
422: HTTPValidationError; + }; + }; + patch: { + req: BulkTaskInstancesData; + res: { + /** + * Successful Response + */ + 200: BulkResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}": { - get: { - req: GetTaskInstanceTryDetailsData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceHistoryResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/list': { + post: { + req: GetTaskInstancesBatchData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}": { - get: { - req: GetMappedTaskInstanceTryDetailsData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceHistoryResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/tries/{task_try_number}': { + get: { + req: GetTaskInstanceTryDetailsData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceHistoryResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/clearTaskInstances": { - post: { - req: PostClearTaskInstancesData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/tries/{task_try_number}': { + get: { + req: GetMappedTaskInstanceTryDetailsData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceHistoryResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dry_run": { - patch: { - req: PatchTaskInstanceDryRunData; - res: { - /** - * Successful Response - */ - 200: TaskInstanceCollectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error 
- */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/clearTaskInstances': { + post: { + req: PostClearTaskInstancesData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dry_run": { - patch: { - req: PatchTaskInstanceDryRun1Data; - res: { - /** - * Successful Response - */ - 200: TaskInstanceCollectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}/dry_run': { + patch: { + req: PatchTaskInstanceDryRunByMapIndexData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}": { - get: { - req: GetLogData; - res: { - /** - * Successful Response - */ - 200: TaskInstancesLogResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/dry_run': { + patch: { + req: PatchTaskInstanceDryRunData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/importErrors/{import_error_id}": { - get: { - req: GetImportErrorData; - res: { - /** - * Successful Response - */ - 200: ImportErrorResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{try_number}': { + get: { + req: GetLogData; + res: { + /** + * Successful Response + */ + 200: TaskInstancesLogResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/importErrors": { - get: { - req: GetImportErrorsData; - res: { - /** - * Successful Response - */ - 200: ImportErrorCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * 
Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/externalLogUrl/{try_number}': { + get: { + req: GetExternalLogUrlData; + res: { + /** + * Successful Response + */ + 200: ExternalLogUrlResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/jobs": { - get: { - req: GetJobsData; - res: { - /** - * Successful Response - */ - 200: JobCollectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/importErrors/{import_error_id}': { + get: { + req: GetImportErrorData; + res: { + /** + * Successful Response + */ + 200: ImportErrorResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/plugins": { - get: { - req: GetPluginsData; - res: { - /** - * Successful Response - */ - 200: PluginCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/importErrors': { + get: { + req: GetImportErrorsData; + res: { + /** + * Successful Response + */ + 200: ImportErrorCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/pools/{pool_name}": { - delete: { - req: DeletePoolData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/jobs': { + get: { + req: GetJobsData; + res: { + /** + * Successful Response + */ + 200: JobCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - get: { - req: GetPoolData; - res: { - /** - * Successful Response - */ - 200: PoolResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/plugins': { + get: { + req: GetPluginsData; + res: { + /** + * Successful Response + */ + 200: PluginCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: PatchPoolData; - res: { - /** - * Successful Response - */ - 200: PoolResponse; - /** - * Bad Request - */ - 400: 
HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/plugins/importErrors': { + get: { + res: { + /** + * Successful Response + */ + 200: PluginImportErrorCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + }; + }; }; - }; - "/api/v2/pools": { - get: { - req: GetPoolsData; - res: { - /** - * Successful Response - */ - 200: PoolCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/pools/{pool_name}': { + delete: { + req: DeletePoolData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetPoolData; + res: { + /** + * Successful Response + */ + 200: PoolResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchPoolData; + res: { + /** + * Successful Response + */ + 200: PoolResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - post: { - req: PostPoolData; - res: { - /** - * Successful Response - */ - 201: PoolResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/pools': { + get: { + req: GetPoolsData; + res: { + /** + * Successful Response + */ + 200: PoolCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: PostPoolData; + res: { + /** + * Successful Response + */ + 201: PoolResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: BulkPoolsData; + res: { + /** + * Successful Response + */ + 200: BulkResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: BulkPoolsData; - res: { - /** - * Successful Response - */ - 200: BulkResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - 
* Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/providers': { + get: { + req: GetProvidersData; + res: { + /** + * Successful Response + */ + 200: ProviderCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/providers": { - get: { - req: GetProvidersData; - res: { - /** - * Successful Response - */ - 200: ProviderCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}': { + get: { + req: GetXcomEntryData; + res: { + /** + * Successful Response + */ + 200: XComResponseNative | XComResponseString; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: UpdateXcomEntryData; + res: { + /** + * Successful Response + */ + 200: XComResponseNative; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}": { - get: { - req: GetXcomEntryData; - res: { - /** - * Successful Response - */ - 200: XComResponseNative | XComResponseString; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries': { + get: { + req: GetXcomEntriesData; + res: { + /** + * Successful Response + */ + 200: XComCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: CreateXcomEntryData; + res: { + /** + * Successful Response + */ + 201: XComResponseNative; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: UpdateXcomEntryData; - res: { - /** - * Successful Response - */ - 200: XComResponseNative; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/tasks': { + get: { + req: GetTasksData; + res: { + /** + * Successful Response + */ 
+ 200: TaskCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries": { - get: { - req: GetXcomEntriesData; - res: { - /** - * Successful Response - */ - 200: XComCollectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/tasks/{task_id}': { + get: { + req: GetTaskData; + res: { + /** + * Successful Response + */ + 200: TaskResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - post: { - req: CreateXcomEntryData; - res: { - /** - * Successful Response - */ - 201: XComResponseNative; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/variables/{variable_key}': { + delete: { + req: DeleteVariableData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetVariableData; + res: { + /** + * Successful Response + */ + 200: VariableResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchVariableData; + res: { + /** + * Successful Response + */ + 200: VariableResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/tasks": { - get: { - req: GetTasksData; - res: { - /** - * Successful Response - */ - 200: TaskCollectionResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/variables': { + get: { + req: GetVariablesData; + res: { + /** + * Successful Response + */ + 200: VariableCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: 
PostVariableData; + res: { + /** + * Successful Response + */ + 201: VariableResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: BulkVariablesData; + res: { + /** + * Successful Response + */ + 200: BulkResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/tasks/{task_id}": { - get: { - req: GetTaskData; - res: { - /** - * Successful Response - */ - 200: TaskResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/parseDagFile/{file_token}': { + put: { + req: ReparseDagFileData; + res: { + /** + * Successful Response + */ + 201: null; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/variables/{variable_key}": { - delete: { - req: DeleteVariableData; - res: { - /** - * Successful Response - */ - 204: void; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagVersions/{version_number}': { + get: { + req: GetDagVersionData; + res: { + /** + * Successful Response + */ + 200: DagVersionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - get: { - req: GetVariableData; - res: { - /** - * Successful Response - */ - 200: VariableResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/dags/{dag_id}/dagVersions': { + get: { + req: GetDagVersionsData; + res: { + /** + * Successful Response + */ + 200: DAGVersionCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: PatchVariableData; - res: { - /** - * Successful Response - */ - 200: VariableResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}': { + patch: { + req: UpdateHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetailResponse; + /** + * Unauthorized + */ + 401: 
HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetail; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/variables": { - get: { - req: GetVariablesData; - res: { - /** - * Successful Response - */ - 200: VariableCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/hitlDetails/{dag_id}/{dag_run_id}/{task_id}/{map_index}': { + patch: { + req: UpdateMappedTiHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetailResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetMappedTiHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetail; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - post: { - req: PostVariableData; - res: { - /** - * Successful Response - */ - 201: VariableResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Conflict - */ - 409: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/hitlDetails/': { + get: { + req: GetHitlDetailsData; + res: { + /** + * Successful Response + */ + 200: HITLDetailCollection; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - patch: { - req: BulkVariablesData; - res: { - /** - * Successful Response - */ - 200: BulkResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/monitor/health': { + get: { + res: { + /** + * Successful Response + */ + 200: HealthInfoResponse; + }; + }; }; - }; - "/api/v2/parseDagFile/{file_token}": { - put: { - req: ReparseDagFileData; - res: { - /** - * Successful Response - */ - 201: null; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/version': { + get: { + res: { + /** + * Successful Response + */ + 200: VersionInfo; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagVersions/{version_number}": { - get: { - req: GetDagVersionData; - res: { - /** - * Successful Response - */ - 200: DagVersionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: 
HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/auth/login': { + get: { + req: LoginData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Temporary Redirect + */ + 307: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/dags/{dag_id}/dagVersions": { - get: { - req: GetDagVersionsData; - res: { - /** - * Successful Response - */ - 200: DAGVersionCollectionResponse; - /** - * Unauthorized - */ - 401: HTTPExceptionResponse; - /** - * Forbidden - */ - 403: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/api/v2/auth/logout': { + get: { + req: LogoutData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Temporary Redirect + */ + 307: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/monitor/health": { - get: { - res: { - /** - * Successful Response - */ - 200: HealthInfoResponse; - }; + '/api/v2/auth/refresh': { + get: { + req: RefreshData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Temporary Redirect + */ + 307: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/version": { - get: { - res: { - /** - * Successful Response - */ - 200: VersionInfo; - }; + '/ui/auth/menus': { + get: { + res: { + /** + * Successful Response + */ + 200: MenuItemCollectionResponse; + }; + }; }; - }; - "/api/v2/auth/login": { - get: { - req: LoginData; - res: { - /** - * Successful Response - */ - 200: unknown; - /** - * Temporary Redirect - */ - 307: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/dependencies': { + get: { + req: GetDependenciesData; + res: { + /** + * Successful Response + */ + 200: BaseGraphResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/api/v2/auth/logout": { - get: { - req: LogoutData; - res: { - /** - * Successful Response - */ - 200: unknown; - /** - * Temporary Redirect - */ - 307: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/dashboard/historical_metrics_data': { + get: { + req: HistoricalMetricsData; + res: { + /** + * Successful Response + */ + 200: HistoricalMetricDataResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/ui/auth/menus": { - get: { - res: { - /** - * Successful Response - */ - 200: MenuItemCollectionResponse; - }; + '/ui/dashboard/dag_stats': { + get: { + res: { + /** + * Successful Response + */ + 200: DashboardDagStatsResponse; + }; + }; }; - }; - "/ui/dags/recent_dag_runs": { - get: { - req: RecentDagRunsData; - res: { - /** - * Successful Response - */ - 200: DAGWithLatestDagRunsCollectionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; + '/ui/structure/structure_data': { + get: { + req: StructureDataData; + res: { + /** + * Successful Response + */ + 200: StructureDataResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; - }; - "/ui/dependencies": { - get: { - req: GetDependenciesData; - res: { - /** - * Successful Response - 
-         */
-        200: BaseGraphResponse;
-        /**
-         * Not Found
-         */
-        404: HTTPExceptionResponse;
-        /**
-         * Validation Error
-         */
-        422: HTTPValidationError;
-      };
+  '/ui/grid/structure/{dag_id}': {
+    get: {
+      req: GetDagStructureData;
+      res: {
+        /**
+         * Successful Response
+         */
+        200: Array;
+        /**
+         * Bad Request
+         */
+        400: HTTPExceptionResponse;
+        /**
+         * Not Found
+         */
+        404: HTTPExceptionResponse;
+        /**
+         * Validation Error
+         */
+        422: HTTPValidationError;
+      };
+    };
   };
-  };
-  "/ui/dashboard/historical_metrics_data": {
-    get: {
-      req: HistoricalMetricsData;
-      res: {
-        /**
-         * Successful Response
-         */
-        200: HistoricalMetricDataResponse;
-        /**
-         * Bad Request
-         */
-        400: HTTPExceptionResponse;
-        /**
-         * Validation Error
-         */
-        422: HTTPValidationError;
-      };
+  '/ui/grid/runs/{dag_id}': {
+    get: {
+      req: GetGridRunsData;
+      res: {
+        /**
+         * Successful Response
+         */
+        200: Array;
+        /**
+         * Bad Request
+         */
+        400: HTTPExceptionResponse;
+        /**
+         * Not Found
+         */
+        404: HTTPExceptionResponse;
+        /**
+         * Validation Error
+         */
+        422: HTTPValidationError;
+      };
+    };
   };
-  };
-  "/ui/structure/structure_data": {
-    get: {
-      req: StructureDataData;
-      res: {
-        /**
-         * Successful Response
-         */
-        200: StructureDataResponse;
-        /**
-         * Not Found
-         */
-        404: HTTPExceptionResponse;
-        /**
-         * Validation Error
-         */
-        422: HTTPValidationError;
-      };
+  '/ui/grid/ti_summaries/{dag_id}/{run_id}': {
+    get: {
+      req: GetGridTiSummariesData;
+      res: {
+        /**
+         * Successful Response
+         */
+        200: GridTISummaries;
+        /**
+         * Bad Request
+         */
+        400: HTTPExceptionResponse;
+        /**
+         * Not Found
+         */
+        404: HTTPExceptionResponse;
+        /**
+         * Validation Error
+         */
+        422: HTTPValidationError;
+      };
+    };
   };
-  };
-  "/ui/grid/{dag_id}": {
-    get: {
-      req: GridDataData;
-      res: {
-        /**
-         * Successful Response
-         */
-        200: GridResponse;
-        /**
-         * Bad Request
-         */
-        400: HTTPExceptionResponse;
-        /**
-         * Not Found
-         */
-        404: HTTPExceptionResponse;
-        /**
-         * Validation Error
-         */
-        422: HTTPValidationError;
-      };
+  '/ui/calendar/{dag_id}': {
+    get: {
+      req: GetCalendarData;
+      res: {
+        /**
+         * Successful Response
+         */
+        200: CalendarTimeRangeCollectionResponse;
+        /**
+         * Validation Error
+         */
+        422: HTTPValidationError;
+      };
+    };
   };
-  };
-};
+};
\ No newline at end of file
diff --git a/airflow-core/src/airflow/ui/openapi-merge.json b/airflow-core/src/airflow/ui/openapi-merge.json
index 7b4bcb75ca15f..059cadf6fd507 100644
--- a/airflow-core/src/airflow/ui/openapi-merge.json
+++ b/airflow-core/src/airflow/ui/openapi-merge.json
@@ -1,7 +1,7 @@
 {
   "inputs": [
     {
-      "inputFile": "../api_fastapi/core_api/openapi/v1-rest-api-generated.yaml"
+      "inputFile": "../api_fastapi/core_api/openapi/v2-rest-api-generated.yaml"
     },
     {
       "inputFile": "../api_fastapi/core_api/openapi/_private_ui.yaml"
diff --git a/airflow-core/src/airflow/ui/package.json b/airflow-core/src/airflow/ui/package.json
index 7fe31b5842c41..30b4eaedd4fb6 100644
--- a/airflow-core/src/airflow/ui/package.json
+++ b/airflow-core/src/airflow/ui/package.json
@@ -5,7 +5,7 @@
   "type": "module",
   "homepage": "/ui",
   "scripts": {
-    "dev": "vite",
+    "dev": "vite --port 5173 --strictPort",
     "build": "vite build",
     "lint": "eslint --quiet && tsc --p tsconfig.app.json",
     "lint:fix": "eslint --fix && tsc --p tsconfig.app.json",
@@ -17,31 +17,36 @@
   },
   "dependencies": {
     "@chakra-ui/anatomy": "^2.3.4",
-    "@chakra-ui/react": "^3.15.1",
+    "@chakra-ui/react": "^3.20.0",
     "@codemirror/lang-json": "^6.0.1",
     "@emotion/react": "^11.14.0",
-    "@tanstack/react-query": "^5.69.0",
-    "@tanstack/react-table": "^8.21.2",
+    "@tanstack/react-query": "^5.75.1",
+    "@tanstack/react-table": "^8.21.3",
+    "@tanstack/react-virtual": "^3.13.8",
     "@types/debounce-promise": "^3.1.9",
-    "@uiw/codemirror-themes-all": "^4.23.10",
-    "@uiw/react-codemirror": "^4.23.10",
+    "@uiw/codemirror-themes-all": "^4.23.12",
+    "@uiw/react-codemirror": "^4.23.12",
     "@visx/group": "^3.12.0",
     "@visx/shape": "^3.12.0",
     "@xyflow/react": "^12.4.4",
     "axios": "^1.8.4",
     "chakra-react-select": "6.1.0",
-    "chart.js": "^4.4.8",
+    "chart.js": "^4.4.9",
     "chartjs-plugin-annotation": "^3.1.0",
     "dayjs": "^1.11.13",
     "debounce-promise": "^3.1.2",
     "elkjs": "^0.10.0",
     "html-to-image": "^1.11.13",
+    "i18next": "^25.1.2",
+    "i18next-browser-languagedetector": "^8.1.0",
+    "i18next-http-backend": "^3.0.2",
     "next-themes": "^0.3.0",
     "react": "^18.3.1",
     "react-chartjs-2": "^5.3.0",
     "react-dom": "^18.3.1",
-    "react-hook-form": "^7.54.2",
+    "react-hook-form": "^7.56.1",
     "react-hotkeys-hook": "^4.6.1",
+    "react-i18next": "^15.5.1",
     "react-icons": "^5.5.0",
     "react-innertext": "^1.1.5",
     "react-json-view": "^1.21.3",
@@ -52,42 +57,54 @@
     "remark-gfm": "^4.0.1",
     "use-debounce": "^10.0.4",
     "usehooks-ts": "^3.1.1",
-    "zustand": "^5.0.3"
+    "node-sql-parser": "^5.3.10",
+    "yaml": "^2.6.1",
+    "zustand": "^5.0.4"
   },
   "devDependencies": {
     "@7nohe/openapi-react-query-codegen": "^1.6.2",
-    "@eslint/compat": "^1.2.7",
-    "@eslint/js": "^9.23.0",
+    "@eslint/compat": "^1.2.9",
+    "@eslint/js": "^9.25.1",
     "@stylistic/eslint-plugin": "^2.13.0",
-    "@tanstack/eslint-plugin-query": "^5.68.0",
+    "@tanstack/eslint-plugin-query": "^5.74.7",
     "@testing-library/jest-dom": "^6.6.3",
-    "@testing-library/react": "^16.2.0",
+    "@testing-library/react": "^16.3.0",
     "@trivago/prettier-plugin-sort-imports": "^4.3.0",
-    "@types/node": "^22.13.11",
+    "@types/node": "^22.15.3",
     "@types/react": "^18.3.19",
     "@types/react-dom": "^18.3.5",
     "@types/react-syntax-highlighter": "^15.5.13",
-    "@vitejs/plugin-react-swc": "^3.8.1",
+    "@vitejs/plugin-react-swc": "^3.9.0",
     "@vitest/coverage-v8": "^2.1.9",
-    "eslint": "^9.23.0",
-    "eslint-config-prettier": "^10.1.1",
+    "eslint": "^9.25.1",
+    "eslint-config-prettier": "^10.1.2",
+    "eslint-plugin-i18next": "^6.1.1",
+    "eslint-plugin-jsonc": "^2.20.1",
     "eslint-plugin-jsx-a11y": "^6.10.2",
-    "eslint-plugin-perfectionist": "^4.10.1",
-    "eslint-plugin-prettier": "^5.2.3",
-    "eslint-plugin-react": "^7.37.4",
+    "eslint-plugin-perfectionist": "^4.12.3",
+    "eslint-plugin-prettier": "^5.2.6",
+    "eslint-plugin-react": "^7.37.5",
     "eslint-plugin-react-hooks": "^4.6.2",
-    "eslint-plugin-react-refresh": "^0.4.19",
+    "eslint-plugin-react-refresh": "^0.4.20",
     "eslint-plugin-unicorn": "^55.0.0",
     "globals": "^15.15.0",
-    "happy-dom": "^17.4.4",
-    "msw": "^2.7.3",
+    "happy-dom": "^17.4.6",
+    "jsonc-eslint-parser": "^2.4.0",
+    "msw": "^2.7.5",
     "openapi-merge-cli": "^1.3.2",
     "prettier": "^3.5.3",
-    "typescript": "~5.5.4",
-    "typescript-eslint": "^8.27.0",
-    "vite": "^5.4.17",
+    "typescript": "~5.8.3",
+    "typescript-eslint": "^8.31.1",
+    "vite": "^5.4.19",
     "vite-plugin-css-injected-by-js": "^3.5.2",
     "vitest": "^2.1.9",
     "web-worker": "^1.5.0"
+  },
+  "pnpm": {
+    "onlyBuiltDependencies": [
+      "@swc/core",
+      "esbuild",
+      "msw"
+    ]
   }
 }
diff --git a/airflow-core/src/airflow/ui/pnpm-lock.yaml b/airflow-core/src/airflow/ui/pnpm-lock.yaml
index 6f37bae0fe70d..0f84ca9ef7902 100644
--- a/airflow-core/src/airflow/ui/pnpm-lock.yaml
+++ b/airflow-core/src/airflow/ui/pnpm-lock.yaml
@@ -12,8 +12,8 @@ importers:
         specifier: ^2.3.4
         version: 2.3.4
       '@chakra-ui/react':
-        specifier: ^3.15.1
-        version: 3.15.1(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        specifier: ^3.20.0
+        version: 3.20.0(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       '@codemirror/lang-json':
         specifier: ^6.0.1
         version: 6.0.1
@@ -21,20 +21,23 @@ importers:
         specifier: ^11.14.0
         version: 11.14.0(@types/react@18.3.19)(react@18.3.1)
       '@tanstack/react-query':
-        specifier: ^5.69.0
-        version: 5.69.0(react@18.3.1)
+        specifier: ^5.75.1
+        version: 5.75.4(react@18.3.1)
       '@tanstack/react-table':
-        specifier: ^8.21.2
-        version: 8.21.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        specifier: ^8.21.3
+        version: 8.21.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+      '@tanstack/react-virtual':
+        specifier: ^3.13.8
+        version: 3.13.8(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
      '@types/debounce-promise':
        specifier: ^3.1.9
        version: 3.1.9
      '@uiw/codemirror-themes-all':
-        specifier: ^4.23.10
-        version: 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)
+        specifier: ^4.23.12
+        version: 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)
      '@uiw/react-codemirror':
-        specifier: ^4.23.10
-        version: 4.23.10(@babel/runtime@7.26.10)(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.36.4)(codemirror@6.0.1(@lezer/common@1.2.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        specifier: ^4.23.12
+        version: 4.23.12(@babel/runtime@7.26.10)(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.36.4)(codemirror@6.0.1(@lezer/common@1.2.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
      '@visx/group':
        specifier: ^3.12.0
        version: 3.12.0(react@18.3.1)
@@ -49,13 +52,13 @@ importers:
        version: 1.8.4
      chakra-react-select:
        specifier: 6.1.0
-        version: 6.1.0(@chakra-ui/react@3.15.1(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.19)(next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        version: 6.1.0(@chakra-ui/react@3.20.0(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.19)(next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
      chart.js:
-        specifier: ^4.4.8
-        version: 4.4.8
+        specifier: ^4.4.9
+        version: 4.4.9
      chartjs-plugin-annotation:
        specifier: ^3.1.0
-        version: 3.1.0(chart.js@4.4.8)
+        version: 3.1.0(chart.js@4.4.9)
      dayjs:
        specifier: ^1.11.13
        version: 1.11.13
@@ -68,24 +71,39 @@ importers:
      html-to-image:
        specifier: ^1.11.13
        version: 1.11.13
+      i18next:
+        specifier: ^25.1.2
+        version: 25.1.2(typescript@5.8.3)
+      i18next-browser-languagedetector:
+        specifier: ^8.1.0
+        version: 8.1.0
+      i18next-http-backend:
+        specifier: ^3.0.2
+        version: 3.0.2
      next-themes:
        specifier: ^0.3.0
        version: 0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+      node-sql-parser:
+        specifier: ^5.3.10
+        version: 5.3.10
      react:
        specifier: ^18.3.1
        version: 18.3.1
      react-chartjs-2:
        specifier: ^5.3.0
-        version: 5.3.0(chart.js@4.4.8)(react@18.3.1)
+        version: 5.3.0(chart.js@4.4.9)(react@18.3.1)
      react-dom:
        specifier: ^18.3.1
        version: 18.3.1(react@18.3.1)
      react-hook-form:
-        specifier: ^7.54.2
-        version: 7.54.2(react@18.3.1)
+        specifier: ^7.56.1
+        version: 7.56.2(react@18.3.1)
      react-hotkeys-hook:
        specifier: ^4.6.1
        version: 4.6.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+      react-i18next:
+        specifier: ^15.5.1
+        version: 15.5.1(i18next@25.1.2(typescript@5.8.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.8.3)
      react-icons:
        specifier: ^5.5.0
        version: 5.5.0(react@18.3.1)
@@ -116,37 +134,40 @@ importers:
      usehooks-ts:
        specifier: ^3.1.1
        version: 3.1.1(react@18.3.1)
+      yaml:
+        specifier: ^2.6.1
+        version: 2.8.0
      zustand:
-        specifier: ^5.0.3
-        version: 5.0.3(@types/react@18.3.19)(react@18.3.1)(use-sync-external-store@1.4.0(react@18.3.1))
+        specifier: ^5.0.4
+        version: 5.0.4(@types/react@18.3.19)(react@18.3.1)(use-sync-external-store@1.4.0(react@18.3.1))
    devDependencies:
      '@7nohe/openapi-react-query-codegen':
        specifier: ^1.6.2
-        version: 1.6.2(commander@12.1.0)(glob@11.0.0)(magicast@0.3.5)(ts-morph@23.0.0)(typescript@5.5.4)
+        version: 1.6.2(commander@12.1.0)(glob@11.0.0)(magicast@0.3.5)(ts-morph@23.0.0)(typescript@5.8.3)
      '@eslint/compat':
-        specifier: ^1.2.7
-        version: 1.2.7(eslint@9.23.0(jiti@1.21.7))
+        specifier: ^1.2.9
+        version: 1.2.9(eslint@9.26.0(jiti@1.21.7))
      '@eslint/js':
-        specifier: ^9.23.0
-        version: 9.23.0
+        specifier: ^9.25.1
+        version: 9.26.0
      '@stylistic/eslint-plugin':
        specifier: ^2.13.0
-        version: 2.13.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)
+        version: 2.13.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)
      '@tanstack/eslint-plugin-query':
-        specifier: ^5.68.0
-        version: 5.68.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)
+        specifier: ^5.74.7
+        version: 5.74.7(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)
      '@testing-library/jest-dom':
        specifier: ^6.6.3
        version: 6.6.3
      '@testing-library/react':
-        specifier: ^16.2.0
-        version: 16.2.0(@testing-library/dom@10.4.0)(@types/react-dom@18.3.5(@types/react@18.3.19))(@types/react@18.3.19)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        specifier: ^16.3.0
+        version: 16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@18.3.5(@types/react@18.3.19))(@types/react@18.3.19)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
      '@trivago/prettier-plugin-sort-imports':
        specifier: ^4.3.0
        version: 4.3.0(prettier@3.5.3)
      '@types/node':
-        specifier: ^22.13.11
-        version: 22.13.11
+        specifier: ^22.15.3
+        version: 22.15.14
      '@types/react':
        specifier: ^18.3.19
        version: 18.3.19
@@ -157,47 +178,56 @@ importers:
        specifier: ^15.5.13
        version: 15.5.13
      '@vitejs/plugin-react-swc':
-        specifier: ^3.8.1
-        version: 3.8.1(@swc/helpers@0.5.15)(vite@5.4.17(@types/node@22.13.11))
+        specifier: ^3.9.0
+        version: 3.9.0(vite@5.4.19(@types/node@22.15.14))
      '@vitest/coverage-v8':
        specifier: ^2.1.9
-        version: 2.1.9(vitest@2.1.9(@types/node@22.13.11)(happy-dom@17.4.4)(msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4)))
+        version: 2.1.9(vitest@2.1.9(@types/node@22.15.14)(happy-dom@17.4.6)(msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3)))
      eslint:
-        specifier: ^9.23.0
-        version: 9.23.0(jiti@1.21.7)
+        specifier: ^9.25.1
+        version: 9.26.0(jiti@1.21.7)
      eslint-config-prettier:
-        specifier: ^10.1.1
-        version: 10.1.1(eslint@9.23.0(jiti@1.21.7))
+        specifier: ^10.1.2
+        version: 10.1.2(eslint@9.26.0(jiti@1.21.7))
+      eslint-plugin-i18next:
+        specifier: ^6.1.1
+        version: 6.1.1
+      eslint-plugin-jsonc:
+        specifier: ^2.20.1
+        version: 2.20.1(eslint@9.26.0(jiti@1.21.7))
eslint-plugin-jsx-a11y: specifier: ^6.10.2 - version: 6.10.2(eslint@9.23.0(jiti@1.21.7)) + version: 6.10.2(eslint@9.26.0(jiti@1.21.7)) eslint-plugin-perfectionist: - specifier: ^4.10.1 - version: 4.10.1(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) + specifier: ^4.12.3 + version: 4.12.3(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) eslint-plugin-prettier: - specifier: ^5.2.3 - version: 5.2.3(eslint-config-prettier@10.1.1(eslint@9.23.0(jiti@1.21.7)))(eslint@9.23.0(jiti@1.21.7))(prettier@3.5.3) + specifier: ^5.2.6 + version: 5.4.0(eslint-config-prettier@10.1.2(eslint@9.26.0(jiti@1.21.7)))(eslint@9.26.0(jiti@1.21.7))(prettier@3.5.3) eslint-plugin-react: - specifier: ^7.37.4 - version: 7.37.4(eslint@9.23.0(jiti@1.21.7)) + specifier: ^7.37.5 + version: 7.37.5(eslint@9.26.0(jiti@1.21.7)) eslint-plugin-react-hooks: specifier: ^4.6.2 - version: 4.6.2(eslint@9.23.0(jiti@1.21.7)) + version: 4.6.2(eslint@9.26.0(jiti@1.21.7)) eslint-plugin-react-refresh: - specifier: ^0.4.19 - version: 0.4.19(eslint@9.23.0(jiti@1.21.7)) + specifier: ^0.4.20 + version: 0.4.20(eslint@9.26.0(jiti@1.21.7)) eslint-plugin-unicorn: specifier: ^55.0.0 - version: 55.0.0(eslint@9.23.0(jiti@1.21.7)) + version: 55.0.0(eslint@9.26.0(jiti@1.21.7)) globals: specifier: ^15.15.0 version: 15.15.0 happy-dom: - specifier: ^17.4.4 - version: 17.4.4 + specifier: ^17.4.6 + version: 17.4.6 + jsonc-eslint-parser: + specifier: ^2.4.0 + version: 2.4.0 msw: - specifier: ^2.7.3 - version: 2.7.3(@types/node@22.13.11)(typescript@5.5.4) + specifier: ^2.7.5 + version: 2.7.6(@types/node@22.15.14)(typescript@5.8.3) openapi-merge-cli: specifier: ^1.3.2 version: 1.3.2 @@ -205,20 +235,20 @@ importers: specifier: ^3.5.3 version: 3.5.3 typescript: - specifier: ~5.5.4 - version: 5.5.4 + specifier: ~5.8.3 + version: 5.8.3 typescript-eslint: - specifier: ^8.27.0 - version: 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) + specifier: ^8.31.1 + version: 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) vite: - specifier: ^5.4.17 - version: 5.4.17(@types/node@22.13.11) + specifier: ^5.4.19 + version: 5.4.19(@types/node@22.15.14) vite-plugin-css-injected-by-js: specifier: ^3.5.2 - version: 3.5.2(vite@5.4.17(@types/node@22.13.11)) + version: 3.5.2(vite@5.4.19(@types/node@22.15.14)) vitest: specifier: ^2.1.9 - version: 2.1.9(@types/node@22.13.11)(happy-dom@17.4.4)(msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4)) + version: 2.1.9(@types/node@22.15.14)(happy-dom@17.4.6)(msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3)) web-worker: specifier: ^1.5.0 version: 1.5.0 @@ -246,8 +276,8 @@ packages: resolution: {integrity: sha512-9K6xOqeevacvweLGik6LnZCb1fBtCOSIWQs8d096XGeqoLKC33UVMGz9+77Gw44KvbH4pKcQPWo4ZpxkXYj05w==} engines: {node: '>= 16'} - '@ark-ui/react@5.5.0': - resolution: {integrity: sha512-zLERNKOrf77K0OMOLoo5+jZQn9uXxYck56gBzx/zhW2SjFe0M2lE6VyaIiwgKGIqbGre59gD9/tyTsqO6bqARQ==} + '@ark-ui/react@5.12.0': + resolution: {integrity: sha512-UV89EqyESZoyr6rtvrbFJn/FejpswhvRVcfK44dZDU6h6UY8CxfR/6Ayvrq9UtFdD0dEawqwWrXS22l8Y05Nnw==} peerDependencies: react: '>=18.0.0' react-dom: '>=18.0.0' @@ -256,6 +286,10 @@ packages: resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==} engines: {node: '>=6.9.0'} + '@babel/code-frame@7.27.1': + resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} + engines: {node: '>=6.9.0'} + '@babel/generator@7.17.7': resolution: {integrity: 
sha512-oLcVCTeIFadUoArDTwpluncplrYBmTCCZZgXCbgNGvOBBiSDDK3eWO4b/+eOTli5tKv1lg+a5/NAXg+nTcei1w==} engines: {node: '>=6.9.0'} @@ -292,6 +326,10 @@ packages: resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==} engines: {node: '>=6.9.0'} + '@babel/helper-validator-identifier@7.27.1': + resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} + engines: {node: '>=6.9.0'} + '@babel/parser@7.26.10': resolution: {integrity: sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==} engines: {node: '>=6.0.0'} @@ -336,8 +374,8 @@ packages: '@chakra-ui/anatomy@2.3.4': resolution: {integrity: sha512-fFIYN7L276gw0Q7/ikMMlZxP7mvnjRaWJ7f3Jsf9VtDOi6eAYIBRrhQe6+SZ0PGmoOkRaBc7gSE5oeIbgFFyrw==} - '@chakra-ui/react@3.15.1': - resolution: {integrity: sha512-BZPKvGoiWn1OTkXn4M9XJvXVES6urlCxARsT2XZhFaV41/tkYvuBMSVQ8fbZdCeY7YfnxCbiLLth46tDeEZEyQ==} + '@chakra-ui/react@3.20.0': + resolution: {integrity: sha512-zHYQAUqrT2pZZ/Xi+sskRC/An9q4ZelLPJkFHdobftTYkcFo1FtkMbBO0AEBZhb/6mZGyfw3JLflSawkuR++uQ==} peerDependencies: '@emotion/react': '>=11' react: '>=18' @@ -351,9 +389,6 @@ packages: '@codemirror/view': ^6.0.0 '@lezer/common': ^1.0.0 - '@codemirror/commands@6.8.0': - resolution: {integrity: sha512-q8VPEFaEP4ikSlt6ZxjB3zW72+7osfAYW9i8Zu943uqbKuz6utc1+F170hyLUCUltXORjQXRyYQNfkckzA/bPQ==} - '@codemirror/commands@6.8.1': resolution: {integrity: sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==} @@ -566,12 +601,18 @@ packages: peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + '@eslint-community/eslint-utils@4.7.0': + resolution: {integrity: sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + '@eslint-community/regexpp@4.12.1': resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - '@eslint/compat@1.2.7': - resolution: {integrity: sha512-xvv7hJE32yhegJ8xNAnb62ggiAwTYHBpUCWhRxEj/ksvgDJuSXfoDkBcRYaYNFiJ+jH0IE3K16hd+xXzhBgNbg==} + '@eslint/compat@1.2.9': + resolution: {integrity: sha512-gCdSY54n7k+driCadyMNv8JSPzYLeDVM/ikZRtvtROBpRdFSkS8W9A82MqsaY7lZuwL0wiapgD0NT1xT0hyJsA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^9.10.0 @@ -579,40 +620,46 @@ packages: eslint: optional: true - '@eslint/config-array@0.19.2': - resolution: {integrity: sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==} + '@eslint/config-array@0.20.0': + resolution: {integrity: sha512-fxlS1kkIjx8+vy2SjuCB94q3htSNrufYTXubwiBFeaQHbH6Ipi43gFJq2zCMt6PHhImH3Xmr0NksKDvchWlpQQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/config-helpers@0.2.0': - resolution: {integrity: sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ==} + '@eslint/config-helpers@0.2.2': + resolution: {integrity: sha512-+GPzk8PlG0sPpzdU5ZvIRMPidzAnZDl/s9L+y13iodqvb8leL53bTannOrQ/Im7UkpsmFU5Ily5U60LWixnmLg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@0.12.0': - resolution: {integrity: sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==} + '@eslint/core@0.13.0': + resolution: {integrity: 
sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/eslintrc@3.3.1': resolution: {integrity: sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.23.0': - resolution: {integrity: sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw==} + '@eslint/js@9.26.0': + resolution: {integrity: sha512-I9XlJawFdSMvWjDt6wksMCrgns5ggLNfFwFvnShsleWruvXM514Qxk8V246efTw+eo9JABvVz+u3q2RiAowKxQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/object-schema@2.1.6': resolution: {integrity: sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/plugin-kit@0.2.7': - resolution: {integrity: sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==} + '@eslint/plugin-kit@0.2.8': + resolution: {integrity: sha512-ZAoA40rNMPwSm+AeHpCq8STiNAwzWLJuP8Xv4CHIc9wv/PSuExjMrmjfYNj682vW0OOiZ1HKxzvjQr9XZIisQA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@floating-ui/core@1.6.9': resolution: {integrity: sha512-uMXCuQ3BItDUbAMhIXw7UPXRfAlOAvZzdK9BWpE60MCn+Svt3aLn9jsPTi/WNGlRUu2uI0v5S7JiIUsbsvh3fw==} + '@floating-ui/core@1.7.1': + resolution: {integrity: sha512-azI0DrjMMfIug/ExbBaeDVJXcY0a7EPvPjb2xAJPa4HeimBX+Z18HK8QQR3jb6356SnDDdxx+hinMLcJEDdOjw==} + '@floating-ui/dom@1.6.13': resolution: {integrity: sha512-umqzocjDgNRGTuO7Q8CU32dkHkECqI8ZdMZ5Swb6QAM0t5rnlrN3lGo1hdpscRd3WS8T6DKYK4ephgIH9iRh3w==} + '@floating-ui/dom@1.7.1': + resolution: {integrity: sha512-cwsmW/zyw5ltYTUeeYJ60CnQuPqmGwuGVhG9w0PRaRKkAyi38BT5CKrpIbb+jtahSwUl04cWzSx9ZOIxeS6RsQ==} + '@floating-ui/utils@0.2.9': resolution: {integrity: sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==} @@ -674,11 +721,11 @@ packages: '@types/node': optional: true - '@internationalized/date@3.7.0': - resolution: {integrity: sha512-VJ5WS3fcVx0bejE/YHfbDKR/yawZgKqn/if+oEeLqNwBtPzVB06olkfcnojTmEMX+gTpH+FlQ69SHNitJ8/erQ==} + '@internationalized/date@3.8.1': + resolution: {integrity: sha512-PgVE6B6eIZtzf9Gu5HvJxRK3ufUFz9DhspELuhW/N0GuMGMTLvPQNRkHP2hTuP9lblOk+f+1xi96sPiPXANXAA==} - '@internationalized/number@3.6.0': - resolution: {integrity: sha512-PtrRcJVy7nw++wn4W2OuePQQfTqDzfusSuY1QTtui4wa7r+rGVtR75pO8CyKvHvzyQYi3Q1uO5sY0AsB4e65Bw==} + '@internationalized/number@3.6.2': + resolution: {integrity: sha512-E5QTOlMg9wo5OrKdHD6edo1JJlIoOsylh0+mbf0evi1tHJwMZfJSaBpGtnJV9N7w3jeiioox9EG/EWRWPh82vg==} '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} @@ -727,6 +774,10 @@ packages: '@marijn/find-cluster-break@1.0.2': resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==} + '@modelcontextprotocol/sdk@1.11.0': + resolution: {integrity: sha512-k/1pb70eD638anoi0e8wUGAlbMJXyvdV4p62Ko+EZ7eBe1xMx8Uhak1R5DgfoofsK5IBBnRwsYGTaLZl+6/+RQ==} + engines: {node: '>=18'} + '@mswjs/interceptors@0.37.6': resolution: {integrity: sha512-wK+5pLK5XFmgtH3aQ2YVvA3HohS3xqV/OxuVOdNx9Wpnz7VE/fnC+e1A7ln6LFYeck7gOJ/dsZV6OLplOtAJ2w==} engines: {node: '>=18'} @@ -752,118 +803,118 @@ packages: '@open-draft/until@2.1.0': resolution: {integrity: 
sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} - '@pandacss/is-valid-prop@0.41.0': - resolution: {integrity: sha512-BE6h6CsJk14ugIRrsazJtN3fcg+KDFRat1Bs93YFKH6jd4DOb1yUyVvC70jKqPVvg70zEcV8acZ7VdcU5TLu+w==} + '@pandacss/is-valid-prop@0.53.6': + resolution: {integrity: sha512-TgWBQmz/5j/oAMjavqJAjQh1o+yxhYspKvepXPn4lFhAN3yBhilrw9HliAkvpUr0sB2CkJ2BYMpFXbAJYEocsA==} '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@pkgr/core@0.1.2': - resolution: {integrity: sha512-fdDH1LSGfZdTH2sxdpVMw31BanV28K/Gry0cVFxaNP77neJSkd82mM8ErPNYs9e+0O7SdHBLTDzDgwUuy18RnQ==} + '@pkgr/core@0.2.4': + resolution: {integrity: sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} '@remix-run/router@1.23.0': resolution: {integrity: sha512-O3rHJzAQKamUz1fvE0Qaw0xSFqsA/yafi2iqeE0pvdFtCO1viYx8QL6f3Ln/aCCTLxs68SLf0KPM9eSeM8yBnA==} engines: {node: '>=14.0.0'} - '@rollup/rollup-android-arm-eabi@4.39.0': - resolution: {integrity: sha512-lGVys55Qb00Wvh8DMAocp5kIcaNzEFTmGhfFd88LfaogYTRKrdxgtlO5H6S49v2Nd8R2C6wLOal0qv6/kCkOwA==} + '@rollup/rollup-android-arm-eabi@4.40.1': + resolution: {integrity: sha512-kxz0YeeCrRUHz3zyqvd7n+TVRlNyTifBsmnmNPtk3hQURUyG9eAB+usz6DAwagMusjx/zb3AjvDUvhFGDAexGw==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.39.0': - resolution: {integrity: sha512-It9+M1zE31KWfqh/0cJLrrsCPiF72PoJjIChLX+rEcujVRCb4NLQ5QzFkzIZW8Kn8FTbvGQBY5TkKBau3S8cCQ==} + '@rollup/rollup-android-arm64@4.40.1': + resolution: {integrity: sha512-PPkxTOisoNC6TpnDKatjKkjRMsdaWIhyuMkA4UsBXT9WEZY4uHezBTjs6Vl4PbqQQeu6oION1w2voYZv9yquCw==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.39.0': - resolution: {integrity: sha512-lXQnhpFDOKDXiGxsU9/l8UEGGM65comrQuZ+lDcGUx+9YQ9dKpF3rSEGepyeR5AHZ0b5RgiligsBhWZfSSQh8Q==} + '@rollup/rollup-darwin-arm64@4.40.1': + resolution: {integrity: sha512-VWXGISWFY18v/0JyNUy4A46KCFCb9NVsH+1100XP31lud+TzlezBbz24CYzbnA4x6w4hx+NYCXDfnvDVO6lcAA==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.39.0': - resolution: {integrity: sha512-mKXpNZLvtEbgu6WCkNij7CGycdw9cJi2k9v0noMb++Vab12GZjFgUXD69ilAbBh034Zwn95c2PNSz9xM7KYEAQ==} + '@rollup/rollup-darwin-x64@4.40.1': + resolution: {integrity: sha512-nIwkXafAI1/QCS7pxSpv/ZtFW6TXcNUEHAIA9EIyw5OzxJZQ1YDrX+CL6JAIQgZ33CInl1R6mHet9Y/UZTg2Bw==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.39.0': - resolution: {integrity: sha512-jivRRlh2Lod/KvDZx2zUR+I4iBfHcu2V/BA2vasUtdtTN2Uk3jfcZczLa81ESHZHPHy4ih3T/W5rPFZ/hX7RtQ==} + '@rollup/rollup-freebsd-arm64@4.40.1': + resolution: {integrity: sha512-BdrLJ2mHTrIYdaS2I99mriyJfGGenSaP+UwGi1kB9BLOCu9SR8ZpbkmmalKIALnRw24kM7qCN0IOm6L0S44iWw==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.39.0': - resolution: {integrity: sha512-8RXIWvYIRK9nO+bhVz8DwLBepcptw633gv/QT4015CpJ0Ht8punmoHU/DuEd3iw9Hr8UwUV+t+VNNuZIWYeY7Q==} + '@rollup/rollup-freebsd-x64@4.40.1': + resolution: {integrity: sha512-VXeo/puqvCG8JBPNZXZf5Dqq7BzElNJzHRRw3vjBE27WujdzuOPecDPc/+1DcdcTptNBep3861jNq0mYkT8Z6Q==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.39.0': - resolution: {integrity: sha512-mz5POx5Zu58f2xAG5RaRRhp3IZDK7zXGk5sdEDj4o96HeaXhlUwmLFzNlc4hCQi5sGdR12VDgEUqVSHer0lI9g==} + '@rollup/rollup-linux-arm-gnueabihf@4.40.1': + resolution: {integrity: 
sha512-ehSKrewwsESPt1TgSE/na9nIhWCosfGSFqv7vwEtjyAqZcvbGIg4JAcV7ZEh2tfj/IlfBeZjgOXm35iOOjadcg==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.39.0': - resolution: {integrity: sha512-+YDwhM6gUAyakl0CD+bMFpdmwIoRDzZYaTWV3SDRBGkMU/VpIBYXXEvkEcTagw/7VVkL2vA29zU4UVy1mP0/Yw==} + '@rollup/rollup-linux-arm-musleabihf@4.40.1': + resolution: {integrity: sha512-m39iO/aaurh5FVIu/F4/Zsl8xppd76S4qoID8E+dSRQvTyZTOI2gVk3T4oqzfq1PtcvOfAVlwLMK3KRQMaR8lg==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.39.0': - resolution: {integrity: sha512-EKf7iF7aK36eEChvlgxGnk7pdJfzfQbNvGV/+l98iiMwU23MwvmV0Ty3pJ0p5WQfm3JRHOytSIqD9LB7Bq7xdQ==} + '@rollup/rollup-linux-arm64-gnu@4.40.1': + resolution: {integrity: sha512-Y+GHnGaku4aVLSgrT0uWe2o2Rq8te9hi+MwqGF9r9ORgXhmHK5Q71N757u0F8yU1OIwUIFy6YiJtKjtyktk5hg==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.39.0': - resolution: {integrity: sha512-vYanR6MtqC7Z2SNr8gzVnzUul09Wi1kZqJaek3KcIlI/wq5Xtq4ZPIZ0Mr/st/sv/NnaPwy/D4yXg5x0B3aUUA==} + '@rollup/rollup-linux-arm64-musl@4.40.1': + resolution: {integrity: sha512-jEwjn3jCA+tQGswK3aEWcD09/7M5wGwc6+flhva7dsQNRZZTe30vkalgIzV4tjkopsTS9Jd7Y1Bsj6a4lzz8gQ==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loongarch64-gnu@4.39.0': - resolution: {integrity: sha512-NMRUT40+h0FBa5fb+cpxtZoGAggRem16ocVKIv5gDB5uLDgBIwrIsXlGqYbLwW8YyO3WVTk1FkFDjMETYlDqiw==} + '@rollup/rollup-linux-loongarch64-gnu@4.40.1': + resolution: {integrity: sha512-ySyWikVhNzv+BV/IDCsrraOAZ3UaC8SZB67FZlqVwXwnFhPihOso9rPOxzZbjp81suB1O2Topw+6Ug3JNegejQ==} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-powerpc64le-gnu@4.39.0': - resolution: {integrity: sha512-0pCNnmxgduJ3YRt+D+kJ6Ai/r+TaePu9ZLENl+ZDV/CdVczXl95CbIiwwswu4L+K7uOIGf6tMo2vm8uadRaICQ==} + '@rollup/rollup-linux-powerpc64le-gnu@4.40.1': + resolution: {integrity: sha512-BvvA64QxZlh7WZWqDPPdt0GH4bznuL6uOO1pmgPnnv86rpUpc8ZxgZwcEgXvo02GRIZX1hQ0j0pAnhwkhwPqWg==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.39.0': - resolution: {integrity: sha512-t7j5Zhr7S4bBtksT73bO6c3Qa2AV/HqiGlj9+KB3gNF5upcVkx+HLgxTm8DK4OkzsOYqbdqbLKwvGMhylJCPhQ==} + '@rollup/rollup-linux-riscv64-gnu@4.40.1': + resolution: {integrity: sha512-EQSP+8+1VuSulm9RKSMKitTav89fKbHymTf25n5+Yr6gAPZxYWpj3DzAsQqoaHAk9YX2lwEyAf9S4W8F4l3VBQ==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-riscv64-musl@4.39.0': - resolution: {integrity: sha512-m6cwI86IvQ7M93MQ2RF5SP8tUjD39Y7rjb1qjHgYh28uAPVU8+k/xYWvxRO3/tBN2pZkSMa5RjnPuUIbrwVxeA==} + '@rollup/rollup-linux-riscv64-musl@4.40.1': + resolution: {integrity: sha512-n/vQ4xRZXKuIpqukkMXZt9RWdl+2zgGNx7Uda8NtmLJ06NL8jiHxUawbwC+hdSq1rrw/9CghCpEONor+l1e2gA==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.39.0': - resolution: {integrity: sha512-iRDJd2ebMunnk2rsSBYlsptCyuINvxUfGwOUldjv5M4tpa93K8tFMeYGpNk2+Nxl+OBJnBzy2/JCscGeO507kA==} + '@rollup/rollup-linux-s390x-gnu@4.40.1': + resolution: {integrity: sha512-h8d28xzYb98fMQKUz0w2fMc1XuGzLLjdyxVIbhbil4ELfk5/orZlSTpF/xdI9C8K0I8lCkq+1En2RJsawZekkg==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.39.0': - resolution: {integrity: sha512-t9jqYw27R6Lx0XKfEFe5vUeEJ5pF3SGIM6gTfONSMb7DuG6z6wfj2yjcoZxHg129veTqU7+wOhY6GX8wmf90dA==} + '@rollup/rollup-linux-x64-gnu@4.40.1': + resolution: {integrity: sha512-XiK5z70PEFEFqcNj3/zRSz/qX4bp4QIraTy9QjwJAb/Z8GM7kVUsD0Uk8maIPeTyPCP03ChdI+VVmJriKYbRHQ==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.39.0': - resolution: {integrity: sha512-ThFdkrFDP55AIsIZDKSBWEt/JcWlCzydbZHinZ0F/r1h83qbGeenCt/G/wG2O0reuENDD2tawfAj2s8VK7Bugg==} + 
'@rollup/rollup-linux-x64-musl@4.40.1': + resolution: {integrity: sha512-2BRORitq5rQ4Da9blVovzNCMaUlyKrzMSvkVR0D4qPuOy/+pMCrh1d7o01RATwVy+6Fa1WBw+da7QPeLWU/1mQ==} cpu: [x64] os: [linux] - '@rollup/rollup-win32-arm64-msvc@4.39.0': - resolution: {integrity: sha512-jDrLm6yUtbOg2TYB3sBF3acUnAwsIksEYjLeHL+TJv9jg+TmTwdyjnDex27jqEMakNKf3RwwPahDIt7QXCSqRQ==} + '@rollup/rollup-win32-arm64-msvc@4.40.1': + resolution: {integrity: sha512-b2bcNm9Kbde03H+q+Jjw9tSfhYkzrDUf2d5MAd1bOJuVplXvFhWz7tRtWvD8/ORZi7qSCy0idW6tf2HgxSXQSg==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.39.0': - resolution: {integrity: sha512-6w9uMuza+LbLCVoNKL5FSLE7yvYkq9laSd09bwS0tMjkwXrmib/4KmoJcrKhLWHvw19mwU+33ndC69T7weNNjQ==} + '@rollup/rollup-win32-ia32-msvc@4.40.1': + resolution: {integrity: sha512-DfcogW8N7Zg7llVEfpqWMZcaErKfsj9VvmfSyRjCyo4BI3wPEfrzTtJkZG6gKP/Z92wFm6rz2aDO7/JfiR/whA==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.39.0': - resolution: {integrity: sha512-yAkUOkIKZlK5dl7u6dg897doBgLXmUHhIINM2c+sND3DZwnrdQkkSiDh7N75Ll4mM4dxSkYfXqU9fW3lLkMFug==} + '@rollup/rollup-win32-x64-msvc@4.40.1': + resolution: {integrity: sha512-ECyOuDeH3C1I8jH2MK1RtBJW+YPMvSfT0a5NN0nHfQYnDSJ6tUiZH3gzwVP5/Kfh/+Tt7tpWVF9LXNTnhTJ3kA==} cpu: [x64] os: [win32] @@ -873,71 +924,71 @@ packages: peerDependencies: eslint: '>=8.40.0' - '@swc/core-darwin-arm64@1.11.12': - resolution: {integrity: sha512-x+iljeyIaVq7VCAy9pM0rqAb9GKA1cqDkqCxgFDxH3rcH+ykZa12vkDlTwysgkfLV8pr0KhCRHkwY+iAqPbO9g==} + '@swc/core-darwin-arm64@1.11.24': + resolution: {integrity: sha512-dhtVj0PC1APOF4fl5qT2neGjRLgHAAYfiVP8poJelhzhB/318bO+QCFWAiimcDoyMgpCXOhTp757gnoJJrheWA==} engines: {node: '>=10'} cpu: [arm64] os: [darwin] - '@swc/core-darwin-x64@1.11.12': - resolution: {integrity: sha512-DwTXPdhJ/+scUR1iWttu3p0q8b5omF71xWFCw6UC99QBJQ4femmRtZNacgdiBkxZ5IbUlxd8m5UzMBc/+H5rWw==} + '@swc/core-darwin-x64@1.11.24': + resolution: {integrity: sha512-H/3cPs8uxcj2Fe3SoLlofN5JG6Ny5bl8DuZ6Yc2wr7gQFBmyBkbZEz+sPVgsID7IXuz7vTP95kMm1VL74SO5AQ==} engines: {node: '>=10'} cpu: [x64] os: [darwin] - '@swc/core-linux-arm-gnueabihf@1.11.12': - resolution: {integrity: sha512-ls9b3lX2x3tnJKGn6zSDFK1ohdmdUkE6nwqrVmdzqAwr/Q5i2ij/dmkOFCloItc2PHNVtRGGsC4+FYSm1EBLjg==} + '@swc/core-linux-arm-gnueabihf@1.11.24': + resolution: {integrity: sha512-PHJgWEpCsLo/NGj+A2lXZ2mgGjsr96ULNW3+T3Bj2KTc8XtMUkE8tmY2Da20ItZOvPNC/69KroU7edyo1Flfbw==} engines: {node: '>=10'} cpu: [arm] os: [linux] - '@swc/core-linux-arm64-gnu@1.11.12': - resolution: {integrity: sha512-F0nMLl5kYbew5GjHq7B21poE5VOPgSsoQ0VEXd4Fji3rR0d0gLoK2r+JP92XmpRxAzdzpdak1DQczWMyf2BQAQ==} + '@swc/core-linux-arm64-gnu@1.11.24': + resolution: {integrity: sha512-C2FJb08+n5SD4CYWCTZx1uR88BN41ZieoHvI8A55hfVf2woT8+6ZiBzt74qW2g+ntZ535Jts5VwXAKdu41HpBg==} engines: {node: '>=10'} cpu: [arm64] os: [linux] - '@swc/core-linux-arm64-musl@1.11.12': - resolution: {integrity: sha512-3dlHowBgYBgi23ZBSvFHe/tD3PowEhxfVAy08NckWBeaG/e4dyrYMhAiccfuy6jkDYXEF1L2DtpRtxGImxoaPg==} + '@swc/core-linux-arm64-musl@1.11.24': + resolution: {integrity: sha512-ypXLIdszRo0re7PNNaXN0+2lD454G8l9LPK/rbfRXnhLWDBPURxzKlLlU/YGd2zP98wPcVooMmegRSNOKfvErw==} engines: {node: '>=10'} cpu: [arm64] os: [linux] - '@swc/core-linux-x64-gnu@1.11.12': - resolution: {integrity: sha512-ToEWzLA5lXlYCbGNzMow6+uy4zhpXKQyFb3RHM8AYVb0n4pNPWvwF+8ybWDimeGBBaHJLgRQsUMuJ4NV6urSrA==} + '@swc/core-linux-x64-gnu@1.11.24': + resolution: {integrity: sha512-IM7d+STVZD48zxcgo69L0yYptfhaaE9cMZ+9OoMxirNafhKKXwoZuufol1+alEFKc+Wbwp+aUPe/DeWC/Lh3dg==} engines: {node: '>=10'} cpu: [x64] os: [linux] - 
'@swc/core-linux-x64-musl@1.11.12': - resolution: {integrity: sha512-N5xF+MDZr79e8gvVXX3YP1bMeaRL16Kst/R7bGUQvvCq1UGD86qMUtSr5KfCl0h5SNKP2YKtkN98HQLnGEikow==} + '@swc/core-linux-x64-musl@1.11.24': + resolution: {integrity: sha512-DZByJaMVzSfjQKKQn3cqSeqwy6lpMaQDQQ4HPlch9FWtDx/dLcpdIhxssqZXcR2rhaQVIaRQsCqwV6orSDGAGw==} engines: {node: '>=10'} cpu: [x64] os: [linux] - '@swc/core-win32-arm64-msvc@1.11.12': - resolution: {integrity: sha512-/PYiyYWSQRtMoOamMfhAfq0y3RWk9LpUZ49yetJn2XI85TRkL5u2DTLLNkTPvoTiCfo0eZOJF9t5b7Z6ly0iHQ==} + '@swc/core-win32-arm64-msvc@1.11.24': + resolution: {integrity: sha512-Q64Ytn23y9aVDKN5iryFi8mRgyHw3/kyjTjT4qFCa8AEb5sGUuSj//AUZ6c0J7hQKMHlg9do5Etvoe61V98/JQ==} engines: {node: '>=10'} cpu: [arm64] os: [win32] - '@swc/core-win32-ia32-msvc@1.11.12': - resolution: {integrity: sha512-Dxm6W4p0YVNIPnYh/Kf/9zPeaD6sVAGDQN+2c52l4m/4gR5aDgE+xg6k5lAt4ok7LDXInL3n1nwYEG7Tc4JcSQ==} + '@swc/core-win32-ia32-msvc@1.11.24': + resolution: {integrity: sha512-9pKLIisE/Hh2vJhGIPvSoTK4uBSPxNVyXHmOrtdDot4E1FUUI74Vi8tFdlwNbaj8/vusVnb8xPXsxF1uB0VgiQ==} engines: {node: '>=10'} cpu: [ia32] os: [win32] - '@swc/core-win32-x64-msvc@1.11.12': - resolution: {integrity: sha512-PP8RSJTcda5nUHJGkbKeQ20OC+L2LxcbjYpyha1OqIFyu/qWG9zMMYVaTLKJL7zsJ14pIM/mpS3u+CJARQ+Hzw==} + '@swc/core-win32-x64-msvc@1.11.24': + resolution: {integrity: sha512-sybnXtOsdB+XvzVFlBVGgRHLqp3yRpHK7CrmpuDKszhj/QhmsaZzY/GHSeALlMtLup13M0gqbcQvsTNlAHTg3w==} engines: {node: '>=10'} cpu: [x64] os: [win32] - '@swc/core@1.11.12': - resolution: {integrity: sha512-Jwx9JH1O6Vm7BS9AEPLlquJNSy6Lbt/kiJIlxSslDuBLeDJD13lXQfitvazqgRwGEHx1QmwEq8mc0OSristtRw==} + '@swc/core@1.11.24': + resolution: {integrity: sha512-MaQEIpfcEMzx3VWWopbofKJvaraqmL6HbLlw2bFZ7qYqYw3rkhM0cQVEgyzbHtTWwCwPMFZSC2DUbhlZgrMfLg==} engines: {node: '>=10'} peerDependencies: - '@swc/helpers': '*' + '@swc/helpers': '>=0.5.17' peerDependenciesMeta: '@swc/helpers': optional: true @@ -948,33 +999,42 @@ packages: '@swc/helpers@0.5.15': resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==} - '@swc/types@0.1.19': - resolution: {integrity: sha512-WkAZaAfj44kh/UFdAQcrMP1I0nwRqpt27u+08LMBYMqmQfwwMofYoMh/48NGkMMRfC4ynpfwRbJuu8ErfNloeA==} + '@swc/types@0.1.21': + resolution: {integrity: sha512-2YEtj5HJVbKivud9N4bpPBAyZhj4S2Ipe5LkUG94alTpr7in/GU/EARgPAd3BwU+YOmFVJC2+kjqhGRi3r0ZpQ==} - '@tanstack/eslint-plugin-query@5.68.0': - resolution: {integrity: sha512-w/+y5LILV1GTWBB2R/lKfUzgocKXU1B7O6jipLUJhmxCKPmJFy5zpfR1Vx7c6yCEsQoKcTbhuR/tIy+1sIGaiA==} + '@tanstack/eslint-plugin-query@5.74.7': + resolution: {integrity: sha512-EeHuaaYiCOD+XOGyB7LMNEx9OEByAa5lkgP+S3ZggjKJpmIO6iRWeoIYYDKo2F8uc3qXcVhTfC7pn7NddQiNtA==} peerDependencies: eslint: ^8.57.0 || ^9.0.0 - '@tanstack/query-core@5.69.0': - resolution: {integrity: sha512-Kn410jq6vs1P8Nm+ZsRj9H+U3C0kjuEkYLxbiCyn3MDEiYor1j2DGVULqAz62SLZtUZ/e9Xt6xMXiJ3NJ65WyQ==} + '@tanstack/query-core@5.75.4': + resolution: {integrity: sha512-pcqOUgWG9oGlzkfRQQMMsEFmtQu0wq81A414CtELZGq+ztVwSTAaoB3AZRAXQJs88LmNMk2YpUKuQbrvzNDyRg==} - '@tanstack/react-query@5.69.0': - resolution: {integrity: sha512-Ift3IUNQqTcaFa1AiIQ7WCb/PPy8aexZdq9pZWLXhfLcLxH0+PZqJ2xFImxCpdDZrFRZhLJrh76geevS5xjRhA==} + '@tanstack/react-query@5.75.4': + resolution: {integrity: sha512-Vf65pzYRkf8fk9SP1ncIZjvaXszBhtsvpf+h45Y/9kOywOrVZfBGUpCdffdsVzbmBzmz6TCFes9bM0d3pRrIsA==} peerDependencies: react: ^18 || ^19 - '@tanstack/react-table@8.21.2': - resolution: {integrity: 
sha512-11tNlEDTdIhMJba2RBH+ecJ9l1zgS2kjmexDPAraulc8jeNA4xocSNeyzextT0XJyASil4XsCYlJmf5jEWAtYg==} + '@tanstack/react-table@8.21.3': + resolution: {integrity: sha512-5nNMTSETP4ykGegmVkhjcS8tTLW6Vl4axfEGQN3v0zdHYbK4UfoqfPChclTrJ4EoK9QynqAu9oUf8VEmrpZ5Ww==} engines: {node: '>=12'} peerDependencies: react: '>=16.8' react-dom: '>=16.8' - '@tanstack/table-core@8.21.2': - resolution: {integrity: sha512-uvXk/U4cBiFMxt+p9/G7yUWI/UbHYbyghLCjlpWZ3mLeIZiUBSKcUnw9UnKkdRz7Z/N4UBuFLWQdJCjUe7HjvA==} + '@tanstack/react-virtual@3.13.8': + resolution: {integrity: sha512-meS2AanUg50f3FBSNoAdBSRAh8uS0ue01qm7zrw65KGJtiXB9QXfybqZwkh4uFpRv2iX/eu5tjcH5wqUpwYLPg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + '@tanstack/table-core@8.21.3': + resolution: {integrity: sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==} engines: {node: '>=12'} + '@tanstack/virtual-core@3.13.8': + resolution: {integrity: sha512-BT6w89Hqy7YKaWewYzmecXQzcJh6HTBbKYJIIkMaNU49DZ06LoTV3z32DWWEdUsgW6n1xTmwTLs4GtWrZC261w==} + '@testing-library/dom@10.4.0': resolution: {integrity: sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==} engines: {node: '>=18'} @@ -983,8 +1043,8 @@ packages: resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} - '@testing-library/react@16.2.0': - resolution: {integrity: sha512-2cSskAvA1QNtKc8Y9VJQRv0tm3hLVgxRGDB+KYhIaPQJ1I+RHbhIXcM+zClKXzMes/wshsMVzf4B9vS4IZpqDQ==} + '@testing-library/react@16.3.0': + resolution: {integrity: sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==} engines: {node: '>=18'} peerDependencies: '@testing-library/dom': ^10.0.0 @@ -1103,8 +1163,8 @@ packages: '@types/ms@2.1.0': resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} - '@types/node@22.13.11': - resolution: {integrity: sha512-iEUCUJoU0i3VnrCmgoWCXttklWcvoCIx4jzcP22fioIVSdTmjgoEvmAO/QPw6TcS9k5FrNgn4w7q5lGOd1CT5g==} + '@types/node@22.15.14': + resolution: {integrity: sha512-BL1eyu/XWsFGTtDWOYULQEs4KR0qdtYfCxYAUYRoB7JP7h9ETYLgQTww6kH8Sj2C0pFGgrpM0XKv6/kbIzYJ1g==} '@types/normalize-package-data@2.4.4': resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} @@ -1112,6 +1172,9 @@ packages: '@types/parse-json@4.0.2': resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==} + '@types/pegjs@0.10.6': + resolution: {integrity: sha512-eLYXDbZWXh2uxf+w8sXS8d6KSoXTswfps6fvCUuVAGN8eRpfe7h9eSRydxiSJvo9Bf+GzifsDOr9TMQlmJdmkw==} + '@types/prop-types@15.7.14': resolution: {integrity: sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ==} @@ -1143,16 +1206,16 @@ packages: '@types/unist@3.0.3': resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} - '@typescript-eslint/eslint-plugin@8.27.0': - resolution: {integrity: sha512-4henw4zkePi5p252c8ncBLzLce52SEUz2Ebj8faDnuUXz2UuHEONYcJ+G0oaCF+bYCWVZtrGzq3FD7YXetmnSA==} + '@typescript-eslint/eslint-plugin@8.32.0': + resolution: {integrity: sha512-/jU9ettcntkBFmWUzzGgsClEi2ZFiikMX5eEQsmxIAWMOn4H3D4rvHssstmAHGVvrYnaMqdWWWg0b5M6IN/MTQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} 
peerDependencies: '@typescript-eslint/parser': ^8.0.0 || ^8.0.0-alpha.0 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/parser@8.27.0': - resolution: {integrity: sha512-XGwIabPallYipmcOk45DpsBSgLC64A0yvdAkrwEzwZ2viqGqRUJ8eEYoPz0CWnutgAFbNMPdsGGvzjSmcWVlEA==} + '@typescript-eslint/parser@8.32.0': + resolution: {integrity: sha512-B2MdzyWxCE2+SqiZHAjPphft+/2x2FlO9YBx7eKE1BCb+rqBlQdhtAEhzIEdozHd55DXPmxBdpMygFJjfjjA9A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -1166,8 +1229,12 @@ packages: resolution: {integrity: sha512-u2oITX3BJwzWCapoZ/pXw6BCOl8rJP4Ij/3wPoGvY8XwvXflOzd1kLrDUUUAIEdJSFh+ASwdTHqtan9xSg8buw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/type-utils@8.27.0': - resolution: {integrity: sha512-wVArTVcz1oJOIEJxui/nRhV0TXzD/zMSOYi/ggCfNq78EIszddXcJb7r4RCp/oBrjt8n9A0BSxRMKxHftpDxDA==} + '@typescript-eslint/scope-manager@8.32.0': + resolution: {integrity: sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/type-utils@8.32.0': + resolution: {integrity: sha512-t2vouuYQKEKSLtJaa5bB4jHeha2HJczQ6E5IXPDPgIty9EqcJxpr1QHQ86YyIPwDwxvUmLfP2YADQ5ZY4qddZg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -1181,6 +1248,10 @@ packages: resolution: {integrity: sha512-bn4WS1bkKEjx7HqiwG2JNB3YJdC1q6Ue7GyGlwPHyt0TnVq6TtD/hiOdTZt71sq0s7UzqBFXD8t8o2e63tXgwA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/types@8.32.0': + resolution: {integrity: sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/typescript-estree@8.27.0': resolution: {integrity: sha512-BnKq8cqPVoMw71O38a1tEb6iebEgGA80icSxW7g+kndx0o6ot6696HjG7NdgfuAVmVEtwXUr3L8R9ZuVjoQL6A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -1193,6 +1264,12 @@ packages: peerDependencies: typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/typescript-estree@8.32.0': + resolution: {integrity: sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/utils@8.27.0': resolution: {integrity: sha512-njkodcwH1yvmo31YWgRHNb/x1Xhhq4/m81PhtvmRngD8iHPehxffz1SNCO+kwaePhATC+kOa/ggmvPoPza5i0Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -1207,6 +1284,13 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/utils@8.32.0': + resolution: {integrity: sha512-8S9hXau6nQ/sYVtC3D6ISIDoJzS1NsCK+gluVhLN2YkBPX+/1wkwyUiDKnxRh15579WoOIyVWnoyIf3yGI9REw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/visitor-keys@8.27.0': resolution: {integrity: sha512-WsXQwMkILJvffP6z4U3FYJPlbf/j07HIxmDjZpbNvBJkMfvwXj5ACRkkHwBDvLBbDbtX5TdU64/rcvKJ/vuInQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -1215,8 +1299,12 @@ packages: resolution: {integrity: sha512-hbn8SZ8w4u2pRwgQ1GlUrPKE+t2XvcCW5tTRF7j6SMYIuYG37XuzIW44JCZPa36evi0Oy2SnM664BlIaAuQcvg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@uiw/codemirror-extensions-basic-setup@4.23.10': - resolution: {integrity: sha512-zpbmSeNs3OU/f/Eyd6brFnjsBUYwv2mFjWxlAsIRSwTlW+skIT60rQHFBSfsj/5UVSxSLWVeUYczN7AyXvgTGQ==} + 
'@typescript-eslint/visitor-keys@8.32.0': + resolution: {integrity: sha512-1rYQTCLFFzOI5Nl0c8LUpJT8HxpwVRn9E4CkMsYfuN6ctmQqExjSTzzSk0Tz2apmXy7WU6/6fyaZVVA/thPN+w==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@uiw/codemirror-extensions-basic-setup@4.23.12': + resolution: {integrity: sha512-l9vuiXOTFDBetYrRLDmz3jDxQHDsrVAZ2Y6dVfmrqi2AsulsDu+y7csW0JsvaMqo79rYkaIZg8yeqmDgMb7VyQ==} peerDependencies: '@codemirror/autocomplete': '>=6.0.0' '@codemirror/commands': '>=6.0.0' @@ -1226,123 +1314,123 @@ packages: '@codemirror/state': '>=6.0.0' '@codemirror/view': '>=6.0.0' - '@uiw/codemirror-theme-abcdef@4.23.10': - resolution: {integrity: sha512-OXvzM2bR1EvZfLBnm/ndM1rujplJ9zV4nd7IV0fKU+RsVu94mPeKX71+fag0AQVllU7BGslg6zw8YPG4FVqLBA==} + '@uiw/codemirror-theme-abcdef@4.23.12': + resolution: {integrity: sha512-Jd7RpoUIMP01N3MRieVlUlMgh683E84lCGjHsDd4IgosgrZYfHJITJZVgOiRk+ei+lGUdDOQmr/jg3Zug7Lgxg==} - '@uiw/codemirror-theme-abyss@4.23.10': - resolution: {integrity: sha512-jVumFsnhIXoxHrhXckoLlKPuuYf9q8NeuAQ23MqNtw+K47+9AAk2kQl8/WJDIlXZV6CLN7QfjnMraneYkQFDtg==} + '@uiw/codemirror-theme-abyss@4.23.12': + resolution: {integrity: sha512-oQbTTH+YV0os0kXDidhOgNeoMhDF7edF/O04wUO/75DTVcaBT+NHWO2BsR4R/vX8/FylUJEOUAoRYdX2mEfnVw==} - '@uiw/codemirror-theme-androidstudio@4.23.10': - resolution: {integrity: sha512-rfcqQIxqz+xO8qmEldrsOxIviTs+tNV6Sa6kfxlYmwRxFyx9btfWPW9eSNLIXUO0S9SLhHezpEhgtPcBDxhNCA==} + '@uiw/codemirror-theme-androidstudio@4.23.12': + resolution: {integrity: sha512-2PDjsaAvPxctvF5+i0eL1ntiZI/Vvi72aHSGIZNFMM5NKSCG6z+n9fn9Ci49Dy2JXkTnO8T20G89/DWhGd36cw==} - '@uiw/codemirror-theme-andromeda@4.23.10': - resolution: {integrity: sha512-Cm+5m1szUgdV9+cjYYyVR+r/qE11roeiq/Fr/bo7H7fcLEk24b24tVrc5O35l1JDoaNpNyXw4Zn3JP4hTcl/CQ==} + '@uiw/codemirror-theme-andromeda@4.23.12': + resolution: {integrity: sha512-XM3WQkHmQr/h/uQlODiP6twa81UDNfNAmGcyWww+3HQIGU8eIRFQ1ZGGgT8tZKgCmdfMOC3p0/qr0d6r3/TfYQ==} - '@uiw/codemirror-theme-atomone@4.23.10': - resolution: {integrity: sha512-ugM/OY9pVQM2CQIdRR7BL0hu6j+VO0+o8fh2tCzaixMjj7VT4RZV06MMlzlgDBwU6SgEne0GA9oz3cS1ez584w==} + '@uiw/codemirror-theme-atomone@4.23.12': + resolution: {integrity: sha512-W4/GOQ5Flw7n59/29UD1uJV2iIVxzr7B/7oPTotn0/2BBArRbJNjOEWYQ1b/OgMl3+mZjNozaYgtr44tHHNSfg==} - '@uiw/codemirror-theme-aura@4.23.10': - resolution: {integrity: sha512-CRFeDb1PGRlZC691sxJXA37F0E3Wt5/jate+7GlsWBR6XDf6c/co4g5VqjSvp+ZHeXe1rhRqqCxKplpcWwm1GQ==} + '@uiw/codemirror-theme-aura@4.23.12': + resolution: {integrity: sha512-BnFsSAH+22CvD3AoQIEzRl8s07CR4SOGcrR2cQJWFLFSb7hlEa0myIEx9H4oEHtE1T40meKTx3pk2dpJTqL+IA==} - '@uiw/codemirror-theme-basic@4.23.10': - resolution: {integrity: sha512-S5sgMzRUbzyZP5V5SpmlAno/RawuI2UhK0JxrlGOkmpTkSQ5ksqC8Nxuh1/ML4nr8bj+arwWv+PasaBG185YWg==} + '@uiw/codemirror-theme-basic@4.23.12': + resolution: {integrity: sha512-KepDYk/yb1DPV733N0UwbngRlHmFjGR5gh4UOXZic9yI14ndx8jEJsa36Em8THNSCqhSpmn1zeCbnXBxhyF4rA==} - '@uiw/codemirror-theme-bbedit@4.23.10': - resolution: {integrity: sha512-QECTPDdzGJ1ttBlRsEJ9oBJ9uXrB+wg+yE2NyQ2qjJJxxFLNk0nEp5YqdV9uqt17c1pd1+elVKWaSt2NO9RveQ==} + '@uiw/codemirror-theme-bbedit@4.23.12': + resolution: {integrity: sha512-g5kHD1qTKCjINicbgLEi9MXR4r41klmum4kvFyo/SURwmjTDmetUB+MIqb0fvkcG9VzCiitxHVBl8ZNKxF8PWw==} - '@uiw/codemirror-theme-bespin@4.23.10': - resolution: {integrity: sha512-ldijcA2n08hw/vIOuJ9DRzhMKEgprWNpV3vn6ITjHPhNqmDbOpgX7DKnErOzoGbEPppTRccNn8PGhJSJREVLWA==} + '@uiw/codemirror-theme-bespin@4.23.12': + resolution: {integrity: 
sha512-XuJlDs5fw2yNUBWRsjamExVlt/EsoZQmlZ97qYhK4CDvc/TjHZysVaMApGcwG56CJhGexf5BAxi8xJCFaDTEdQ==} - '@uiw/codemirror-theme-console@4.23.10': - resolution: {integrity: sha512-0HyT7g6zeFHwRmwimxGaJtf+VawazADpgXDO4sHv4fn7sOC1DYZqpWcU3ePubPNcDk1pv8EturOPIYOw3C+Kgw==} + '@uiw/codemirror-theme-console@4.23.12': + resolution: {integrity: sha512-HnTjbG5+9tQf3Q+2GxsbVrrFfTjf0X+kd6npGMuiAzeakbJVQ+/yzGQVAg4mLU0nnGMZtoayTcdWcuacSIi38A==} - '@uiw/codemirror-theme-copilot@4.23.10': - resolution: {integrity: sha512-qURBFMNi4stunAMEyIlSz7+E+SJsFTcuCiNO1j2SWxyoy/azAfUFElXgNjadj6lizEjwVnzQGXUrRvAPcpI6Xw==} + '@uiw/codemirror-theme-copilot@4.23.12': + resolution: {integrity: sha512-ZICTJQCaFgJI2pJUHuj2NoEd7PZWsOGta1a5jk2SFwlr9JHPkOqrxZixwcoEMEuqtPGdoW+ezXa21H/mlik83g==} - '@uiw/codemirror-theme-darcula@4.23.10': - resolution: {integrity: sha512-dOTRiEmgrwMsOEOdmcZX/JPFh0jdgEFoDUgZwIRiS6QaIZiuEcuGnPBkqxrV1w6VIeRx8Jc+ZVYg1Igrf9QcBg==} + '@uiw/codemirror-theme-darcula@4.23.12': + resolution: {integrity: sha512-+TsLGgPrwmjTyTXEOnjtNCsTpE1B6WDESTEeqFtdonLh2xkkT2URwpaRNumVeeTQ9ink/UBTIiGWmtAWiOa+Iw==} - '@uiw/codemirror-theme-dracula@4.23.10': - resolution: {integrity: sha512-2xTLHT+QwKrY1NKXcbpB3wPoOdR1JVCeCQIT0f4dupudIcJp+XuIDwUX28fHqc5mwk/9pSbiopW2f+hMwL5Cfw==} + '@uiw/codemirror-theme-dracula@4.23.12': + resolution: {integrity: sha512-ZTNKHvXCOdvXF62mLOkrumu5UeegK/Iip7lCwzW3ZdV4HgljDbnWshyLY6M8iLrjTI1QbYjVGJzt7J5lg5xgOA==} - '@uiw/codemirror-theme-duotone@4.23.10': - resolution: {integrity: sha512-h2lIubEMNBZNEvYVMjt08QUt0DhFKXIl44hTU+a60AoL+YLlDJw26KlNbUvHxyqTDZwaX2TZKR5APF14lU+sYA==} + '@uiw/codemirror-theme-duotone@4.23.12': + resolution: {integrity: sha512-8FUZQyUBJTLaxPU4trS7M658F0NTQ3rAjKCw4jW1N/LlUb9zedgxuGA8Ta/YWg05UbuqHKnMXpFlEXKlLzQqUA==} - '@uiw/codemirror-theme-eclipse@4.23.10': - resolution: {integrity: sha512-5ka+NiAknu7NIPerDMkoRrO7p5g6SIkuN0V51A5gjW6wDZtGTHzsLTbU0HwLVaYqm8tKxb0RHhtgnCTPlrN8XQ==} + '@uiw/codemirror-theme-eclipse@4.23.12': + resolution: {integrity: sha512-SVyvHPtrmS47IAnREuqG3YcAFJziLoFNtXg4dXX1IGjJzrT1ONGAn9vfMx0KNWOdGHpFUUFjiH8G3igI0VhZZA==} - '@uiw/codemirror-theme-github@4.23.10': - resolution: {integrity: sha512-jTg2sHAcU1d+8x0O+EBDI71rtJ8PWKIW8gzy+SW4wShQTAdsqGHk5y1ynt3KIeoaUkqngLqAK4SkhPaUKlqZqg==} + '@uiw/codemirror-theme-github@4.23.12': + resolution: {integrity: sha512-yxgycQxA1fNVdrjIZ7H7pq+9Q+BeKLmD5oq5oOlw7kVJrnToOMBylv5oIWplVd2s2LFo47lIhWrVC9Ay3b6Baw==} - '@uiw/codemirror-theme-gruvbox-dark@4.23.10': - resolution: {integrity: sha512-CZVNq/2BTQP87ICPtRlqplq4iZrnXp531dJLjCtMaRR7xTpyi/wDhL36XOVydQQzmDooJZSp54NFR8uxIMtQ7A==} + '@uiw/codemirror-theme-gruvbox-dark@4.23.12': + resolution: {integrity: sha512-5SHwgUjpqSDpcWGM3xSWYZYZPRxq8XfrXB1XoHMkfz7pR5sXuJQ/sj09XM5vG+NiaCXGTvXITxq4QoixczpOqg==} - '@uiw/codemirror-theme-kimbie@4.23.10': - resolution: {integrity: sha512-eyvhlDHMfms3hGYY9zalb146gqvHKmv+x08FSqRktjMTRA7/SjnG6eg1ks0OqdRsipE7UiM7niqswB+yqa8F9Q==} + '@uiw/codemirror-theme-kimbie@4.23.12': + resolution: {integrity: sha512-kr+9nKzPgjVDyfPxRMjbRADrLhYjDsNyQIv4yXEgOdmVBxh6w2yzP0zFB1oEds+xzTsx5uLr1O4m2GfRRJx3xw==} - '@uiw/codemirror-theme-material@4.23.10': - resolution: {integrity: sha512-RN+K7Vc/qea+ky5w0KVCtfwfVqtedicOMKxeBu/fs82tyU5Q5oQ2UlS5Y94Sv05BRkHYluOvMwypUwWkSq2q6g==} + '@uiw/codemirror-theme-material@4.23.12': + resolution: {integrity: sha512-M7yo5mE0QDKjWm02sN6Sw3Ld4/3XvAuJZcEdKYgqmiGI7GKL/nAV8HDfD8iMNm0HGnyozK402WLzb84oB57gAg==} - '@uiw/codemirror-theme-monokai-dimmed@4.23.10': - resolution: {integrity: 
sha512-9PsCWFviRjWo0WchGj3BEY2/xIJjDP0fiM4sRuaS8FZ0LSC938Izd7o1x2t01SaOYmJyCV516sSjOv+WCFnz0w==} + '@uiw/codemirror-theme-monokai-dimmed@4.23.12': + resolution: {integrity: sha512-cFLtLIR8UT7/32imXsiEONBMHbHsqfXjOXe01PfzvDclghE1TDm/4kn12pG6mjztphnngnxQfYNSn6/eQTOq0Q==} - '@uiw/codemirror-theme-monokai@4.23.10': - resolution: {integrity: sha512-P1KEAiXA6UlQdqwSTpPZwB/VAIaoqtu4oigSUgL9mt+UKzhKyurANFypYSEHmGDuweq80lrOUXCnPc4mOR9xuQ==} + '@uiw/codemirror-theme-monokai@4.23.12': + resolution: {integrity: sha512-axDkBX+txR4NKZLrEkFd+m50UQS579UEqneHz1cTQ8r0z7OYjAZo+7CwdXF5BliJOgaRTC617fyUqEr4X6UCkQ==} - '@uiw/codemirror-theme-noctis-lilac@4.23.10': - resolution: {integrity: sha512-YWi51uw8VqgU8x21b277HJuvjHh9G2qHueatpd7a/h7LdW0elxT3VqkAos8ludjMxWAC3lypUFht76wDWVXXtQ==} + '@uiw/codemirror-theme-noctis-lilac@4.23.12': + resolution: {integrity: sha512-iXc9O0AfeiUp4OXRf+u7nLfz5SQpcCVtvbOxb0+18taAsyisikkKcejUwCUk0hVauW9VKL2FErDkOOzn5z1FtQ==} - '@uiw/codemirror-theme-nord@4.23.10': - resolution: {integrity: sha512-GUGzb0eUZLPzneTmbfk7qWz812B1ak97XOUhRZUPnzVHztgqIVZcwYRf2XkoAySNHU23szZsxxyNZeQ2nAZs6A==} + '@uiw/codemirror-theme-nord@4.23.12': + resolution: {integrity: sha512-U8w3VIuim9H1lEzReeNyv1+uwnPO+gc3gLXCFwxpwF/XDrTQf5TjQFL+c1AGVCYM/Ywf5sflUcF96C3z634r6g==} - '@uiw/codemirror-theme-okaidia@4.23.10': - resolution: {integrity: sha512-9Ccb96a9HPz4a3U0iH6CTHpLVfAeVCSlorp+McBp2QcoJIo1kwZ3hu34MDupW0m9jfdLddAlCEUuK3Kmre8ClQ==} + '@uiw/codemirror-theme-okaidia@4.23.12': + resolution: {integrity: sha512-iunSHnMoCxJMUX1usfEQFIFzKhv807aMArMJ324y49nZLkmwZDuGNLYg2x4MUvdGyD9Ksa9N5eujoq++SekfGg==} - '@uiw/codemirror-theme-quietlight@4.23.10': - resolution: {integrity: sha512-ZBTFmWITpVSGL04QxcGriP3GRMk8VmYM1kk1Eb2PevEmPVBCBlBMB58Px+YBHv6MbpjQ+xes6eVIsg3oblpINQ==} + '@uiw/codemirror-theme-quietlight@4.23.12': + resolution: {integrity: sha512-p/btvByFWjRGNTQXRySOiInub3njLEoepPGqhU8V099ndmprzl3ef9bO5ANWDM7R8p6uciYUp6hS2u+PJPtu8g==} - '@uiw/codemirror-theme-red@4.23.10': - resolution: {integrity: sha512-XHhfbOVI10NZpXRXgXzarHe8VobFxh/7gEGGY84ZnGg4+/7OHtChSSRCkNba/wl987JHfZlx40bpZDOMQ4NDug==} + '@uiw/codemirror-theme-red@4.23.12': + resolution: {integrity: sha512-m9Xu01Ko1WuLThuYDdOQqVS6YgGV5Xz9s0Qv1idIbKNeu/HX8BGeWnjrxjkv6EhIX0zH+uN4wcWELZ7dHZZx6A==} - '@uiw/codemirror-theme-solarized@4.23.10': - resolution: {integrity: sha512-Uj2Qip1aIrjsUB0Z/L2i2mVaQXmOUeoz//wElMfU7ek+21cbVKHvOzHiDWHKb2DNWkvGWOdl7WqqK5WyGhioeQ==} + '@uiw/codemirror-theme-solarized@4.23.12': + resolution: {integrity: sha512-qAh5jS7P/qwLAJ93OP9r4Z3DdZh0SFq0uhwYXckSVVYtuypiYf3ZSpQLuiSQ7I/B9BMPtENt7GgthFIun21PYA==} - '@uiw/codemirror-theme-sublime@4.23.10': - resolution: {integrity: sha512-iU6AuaLsztCwNaEcvVtDBqk+zj46EevI8uccnDGP2orzQJwk+n9OYAnbMiLNXT8W8x6YCffQOJpcxN+Po9m8BQ==} + '@uiw/codemirror-theme-sublime@4.23.12': + resolution: {integrity: sha512-fblzQo2xIZqznuu26FhkUST0JFdcJS1pWGJfJUpR49/cY1yni7LW5m30oAklJT2NpPW2ZFi4YkhCqkOfEzXYAQ==} - '@uiw/codemirror-theme-tokyo-night-day@4.23.10': - resolution: {integrity: sha512-fGa05Pv3oWY799UyWUvv1ug/DM0Z0O6J14aPt6jewVe6MOJd9JHcMZ2tVm35k+wCNwQuflHDM1Z5EfT//004BA==} + '@uiw/codemirror-theme-tokyo-night-day@4.23.12': + resolution: {integrity: sha512-wT/A0SI7aD9j3WQXkLlk3mLk81kZ8ZGD1Okz+YIk+DngH0uD730c5ZMv6AUSHbqAtPePgsjSOHMNS/IQfPOunQ==} - '@uiw/codemirror-theme-tokyo-night-storm@4.23.10': - resolution: {integrity: sha512-ZSWtQFKXl5M112HANpmMQljNB2H/M5mEO+KXMz+7+kMQB5+QIq5eNlCxFmDQIWvpMayXbjkubeB/QrqnHFxQZg==} + '@uiw/codemirror-theme-tokyo-night-storm@4.23.12': + resolution: {integrity: 
sha512-vBk7OMWmkNlReajWdApBe009SgE6h1D8GnmaamjLklnzuIjEDyy2HnbvRxK4cwaNBGDDT5SGRWVbiiKssLTQdQ==} - '@uiw/codemirror-theme-tokyo-night@4.23.10': - resolution: {integrity: sha512-SXPU+E1uJiJMdhtCHf2FiPqUPWjAckFaeUF3TP6iP7oyoU4wFzBTfzNFSztaRnpnmsY4igRrqMLQGwTUtWdByQ==} + '@uiw/codemirror-theme-tokyo-night@4.23.12': + resolution: {integrity: sha512-4J4+RhN2qXeqNouY79K/0g69xjgTxqefZdfHv7Y7xFh0n76oR387ojHxut+98rcEOX7GKGXn8tXRoyhWWZVcxg==} - '@uiw/codemirror-theme-tomorrow-night-blue@4.23.10': - resolution: {integrity: sha512-NTMLMTjzAXRu4KE+N+GZxS8YDwIdaWAiSiP+kY02cPocHa1Bw6L8FpPhq6MOQxY9U+hzdv0erRWpMxAglSZguA==} + '@uiw/codemirror-theme-tomorrow-night-blue@4.23.12': + resolution: {integrity: sha512-84aXg/k5kERu3iri48sGT/8uap2OQKELjt2Hv+js3mZcBUm7rM39NuSvXueSXhCoR4HjA3aV5q3rvUph9peZeg==} - '@uiw/codemirror-theme-vscode@4.23.10': - resolution: {integrity: sha512-d9qGC6/yq6d+REMZUs7jrs2kGZoAAyNu0USOxsDa3Mqhh/dSUfC+ErDqwF02OfylsdcuPSzelu99EAvkjorpmQ==} + '@uiw/codemirror-theme-vscode@4.23.12': + resolution: {integrity: sha512-ePBaUQiixrpmSoZJWCGXUStKmcM8G0VBv3UqwPR+kNGBjqDife76Gbhv77izSeEI3zRPzL+683BOdclkvWnsMg==} - '@uiw/codemirror-theme-white@4.23.10': - resolution: {integrity: sha512-Nb6XJ3lQBAyMKCRGT2FJU1O6+RhCXiIVqrcfW7/A6zFlgktBMPQPK+YHkt5vUzYN1M58nL21auiMruUK+ROJGQ==} + '@uiw/codemirror-theme-white@4.23.12': + resolution: {integrity: sha512-f2pHnI1UWNQQ3d8A+zNveMQMqE7RVFoMcgCAp5owM6EDI+o0RjyqUPDLH9H1mvCPc84qEMG7ZZrxjaG0C7oG1g==} - '@uiw/codemirror-theme-xcode@4.23.10': - resolution: {integrity: sha512-meYMRLr0mTqGZerihSTKpjceRkxbFfpnXX9RyDZFwloydxzWY/6iGEt1/RMY+cWoxgjBYSiunqtU0qgQH0WAuA==} + '@uiw/codemirror-theme-xcode@4.23.12': + resolution: {integrity: sha512-9KnTjhXFqwGphQl18CGRzRc6yB8SZkyv4L7u+j1QOq/nFmk3i44HKuHiuKY+J+XfFq3Cmxg629o3CK4ecMsvUQ==} - '@uiw/codemirror-themes-all@4.23.10': - resolution: {integrity: sha512-jIjm0HwmYsAJZqDtP+lH7b128A8Ev1mNthPu5HqzHtoKtzRr/oR5vqptZj8YpOHVj1Ip2wAq/V1boBE7uQMhdA==} + '@uiw/codemirror-themes-all@4.23.12': + resolution: {integrity: sha512-nXBqEfbPUK2BQgPfC/18IlWX/jDKRFLVnSANK+NJBhz6coht+VVXfcGPcahjh+HfTvvWb+Ld6ZQAXBBpHxR6Cw==} - '@uiw/codemirror-themes@4.23.10': - resolution: {integrity: sha512-dU0UgEEgEXCAYpxuVDQ6fovE82XsqgHZckTJOH6Bs8xCi3Z7dwBKO4pXuiA8qGDwTOXOMjSzfi+pRViDm7OfWw==} + '@uiw/codemirror-themes@4.23.12': + resolution: {integrity: sha512-8etEByfS9yttFZW0rcWhdZc7/JXJKRWlU5lHmJCI3GydZNGCzydNA+HtK9nWKpJUndVc58Q2sqSC5OIcwq8y6A==} peerDependencies: '@codemirror/language': '>=6.0.0' '@codemirror/state': '>=6.0.0' '@codemirror/view': '>=6.0.0' - '@uiw/react-codemirror@4.23.10': - resolution: {integrity: sha512-AbN4eVHOL4ckRuIXpZxkzEqL/1ChVA+BSdEnAKjIB68pLQvKsVoYbiFP8zkXkYc4+Fcgq5KbAjvYqdo4ewemKw==} + '@uiw/react-codemirror@4.23.12': + resolution: {integrity: sha512-yseqWdzoAAGAW7i/NiU8YrfSLVOEBjQvSx1KpDTFVV/nn0AlAZoDVTIPEBgdXrPlVUQoCrwgpEaj3uZCklk9QA==} peerDependencies: '@babel/runtime': '>=7.11.0' '@codemirror/state': '>=6.0.0' @@ -1374,8 +1462,8 @@ packages: '@visx/vendor@3.12.0': resolution: {integrity: sha512-SVO+G0xtnL9dsNpGDcjCgoiCnlB3iLSM9KLz1sLbSrV7RaVXwY3/BTm2X9OWN1jH2a9M+eHt6DJ6sE6CXm4cUg==} - '@vitejs/plugin-react-swc@3.8.1': - resolution: {integrity: sha512-aEUPCckHDcFyxpwFm0AIkbtv6PpUp3xTb9wYGFjtABynXjCYKkWoxX0AOK9NT9XCrdk6mBBUOeHQS+RKdcNO1A==} + '@vitejs/plugin-react-swc@3.9.0': + resolution: {integrity: sha512-jYFUSXhwMCYsh/aQTgSGLIN3Foz5wMbH9ahb0Zva//UzwZYbMiZd7oT3AU9jHT9DLswYDswsRwPU9jVF3yA48Q==} peerDependencies: vite: ^4 || ^5 || ^6 @@ -1426,215 +1514,219 @@ packages: '@xyflow/system@0.0.52': resolution: {integrity: 
sha512-pJBMaoh/GEebIABWEIxAai0yf57dm+kH7J/Br+LnLFPuJL87Fhcmm4KFWd/bCUy/kCWUg+2/yFAGY0AUHRPOnQ==} - '@zag-js/accordion@1.8.2': - resolution: {integrity: sha512-JszESCOvftl3dG6lEPjZp2p3+0VN0fwMnW+1jhWwMEe5MZ0y0IrcXww2dxet1ln+w5ViRdOTeDR07idbDKYAYg==} + '@zag-js/accordion@1.15.0': + resolution: {integrity: sha512-EKNeuKx+lOQ/deCe/ApCjVPxpxpDwT2NXvMPL+YvqXmSv7hAnTLs9fDKjbDUQUMmsyx32BsBd8t6d17DL3rPXg==} + + '@zag-js/anatomy@1.15.0': + resolution: {integrity: sha512-r0l5I7mSsF35HdwXm22TppNhfVftFuqvKfHvTUw+wQZhni4eUL93HypJD0Fl7mDhtP5zfVGfBwR048OzD0+tCw==} - '@zag-js/anatomy@1.8.2': - resolution: {integrity: sha512-F88Q+Bo1KOFZPHLffOqiuemkgZJbtspQuyOJcWb0bL7Lc1pYC4DIpIj26bcXT8xICDNcwR877hI0Wko//ZgTVA==} + '@zag-js/angle-slider@1.15.0': + resolution: {integrity: sha512-xIZBa9V6d05uK7+XQVhfdsThqbZKimSYVxtMOWJfG0sKn63N9VGPxL1OtOMq7FA4IP3SyvlelsGt+3t82TUiyA==} - '@zag-js/aria-hidden@1.8.2': - resolution: {integrity: sha512-/SV23qfCWMbGdsNZ2pgmVqOv6a4yd/2+FAIRy/6bjZ8axBzhm7NvfDhqjZciN4JuMch82uafeTBZ7pObk/fU1g==} + '@zag-js/aria-hidden@1.15.0': + resolution: {integrity: sha512-3ogglAasycekTHI34ph16mqwM+VtHCOMtrFHWzPwB16itV5oDEeeMNdQXenHSSyQ/07nJ2QsRGFFjGhPm1kWNg==} - '@zag-js/auto-resize@1.8.2': - resolution: {integrity: sha512-Z+94iR/vbPixiifjF+pmOa1UtuM5TTnJqM7D+Ol3WenRrm+Urp4JWAcyaf76NRVWK51KwMwWLljeA6J0H3V6gQ==} + '@zag-js/auto-resize@1.15.0': + resolution: {integrity: sha512-EXgrsU7OWxc7obSOt8Okh0144H8DQi1S84OsOUY04Uni11Dnp5/X8+t6mvBbkw4/Qyz5UBjChjocwBcO+HHV8w==} - '@zag-js/avatar@1.8.2': - resolution: {integrity: sha512-PWhYVvXyOt+kdi2Vd6GfqGQQruh1TNylw6TzNbhPt3B6Fj6uNvQqfEsh6yNErfnCeaa4b/Q+48rM4b/t3DzM0g==} + '@zag-js/avatar@1.15.0': + resolution: {integrity: sha512-EHGxzXb1mLf3n6x0z/rqFl1mghDB/gyfPAeaFUoA/cacmmMk8YB3aDUXkS9pTgN9stYJBM5f6T4xB1ZUhrP8tg==} - '@zag-js/carousel@1.8.2': - resolution: {integrity: sha512-ViPcVQFQfw8ry3i4m2HYixTfN5Km979TWtMnDKdDM3csXLOQJvfCIHtZ/08wWn1302zaDMQe72+p9jDqzqntMg==} + '@zag-js/carousel@1.15.0': + resolution: {integrity: sha512-ZI9H34f2utdJ2Ek6GZa+iuRH4eC99GHD/VEOKLdGani8uadpT2v8M5kUwPGrlAJq9SiPbQ2UuXBmCkmurPQqdA==} - '@zag-js/checkbox@1.8.2': - resolution: {integrity: sha512-KWVKo2Cofs9bjKf9QN9d9UJ6jQFuKfTPT4smDIqhXo4MIFa5eOd6yxvwbgvLvBlvvr9I6Amm9T4e9XxFbyrHdA==} + '@zag-js/checkbox@1.15.0': + resolution: {integrity: sha512-6lQvPQNJXt7R0xxdpOuh2qtmAkzdBdqSvFIH7fE6GJzJ/AWiRZh0X+9deLQ76CN4EDUdxizEe7MlQfTI3a56aw==} - '@zag-js/clipboard@1.8.2': - resolution: {integrity: sha512-KwyFxLDPkEwjiI6zxRKG1gQk1q+lL1HN6nvGCMKRxoDtYVaY9VRxQ6mVNg2VUIecM8uuhRnkM1WHGrSTUcaFcQ==} + '@zag-js/clipboard@1.15.0': + resolution: {integrity: sha512-Q3kh0fHvOEAJUywQm3zAWyltrYyiI8OpeZQ18k5Mf3/M+bq3gSphZL0+AYsgGbKUg5O2+hJ1SfiErAjyhRtBQA==} - '@zag-js/collapsible@1.8.2': - resolution: {integrity: sha512-rtvR4WaMnjv0cW6f+wYqIKkRGhckqlY7nVYBUjGqIzlKq0VNzRgugS8qWpoqdupQJ9wyjusb/GXLOudqpdl1lw==} + '@zag-js/collapsible@1.15.0': + resolution: {integrity: sha512-GX0kdMlKk4Yk5k/2wN0prudf21k+TfArGr4EHqimTDR0vQE3dSdb3pYyPjw20fLzceKHBBCLsoi2v+YnS75gHA==} - '@zag-js/collection@1.8.2': - resolution: {integrity: sha512-GQ6bMscyX3R5wXct6pIMFNd9vm/Ofux7bAwdavp1RrYu/iMKRg/tLbJIOYMQ9VXpjbiOB+6f2GVtHAM0eYLb6A==} + '@zag-js/collection@1.15.0': + resolution: {integrity: sha512-oC3i6c/oP/FuNPsfgoC1reSXbAvDBGXl0HU3CcvXiNLHbjg2ek8J7kbow6MNuXK6chiksiOHbzKxHl2Oo0Ox7A==} - '@zag-js/color-picker@1.8.2': - resolution: {integrity: sha512-WFuU5T99GPtqiD1MBZlurBjNMpHZQmbzaTgO6mdKQv3IKa2+I2jqXlnTnJbjTRmsF2DhAo45mEyGOvLwlTfTNA==} + '@zag-js/color-picker@1.15.0': + resolution: {integrity: 
sha512-DGujS24h1OWkYL+TWyd+xukOO8NBgcSfFCINffa4ivkHtNx3nC28qkwLPRASbl7AK69pbrcuO6bx1Sy/JQJw0Q==} - '@zag-js/color-utils@1.8.2': - resolution: {integrity: sha512-6oB+oxCSQoJu8sw1POQNzFLRN1wFDR5b+DSincqBR1QoKLr5K4iYmwJZ7UySvDF8uZATaShvB/qVVxniUpZ17w==} + '@zag-js/color-utils@1.15.0': + resolution: {integrity: sha512-SKo+p5Fu0TBtdDua8UHVjptOkwLLBFoD499Z1FER/gr0R/97L03Kdir0YTxvKn5pXWXYY1EQn4hpTuTITN16lQ==} - '@zag-js/combobox@1.8.2': - resolution: {integrity: sha512-jQo1cDtsUlBMPmBv/P7pUBHpxu19L+Pd5zXWOcdYXXMyFQg/KrW+PLS84G3yk2UCoH7ywKY25wFdMcOrqrTdUw==} + '@zag-js/combobox@1.15.0': + resolution: {integrity: sha512-HBck3wcEeIOa7IQMsUkUKbm9cAU7bjoklIyq2zFGn90k7DcDa++oXK9Z2pmcd4TPoBYiyVuuXucaCcjmLX8V/Q==} - '@zag-js/core@1.8.2': - resolution: {integrity: sha512-vZDvvXuoxKnVXqBS6H6ZGbfxRWaQ9DStVS/a+tLdP0pz05NJwyJIPSWOOHZo9XPDiN4j1mRaTVcSvNpuOSEDTw==} + '@zag-js/core@1.15.0': + resolution: {integrity: sha512-P/8F3IXabMhpFnc6hC7GDg3rvUnvY27cuZU04hxjUqTH6+SfORIA/Uvqd4ekhC+dIprL9jicnFrmGgcyelyxfQ==} - '@zag-js/date-picker@1.8.2': - resolution: {integrity: sha512-SnZgQOxUajnuQUDIcq73Gxy+fifm3/F0H4tokE8LAbbkcf5kr/Pyin+2amhiXBkbDiUbeCttx34TlD4HXwmjyQ==} + '@zag-js/date-picker@1.15.0': + resolution: {integrity: sha512-IZD0V9MAljp1QhxYbST80AonryuDnyx7hvEy/RrBY/VOx6I4STtKfcSJ5ZZgVIzJfH8Yyaed4+IwcenqG7W5YQ==} peerDependencies: '@internationalized/date': '>=3.0.0' - '@zag-js/date-utils@1.8.2': - resolution: {integrity: sha512-KFMcZMb7xC7XypH1VDQIiYv4dpxB+1JEG2QX7zbYos+QKd41A8tNtaDnfJX+iePVsJV156gqiOrtogNvz4rJ8A==} + '@zag-js/date-utils@1.15.0': + resolution: {integrity: sha512-FX9EesJRnUTYTpbXf5EVfCbsXW5vYtZfc635aQzojc9ekk1FGcHpqQs8ZKfCOTPuauZFOX9i6139A4KoPfQOiw==} peerDependencies: '@internationalized/date': '>=3.0.0' - '@zag-js/dialog@1.8.2': - resolution: {integrity: sha512-1XJIb0/YNBV5LgcRQ7ZwS/GvJiIy1e/iaZvYea6RRAInxcNH6KFon9U1Hm1Lfdz9GryCMs32WDhlFcYQoeGlKw==} + '@zag-js/dialog@1.15.0': + resolution: {integrity: sha512-Vlt5vySs4u8c8xBEh2JMUvRfPc+aaVEIIUtFVxpc2ORWhBXs9glijyp1yf3rNHJhjj8gqqhF5sEvs3yUTTAk+Q==} - '@zag-js/dismissable@1.8.2': - resolution: {integrity: sha512-YGQB60pr/jbldJlt0LtToriJEMX8ds8uxienPModMgzEPo7yEDf30VMo4Ix8Sm38E6CJBOcm87vKHrrD8aEfnw==} + '@zag-js/dismissable@1.15.0': + resolution: {integrity: sha512-yv575KWy8gA1p4aajOiY5l/nBQ3Xw+Mrjpungp1+wiGd/98eNAIKJ6/adldfbE1Ygd/Q4Dx2VQ7D1AmiTdwUSw==} - '@zag-js/dom-query@1.8.1': - resolution: {integrity: sha512-+2DYAW9riWnAAf7etTkaVqpaTHjYSHYGExJtBmZ6KurmYsc7Uw46mAcIImakZhrg69AI0cpL4b2YJHMQz8GGZA==} + '@zag-js/dom-query@1.15.0': + resolution: {integrity: sha512-z8H/j/Zs0eZEsGpbonScmlKSv0jEXKiAwUCrvQ9Mt6Gz9n0CQRM3MkFclSsM8aeiSv6qKLlhPfkzjl18OLkbgA==} - '@zag-js/dom-query@1.8.2': - resolution: {integrity: sha512-bn6Pxga19PJzpDb+Oh326kn1sgVfO97mxRzRFqzrKz9NuANGlCblmv2NTYmhfppqE1nt9QyLLhyQ2BLbzwouLg==} + '@zag-js/editable@1.15.0': + resolution: {integrity: sha512-F14HKZuDsfkpfIkaF/ZDYPkz/pFf6VHrvoV0rdhj8wb8QJQ4nB+lgBv2APSwkEaFb/gGrnE19v3Ojlt5tqpPsw==} - '@zag-js/editable@1.8.2': - resolution: {integrity: sha512-NFg5qp2IzE0nvDFf+UyFIIHGFBCyB5r74YIVBb0oJnVcIzrYa1+HA2ZrNMzTnjpZdx7B5lE/99VAsvk2Mb+GtA==} + '@zag-js/file-upload@1.15.0': + resolution: {integrity: sha512-2hAlQr9qdT8EH4XnmkNkEIDCCsmp2SMoMAjq6nJKYO8UJNQGRanU2B5S8jV3quJBz0vIY43SwyvqiZ3+1VrJSg==} - '@zag-js/file-upload@1.8.2': - resolution: {integrity: sha512-b+xt9W5CqFG0NCB4F6C29FcFPlV0q5LC7m7mj7iMhk+dRkWPVhxr9o5SFPtjXLZlncFNgHfMkBU7Ktx5JY8CSA==} + '@zag-js/file-utils@1.15.0': + resolution: {integrity: 
sha512-tahJt3JmrXaOtGiknH5PxIiOyyNvroMfjiBqOqnNksIPzDoWmVNxHOEme/ts7dJlkRD8U2qm2NFC2VS0bKerzg==} - '@zag-js/file-utils@1.8.1': - resolution: {integrity: sha512-IdulHjOzPeZWNURY1rM/FbltdnXIOjUsOA7wWAped6oMMtDmWlrfpKtFs2emnXd04mZLnZN9yBO5WtHI7TTWeg==} + '@zag-js/floating-panel@1.15.0': + resolution: {integrity: sha512-AYYFseA1MeQUZl+zjNoKUu4j0kwz8EyJd4oJjs8uJIR6KG8u8QhpWYIBUny63M6AtZTCSYQAgBEcEh+mrbEyyQ==} - '@zag-js/file-utils@1.8.2': - resolution: {integrity: sha512-VBn2PeVtfj4c4snVcvp9oVFFiOVwJQ1OvS44CXv2xl9u4hRnDVSHalNmdj5jOqspNmTy9xNCKQWPK73ef26msQ==} + '@zag-js/focus-trap@1.15.0': + resolution: {integrity: sha512-N8m/JpNe1gHUPJlr0hyGUdHg6pAuyJKkBaX0s38cyVntlo2CJhyAWZGuUdocpT2Q3HNPql666FNnH986rYPDKQ==} - '@zag-js/focus-trap@1.8.2': - resolution: {integrity: sha512-GzKdicdiVjlOOsNzmmRAZVccs902PXnoyO+qkzXlIsr8+RPRgtPlZthIp6wtr4CJ2vLOMByvrEt7wCNSIoDzxA==} + '@zag-js/focus-visible@1.15.0': + resolution: {integrity: sha512-TPXBf47tj6L0hhZNl9AWhuLoVzfPaNPM+/Gw8t9l9Whvy6v9rk/rqUCidY5LsrQuPiKTi7s5WI5J+Wod8ib3gw==} - '@zag-js/focus-visible@1.8.2': - resolution: {integrity: sha512-YXkB4ClgEf/gTRGUrTDThvxfThpey41dDKcuQIPTA6F76ji4jLQiDYLnw4KDxLW8uLL21jZgctO5FFdIMoxJeg==} + '@zag-js/highlight-word@1.15.0': + resolution: {integrity: sha512-Rwr/rRm8BaF2xW9BAEJeA2wpFVx6HzoezfYQX7GFPPgw3N8nBMAYNjx+i1YIwIEcNyad2rbaBB+pSd2fZLIniA==} - '@zag-js/highlight-word@1.8.2': - resolution: {integrity: sha512-yI65t4bFxTUkZbHuntRCdBPOEQdpO8G4nkoY8WznBetQ1LLhqOd+7KXelzq+Vot2RbXzop54xEBvgKeTQbGOgg==} + '@zag-js/hover-card@1.15.0': + resolution: {integrity: sha512-j6BsE+metdnv/C/Ls0TZzAMN78rtS2r8M1ccHY5FFTGyUvZnlE8BY/QPNyCSSSCUpynymzMYh3IMYlxbJgfpSQ==} - '@zag-js/hover-card@1.8.2': - resolution: {integrity: sha512-GwYGsojbVpyhOCz+XUnEtxA9ZmUlnfPrnE71j/Gc2+oLtOFwvnhINtBTZPCUXO5ec95uG9QFwxc63x1upB/PIA==} + '@zag-js/i18n-utils@1.15.0': + resolution: {integrity: sha512-anxSbT8kLbJaFJFSb0Ork2j/Lp+XVfMNCIgiBR2BuqUlfX72k23TIJvRxAfwNIkUfs0L8ikaSgLss9OwS4mAnw==} - '@zag-js/i18n-utils@1.8.1': - resolution: {integrity: sha512-Epj/VOsJppsHlo2YwGV718CsZEneH9OVZtD8LB7j/zGXjQr/LALErCQQVOJXlBO6Ky2G/ZE/vK4LyO5GIjkTKw==} + '@zag-js/interact-outside@1.15.0': + resolution: {integrity: sha512-OwBf/iesQGU9Oq3xe/tcK7gu7xipiGWsmwl2CcScr0fTp3BIMbQywHS928IgPk1DxA8KTHodY8wBjoY1dskfRA==} - '@zag-js/i18n-utils@1.8.2': - resolution: {integrity: sha512-Zhiw2U14kkYRPru/5nWYei0l0eiQOkTu2VDCc/mn9jd7+zDEIYNp3b1CvMQ3/ES21i1HH6uBuKKujuktH/f6Iw==} + '@zag-js/listbox@1.15.0': + resolution: {integrity: sha512-Gcg76uWZwUAyMFZzGWpHnFCU/aaquNbXmVnyzzBgE3Co2snkv02rK1yG9iBwemZe3e5+VBifMMAtLLPAQJdz+g==} - '@zag-js/interact-outside@1.8.2': - resolution: {integrity: sha512-7N0v/vdsJO5a7AjwWofZ99AP5/hzFfCShSgEfg4GpRk7gPOdFanm7U3Zy9LtVvM9gwRncqGwjo4yd6O5G7SCnA==} + '@zag-js/live-region@1.15.0': + resolution: {integrity: sha512-Xy1PqLZD9AKzKuTKCMo9miL1Xizk/N8qFvj64iybBKUYnKr89/af3w7hRFqd2BDX+q3zrNxPp9rZ6L7MlOc7kA==} - '@zag-js/live-region@1.8.2': - resolution: {integrity: sha512-QkowjTQj9C6ZFSCB+E7QNU5yjWMA58cAR5TcWgdLLKAP+SJwaTdtptpyFq71VH+jT85sNvvBZVya1aWZrbGopg==} + '@zag-js/menu@1.15.0': + resolution: {integrity: sha512-GbEBVYu0w7+88xrGX2GrjXfnwWuX5jLhoLiEcuxvxJQal/nahKrH4AGXJvHXNaRbj+53V3nWAh3u70C9210PWw==} - '@zag-js/menu@1.8.2': - resolution: {integrity: sha512-kEz1FJ0kgkutN1XDpS27GAkk1T/v3fUctBHrj0Wvt7TvQfPyzudyjmj35UEP5e8AglJAoQt2Am93YPSQ2deJwg==} + '@zag-js/number-input@1.15.0': + resolution: {integrity: sha512-+kK8kyXJhIAbEUnswoMDR+DSJUmvDNIOW0ffuZ9pbfukN3p6zaA3/dCp2Dtg3bQS7hGrFWgtrdejJ8l+mVvUAA==} - '@zag-js/number-input@1.8.2': - resolution: {integrity: 
sha512-oyxXI/FDDj40BMkkLHDu84me3TgLIZizQhMj51R3ZM5Qg5BucYbamQKDgcGbb2CI6BUPo+6jklO0QZmy8/2cTQ==} + '@zag-js/pagination@1.15.0': + resolution: {integrity: sha512-Z62Q41fQPWqk59QyJk+9J0Ad3H9DCqZ0zZutI6iH8DdzT0A0xxmT6zhup6DM/8C8h0OLlaHFTWQnj0RdRNrnXg==} - '@zag-js/pagination@1.8.2': - resolution: {integrity: sha512-+Ummfw6r0Ll4oFVRvoVhPSvox8y2vvIocjGip0e6ze8zaUuHgUYzNkcK7OalZ3pZkh9y0+9MlnqtsQwxZhMJPw==} + '@zag-js/password-input@1.15.0': + resolution: {integrity: sha512-oHuZKDRJIbycqWpTVznufy4L7K2g8kwcEaZ4runkwO2ocF00zP8HVmOZQzmhkUgTny0azErQydg8XE0VR5OfYg==} - '@zag-js/pin-input@1.8.2': - resolution: {integrity: sha512-TME6Maud8Z78ZxFru7WvBGf5EQAuMoPQfdTMpd8os24srtO+HwiFN1wbeBsV/6BmbOeA9gFuB4K8O8rqNn3uqg==} + '@zag-js/pin-input@1.15.0': + resolution: {integrity: sha512-IykjogZBG+BfbFXymSa+KGpOi5CrV9kl8HRm6G2V2Sr3NA5jEwMFaGSd/QrcHS9vh23D1Smx/io4pvF7c3q0kg==} - '@zag-js/popover@1.8.2': - resolution: {integrity: sha512-c3uk6t5MG3xluf2LR1adOGnCsKchfRqzB7K9/fyBvWXBFyFiV5DWXdc2NpnzvB0Z5fQVJMrBiMnpvmzqbVovAA==} + '@zag-js/popover@1.15.0': + resolution: {integrity: sha512-cdzEed3zcGbjSgPQnQnrsuXo2hVVslmSNwQbU5dHcNzG1uxxmtPCIMVeBUmGyJbAFF5XQpKCq/7mIr26dT73vw==} - '@zag-js/popper@1.8.2': - resolution: {integrity: sha512-OfZS5KKQZsaENZG1SliM8/shtAKmKrprJuWpn3/kzcOAO/obNZfApld4oa1N5FoePLLTY96qVfdC5W9xygKRDQ==} + '@zag-js/popper@1.15.0': + resolution: {integrity: sha512-Ra/0Ko423KN+8D4+mIFFkeTn9uaHfpxn6UUNIWwZKoiJQvED8DH4dPbLbmvGEoKp6qmisnRHAzi71NLgEhk0Mw==} - '@zag-js/presence@1.8.2': - resolution: {integrity: sha512-aT9PPQAY28HeAxiSeIhnOmlkI+tw0ippxtUWenxQ6B3yyU/ZOGVqc4f7eY418z65lF2yziYvUkZgOdWc6E4kZA==} + '@zag-js/presence@1.15.0': + resolution: {integrity: sha512-hoxXis50pm79PpkY2kA1wdhh4AEo7t7pBv0VsQYZYjmzuFh4V5IMw9oa1EOfBlC6f/A+EMZ9E+xg+EVsB68a8w==} - '@zag-js/progress@1.8.2': - resolution: {integrity: sha512-QUzPe5Xj0zSexKJ1+JCmQnJ+pZ5EeRjMLWSn4cdeUJtzEuPosBLCzJtMzl+uZ/mTg2YVgPC7l6wV6nfMYrco/g==} + '@zag-js/progress@1.15.0': + resolution: {integrity: sha512-/Mz26GR2rOAuoErNOiSGRpvwckTmbCD5nWGDE/aYlVRID13HcsmN15Zk2Jfa4LadqK88aIN8Iy0Sk4elG0+Efw==} - '@zag-js/qr-code@1.8.2': - resolution: {integrity: sha512-W47UwF5jBL3NraobAOC9aYFpMFiXhDzgZ6O3f4Zhd3eDx6BnUvebZ+GOfE71EmJ0fu43mF6o3ial8H4nxj2myQ==} + '@zag-js/qr-code@1.15.0': + resolution: {integrity: sha512-GkGy5k5tk6DIui9lGjDO8+e8TsSVOxEGp1lblPiaRm1ggIh10GhIfCQWGe/x78ezdie8WzxlSrma89suTpaiAQ==} - '@zag-js/radio-group@1.8.2': - resolution: {integrity: sha512-WY0QT4XkqgXD1N1VZG11gTnu7rGaPYizZIq/m1NS0ls6b/tTnwdlrPL2bgBzlJtyuuCeQJXh5pTypCiNoAZurg==} + '@zag-js/radio-group@1.15.0': + resolution: {integrity: sha512-+KTebHUtMsE/YDyGE8wF5VnWfZQp+f2WoAwwzBjfhPpRxXbOUMDo0pZEEr3yxkSvQ9hgCcBhMKH8pEk0SPxvjQ==} - '@zag-js/rating-group@1.8.2': - resolution: {integrity: sha512-azCMgF7FAyvDJ+fcAYzFQHhZpeydPW6h7JvYIvLsz/K609D1HJT85gtCzG+drgBhE4tRyvFdYKDkTCvOpVnkGA==} + '@zag-js/rating-group@1.15.0': + resolution: {integrity: sha512-omGKN97FhplFwBX9J/Mj7BCZuwFXSXssSVTKU7Yp2d1Cmxhez4+Ju7KdSRNnIoWB4OxFCxwZyaAPTcg3E0Pjrg==} - '@zag-js/react@1.8.2': - resolution: {integrity: sha512-Fz9WR6wZQOAxCLSTSmUnGL+VH2/HVxvdlOKOHoUrJ0+9QOmlGrZf+mxpJuGgqUW3RyMzzpHfly8TKZkqHRYd3g==} + '@zag-js/react@1.15.0': + resolution: {integrity: sha512-YSp9QBkdeBfZt4nVhJW+CUd5sNEEVAuwkmoZWDFUoDoWSAXwzSKuHCmTm5/8DaXg1IZD2bMrXgMNDqZv2x0hZw==} peerDependencies: react: '>=18.0.0' react-dom: '>=18.0.0' - '@zag-js/rect-utils@1.8.2': - resolution: {integrity: sha512-RWgPe+MOtOJaWjvlGD1Cf1snmD+hem1mfxXqM3XTCZCjuAlqAzHSCyuBUDia96nU0YGgAxYuloQLh8HFLJcVPA==} + '@zag-js/rect-utils@1.15.0': + resolution: 
{integrity: sha512-sjAn78x1t3XiDG3NT8SoFfyO0u7/SEJU5RKRhMgjTPoOLXTzZj+lu2d5N4cUw0uZTfeGb/ormObSchMQVhFgYQ==} - '@zag-js/remove-scroll@1.8.2': - resolution: {integrity: sha512-zJvLCKcb1yWEdWCP+cDhnYTY1MyoNzuiYOwWTh2YiktQYC0zpd2KDbd+jdhSWIpbIdV22UMuy4sDfFpx6i/mqA==} + '@zag-js/remove-scroll@1.15.0': + resolution: {integrity: sha512-vdWSAdgY8wJ7s4YeaKwTMwmZiRMBxCehmdktSxBWvwtAjU1cM3UWvjmZ9E6INJrQXxH9vDpe/rpFSyv1guIQIw==} - '@zag-js/scroll-snap@1.8.2': - resolution: {integrity: sha512-kyM4ZsRvq5WuJJZVr1TQ1xjuso0ANhySMtILH1kC9EFGIOwZegnIpZt5K1rf5NBFmBrcBjUl+lEKwySRNFauhw==} + '@zag-js/scroll-snap@1.15.0': + resolution: {integrity: sha512-/LfBlsjoR4tVL3Djus3k9jKLhwC2ApdHTACxEc72TAewoPe4M8icnSDLXmKHvwwOhzK0HlFz8wGm6ZncAbQbuA==} - '@zag-js/select@1.8.2': - resolution: {integrity: sha512-ZsBU7kGp8TX4gNavmiTWz9cB+6KgqHXxSwgARnaYUBsYhpdDG2SYfzgyfGAYcAv4ejNTFEfvNk89h+Kpz4CeOg==} + '@zag-js/select@1.15.0': + resolution: {integrity: sha512-4urUBADzhrsGEO/UsqHdjsgmDdF15Zzeid3ejEbIMTrkt2/mMMcQ1CShuxtsWqm2EUBz/N1kOcZlE6Tq69n7Xg==} - '@zag-js/signature-pad@1.8.2': - resolution: {integrity: sha512-Jl3kRbxo3fkey9uqdVDyGROlECa3MpOXaMWDzO58vodrOjjLnZPO1VPF4xvjG5LUsEOGx54R97Tpc2hS3t93Pw==} + '@zag-js/signature-pad@1.15.0': + resolution: {integrity: sha512-5Tj8vkrRxEkSV417oR2qdy+TRgDmS3W8dY7xsIjpbBf/kqkt/8Uo4JpaVH2vwQAFw9AwEFogBh9i6dHcXMy0rA==} - '@zag-js/slider@1.8.2': - resolution: {integrity: sha512-+tncZezgA4FVHV6M7a6lV3cPJUa5OsP7ouXkYGw7Z3cvOoFLaL+bxaCe/UHouRTKqoZj4ImR83x85xcIj50e1g==} + '@zag-js/slider@1.15.0': + resolution: {integrity: sha512-NYIsn3GKXIoPmvkDXsQmw9wdYg3QHbYHXnZ8Ewl2fVubN7S5mDlHSZs2iDVsBvX+a4RChWFRO6JHX8E1+BncOg==} - '@zag-js/splitter@1.8.2': - resolution: {integrity: sha512-jcr382kBA/pRrQu04PVqB2U4Tn32wBCbJMX4UC/tmuVTP5RwQrA4WaDs21CelfntI0qEbzCMxFfYvbU7+ma7iw==} + '@zag-js/splitter@1.15.0': + resolution: {integrity: sha512-Xnedl+cpnD/hv9m+GOYCK5K2xRxbs4xuP/EajYtgVcDw8E1X5cBmxHa1hCrp7BMgb2xYCvZ5et4hnmZfb+1X9g==} - '@zag-js/steps@1.8.2': - resolution: {integrity: sha512-iCwaiT6q0GyhZCnHH9bwmQfYGqVmN5ObF+efV2eYDVsuICKe/PlEHL7H3gRClJR6x6FehXmYYI/gCI/PLzsuHg==} + '@zag-js/steps@1.15.0': + resolution: {integrity: sha512-VoIDcDIEErZawmW2m0yTGlffqjfRuSwR37K9LdSRy8Q4Qzz3wV7jASaTjMhTya1hlreJ7tJg+Qbjqowvw9GndA==} - '@zag-js/store@1.8.2': - resolution: {integrity: sha512-Q/sg8L5B3lbX1MWFJNhE5bcPzJrwhRcgDGtvKf8KDKcbcirhF5HiXUbbE4jvav52QVQYKru+WnOJ8WVj5Bi3tA==} + '@zag-js/store@1.15.0': + resolution: {integrity: sha512-ecqjcy3b1GsULpsT8RVJV9KDaikajRN0XRg48HMvaGkaPIvxI6esyrE6RKnShuqr2eVXIPghgBnCnrJUev4UlA==} - '@zag-js/switch@1.8.2': - resolution: {integrity: sha512-WYgtfzponocm4rrJcG4CNy1xsOwOXZ1yE9NBNKvew2Cj5yZLpTQLcjJBlWR5VjZ3Tgx+3D/F2nmBYzVFtU8zyw==} + '@zag-js/switch@1.15.0': + resolution: {integrity: sha512-2CaAUTi7jM4lJjCYoSE1HWlFPCifI5GR+hufWOCYKpanf8VA/LM+t/a2Aq5QoBsWdcQv3B9mHxF/aVTDbnCKPQ==} - '@zag-js/tabs@1.8.2': - resolution: {integrity: sha512-aM7gx9aj1DcyTV6T5H7okMHWBhi/0jdjhUhFRWWSdYxiYvpveBhVK+Tvg9Nq9GBqXZEgg8E1hxuLgPQUZv7QBQ==} + '@zag-js/tabs@1.15.0': + resolution: {integrity: sha512-voHWpibC1TKLmbAJfixOesxrCio7wK+gdLRvh7Xh5u+3VSsT2fP2wEw3ySkJbpw3MpEE7R2OWkInbCV/SwPcsA==} - '@zag-js/tags-input@1.8.2': - resolution: {integrity: sha512-9DF2pXz6a6lX5IiCwg8ug0TSLZ3FILIHUaX9WNBSx7afDlCMH36UgKhyfs2Xhl9gliVC/6a0Tr2sX5VDEYCe7g==} + '@zag-js/tags-input@1.15.0': + resolution: {integrity: sha512-CB60z+/I/Nso1gwatTO1qrk4XITxDd4qtRD+l6fuuKyOkZGgKm0AP0W+/6qUuOvtWIuY6fas3yZHFmF2eEZ9vQ==} - '@zag-js/time-picker@1.8.2': - resolution: {integrity: 
sha512-RdAPrRBeuiCL7m4PdEZOR6YzfQfOeNElgjEAVLZgUTu4WEhLt/XVdjaOuUQtiuLW4ukT72wNVWi0S+NBCHerIw==} + '@zag-js/time-picker@1.15.0': + resolution: {integrity: sha512-4S02433X88X3MW/BxaFJiWna4BIRXsAdrmDcBb0PZ8dln29DUmpD8YHcFtONsKvmCAmrbO7Gr65n86nQwK8zeg==} peerDependencies: '@internationalized/date': '>=3.0.0' - '@zag-js/timer@1.8.2': - resolution: {integrity: sha512-EUqVacZyrKuvzDFHRZLYjDzNwMyr/5cQCu4m1Da4nv7hvqivDvofU2HUUf7mi7isuYuRaRAZ6sFQqknmvfbKQQ==} + '@zag-js/timer@1.15.0': + resolution: {integrity: sha512-gDsYm4C9yju7g/r5u7n7mRQ2UY7diXXVbbLFr5Ja+0iUXgbD+uoSZEt9HypVc5TL9NWEEwn5/tut36owEeW4rw==} - '@zag-js/toast@1.8.2': - resolution: {integrity: sha512-ew+lfy8y5j4HWj5Ir9RoSfQKlbZnmGnn1r8GHMBhQXegWVGWAb04n4sp7t/e656iBif9HpLm3+/SUwOdCPIiJg==} + '@zag-js/toast@1.15.0': + resolution: {integrity: sha512-0RupMCXyGr7/La4Zlei7VqBF0VPNJelGd7zimLboe+IKZyy4Ypi/N2IX14rl8JZQDsDEgkLUl33xrSk/9RW2nQ==} - '@zag-js/toggle-group@1.8.2': - resolution: {integrity: sha512-kBvFQtUJ70PpqJ6aA9uLCXLvSTiUMhzX3GkJbmTxffu2BdVKUF5OEKW3x9VpYdPeekBnayCXoGdW7WEOkgpYGw==} + '@zag-js/toggle-group@1.15.0': + resolution: {integrity: sha512-992vMz/2sriLrUKI3LpT/01kCGTbPGLgGLibiHRt562i0v9+2tV+GiY2jBctHZjJaKPrzBY3H0l8CCCvDj8gng==} - '@zag-js/toggle@1.8.2': - resolution: {integrity: sha512-2EebV04Hv25ex1jQVa1Cjb4A85qcC6kvABn4qR6wZooxf5Ua72C9sdiEjrAvMhDGAWaa37JuxlyYs+sZG1l0Lw==} + '@zag-js/toggle@1.15.0': + resolution: {integrity: sha512-mMSQ1+f1hOMp/7gLA7rTeiSNyeZxsCjRxP4XnTBY4BxJ5LswLuhem9CplBwaVthkhY1Y/5f3HHu80LBcfF+BVQ==} - '@zag-js/tooltip@1.8.2': - resolution: {integrity: sha512-FqDq4H3PFnEJt96JCr4dap3Pkcq2D0Gb/G5G5gG3QAs7kOIHL2Jpq1CGCxE3EpmQOFee1HwyokC6R4Q4kot1Nw==} + '@zag-js/tooltip@1.15.0': + resolution: {integrity: sha512-sOpVECyfdS4RZBx46mSV+RPc9C5k9JvYQYUfoOVWh0E5RLSEz5bQm5xxctKOHfCOv+vJNTfG5gP596B1r2+Fkw==} - '@zag-js/tour@1.8.2': - resolution: {integrity: sha512-67Qw+dYY8ayf1x0ggvU0U0MoS0I/nhVe9JRpabPjYc09123DgGsDA4sdbj6VfCeFW6j3kffn5VEmTm8C3yV8gA==} + '@zag-js/tour@1.15.0': + resolution: {integrity: sha512-EplcxoiE0z9vI0z6675+ABclQ9Mi1YUWhDZOHx7wfjRzpfawmJoBAlNDKzK3wc801d6OxgJx69SPj7ac0BwwwA==} - '@zag-js/tree-view@1.8.2': - resolution: {integrity: sha512-l/JmKjkz/BM59HVscazl8BMJj+suXl+FNRQVZqhyijzlb2PrB5xtgiQNV9XLNM2qHBCub9820Y1YMLyEP5YiwQ==} + '@zag-js/tree-view@1.15.0': + resolution: {integrity: sha512-wqdd+hu1bDOCWtnZ8MarRFHqbZF2t8qKBM3kO42IBq7jTI/93LCkHSlceEPft9dgZ6Ea9km0YJMHhoTqCPZ/fw==} - '@zag-js/types@1.8.1': - resolution: {integrity: sha512-gJU3UlRccL2N4ukG4xEtetAr/fiuFBxpG5IKZ/Pr0zz8Z17LpdhK7ozyn9SU7y9W6YOcngByAgNgz+nRzmu5aQ==} + '@zag-js/types@1.15.0': + resolution: {integrity: sha512-lV2ov2M07BlmjDUCSwBeHxPApHI3oAiLytG94AqcYvQ0BtsCRo5T60yRQ0syFc6fHf0e9+kwt89uoIgfGFYfmw==} - '@zag-js/types@1.8.2': - resolution: {integrity: sha512-J+94HhFAPOBchNdGcmvqjB8nbQFgKHcqGoPl5vNTKlcoibN0yFjn4XFZoQU6uCf8sPhNg6NUNTkluR5YjybyJA==} + '@zag-js/utils@1.15.0': + resolution: {integrity: sha512-XctFny5H8C00BsougV40Yp0qVEj9M2d/NRme7B33mon9wG+3hscZwP6miJmF6BYI5Pgu6e2P0Sv45FddQU1Tkg==} - '@zag-js/utils@1.8.2': - resolution: {integrity: sha512-7HnRAQ7+pR00c4BQChulTdf6G1gJ0NqV4mMKd9UXk4/E7GLYinUdBNAZ3jZCdHDrio3+2zIlNvpzkO3G4pVjlw==} + accepts@2.0.0: + resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} + engines: {node: '>= 0.6'} acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} @@ -1766,10 +1858,18 @@ packages: base16@1.0.0: resolution: {integrity: 
sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==} + big-integer@1.6.52: + resolution: {integrity: sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==} + engines: {node: '>=0.6'} + binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} + body-parser@2.2.0: + resolution: {integrity: sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==} + engines: {node: '>=18'} + brace-expansion@1.1.11: resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} @@ -1789,6 +1889,10 @@ packages: resolution: {integrity: sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==} engines: {node: '>=6'} + bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + c12@1.11.1: resolution: {integrity: sha512-KDU0TvSvVdaYcQKQ6iPHATGz/7p/KiVjPg4vQrB6Jg/wX9R0yl5RZxWm9IoZqaIHD2+6PZd81+KMGwRr/lRIUg==} peerDependencies: @@ -1867,8 +1971,8 @@ packages: character-reference-invalid@2.0.1: resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} - chart.js@4.4.8: - resolution: {integrity: sha512-IkGZlVpXP+83QpMm4uxEiGqSI7jFizwVtF3+n5Pc3k7sMO+tkd0qxh2OzLhenM0K80xtmAONWGBn082EiBQSDA==} + chart.js@4.4.9: + resolution: {integrity: sha512-EyZ9wWKgpAU0fLJ43YAEIF8sr5F2W3LqbS40ZJyHIner2lY14ufqv2VMp69MAiZ2rpwxEUxEhIH/0U3xyRynxg==} engines: {pnpm: '>=8'} chartjs-plugin-annotation@3.1.0: @@ -1954,9 +2058,21 @@ packages: resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} engines: {node: ^14.18.0 || >=16.10.0} + content-disposition@1.0.0: + resolution: {integrity: sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==} + engines: {node: '>= 0.6'} + + content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + convert-source-map@1.9.0: resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==} + cookie-signature@1.2.2: + resolution: {integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==} + engines: {node: '>=6.6.0'} + cookie@0.7.2: resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} engines: {node: '>= 0.6'} @@ -1964,6 +2080,10 @@ packages: core-js-compat@3.41.0: resolution: {integrity: sha512-RFsU9LySVue9RTwdDVX/T0e2Y6jRYWXERKElIjpuEOEnxaXffI0X7RUwVzfYLfzuLXSNJDYoRYUAmRUcyln20A==} + cors@2.8.5: + resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==} + engines: {node: '>= 0.10'} + cosmiconfig@7.1.0: resolution: {integrity: sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==} engines: {node: '>=10'} @@ -1974,6 +2094,9 @@ packages: cross-fetch@3.2.0: resolution: {integrity: sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==} + cross-fetch@4.0.0: + resolution: {integrity: 
sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g==} + cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} @@ -2114,6 +2237,10 @@ packages: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + depd@2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + dequal@2.0.3: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} @@ -2148,6 +2275,9 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + ee-first@1.1.1: + resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + electron-to-chromium@1.5.123: resolution: {integrity: sha512-refir3NlutEZqlKaBLK0tzlVLe5P2wDKS7UQt/3SpibizgsRAPOsqQC3ffw1nlv3ze5gjRQZYHoPymgVZkplFA==} @@ -2160,6 +2290,10 @@ packages: emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + encodeurl@2.0.0: + resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} + engines: {node: '>= 0.8'} + error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} @@ -2210,6 +2344,9 @@ packages: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} + escape-html@1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + escape-string-regexp@1.0.5: resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} engines: {node: '>=0.8.0'} @@ -2222,31 +2359,58 @@ packages: resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} - eslint-config-prettier@10.1.1: - resolution: {integrity: sha512-4EQQr6wXwS+ZJSzaR5ZCrYgLxqvUjdXctaEtBqHcbkW944B1NQyO4qpdHQbXBONfwxXdkAY81HH4+LUfrg+zPw==} + eslint-compat-utils@0.6.5: + resolution: {integrity: sha512-vAUHYzue4YAa2hNACjB8HvUQj5yehAZgiClyFVVom9cP8z5NSFq3PwB/TtJslN2zAMgRX6FCFCjYBbQh71g5RQ==} + engines: {node: '>=12'} + peerDependencies: + eslint: '>=6.0.0' + + eslint-config-prettier@10.1.2: + resolution: {integrity: sha512-Epgp/EofAUeEpIdZkW60MHKvPyru1ruQJxPL+WIycnaPApuseK0Zpkrh/FwL9oIpQvIhJwV7ptOy0DWUjTlCiA==} hasBin: true peerDependencies: eslint: '>=7.0.0' + eslint-json-compat-utils@0.2.1: + resolution: {integrity: sha512-YzEodbDyW8DX8bImKhAcCeu/L31Dd/70Bidx2Qex9OFUtgzXLqtfWL4Hr5fM/aCCB8QUZLuJur0S9k6UfgFkfg==} + engines: {node: '>=12'} + peerDependencies: + '@eslint/json': '*' + eslint: '*' + jsonc-eslint-parser: ^2.4.0 + peerDependenciesMeta: + '@eslint/json': + optional: true + + eslint-plugin-i18next@6.1.1: + resolution: {integrity: sha512-/Vy6BfX44njxpRnbJm7bbph0KaNJF2eillqN5W+u03hHuxmh9BjtjdPSrI9HPtyoEbG4j5nBn9gXm/dg99mz3Q==} + engines: {node: '>=0.10.0'} + + eslint-plugin-jsonc@2.20.1: + resolution: {integrity: 
sha512-gUzIwQHXx7ZPypUoadcyRi4WbHW2TPixDr0kqQ4miuJBU0emJmyGTlnaT3Og9X2a8R1CDayN9BFSq5weGWbTng==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: '>=6.0.0' + eslint-plugin-jsx-a11y@6.10.2: resolution: {integrity: sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==} engines: {node: '>=4.0'} peerDependencies: eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9 - eslint-plugin-perfectionist@4.10.1: - resolution: {integrity: sha512-GXwFfL47RfBLZRGQdrvGZw9Ali2T2GPW8p4Gyj2fyWQ9396R/HgJMf0m9kn7D6WXRwrINfTDGLS+QYIeok9qEg==} + eslint-plugin-perfectionist@4.12.3: + resolution: {integrity: sha512-V0dmpq6fBbn0BYofHsiRuuY9wgkKMDkdruM0mIRBIJ8XZ8vEaTAZqFsywm40RuWNVnduWBt5HO1ZZ+flE2yqjg==} engines: {node: ^18.0.0 || >=20.0.0} peerDependencies: eslint: '>=8.45.0' - eslint-plugin-prettier@5.2.3: - resolution: {integrity: sha512-qJ+y0FfCp/mQYQ/vWQ3s7eUlFEL4PyKfAJxsnYTJ4YT73nsJBWqmEpFryxV9OeUiqmsTsYJ5Y+KDNaeP31wrRw==} + eslint-plugin-prettier@5.4.0: + resolution: {integrity: sha512-BvQOvUhkVQM1i63iMETK9Hjud9QhqBnbtT1Zc642p9ynzBuCe5pybkOnvqZIBypXmMlsGcnU4HZ8sCTPfpAexA==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: '@types/eslint': '>=8.0.0' eslint: '>=8.0.0' - eslint-config-prettier: '*' + eslint-config-prettier: '>= 7.0.0 <10.0.0 || >=10.1.0' prettier: '>=3.0.0' peerDependenciesMeta: '@types/eslint': @@ -2260,13 +2424,13 @@ packages: peerDependencies: eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 - eslint-plugin-react-refresh@0.4.19: - resolution: {integrity: sha512-eyy8pcr/YxSYjBoqIFSrlbn9i/xvxUFa8CjzAYo9cFjgGXqq1hyjihcpZvxRLalpaWmueWR81xn7vuKmAFijDQ==} + eslint-plugin-react-refresh@0.4.20: + resolution: {integrity: sha512-XpbHQ2q5gUF8BGOX4dHe+71qoirYMhApEPZ7sfhF/dNnOF1UXnCMGZf79SFTBO7Bz5YEIT4TMieSlJBWhP9WBA==} peerDependencies: eslint: '>=8.40' - eslint-plugin-react@7.37.4: - resolution: {integrity: sha512-BGP0jRmfYyvOyvMoRX/uoUeW+GqNj9y16bPQzqAHf3AYII/tDs+jMN0dBVkl88/OZwNGwrVFxE7riHsXVfy/LQ==} + eslint-plugin-react@7.37.5: + resolution: {integrity: sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==} engines: {node: '>=4'} peerDependencies: eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7 @@ -2289,8 +2453,8 @@ packages: resolution: {integrity: sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - eslint@9.23.0: - resolution: {integrity: sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw==} + eslint@9.26.0: + resolution: {integrity: sha512-Hx0MOjPh6uK9oq9nVsATZKE/Wlbai7KFjfCuw9UHaguDW3x+HF0O5nIi3ud39TWgrTjTO5nHxmL3R1eANinWHQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} hasBin: true peerDependencies: @@ -2303,6 +2467,10 @@ packages: resolution: {integrity: sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + espree@9.6.1: + resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + esprima@4.0.1: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} engines: {node: '>=4'} @@ -2330,10 +2498,32 @@ packages: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: 
'>=0.10.0'} + etag@1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + + eventsource-parser@3.0.1: + resolution: {integrity: sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==} + engines: {node: '>=18.0.0'} + + eventsource@3.0.6: + resolution: {integrity: sha512-l19WpE2m9hSuyP06+FbuUUf1G+R0SFLrtQfbRb9PRr+oimOfxQhgGCbVaXg5IvZyyTThJsxh6L/srkMiCeBPDA==} + engines: {node: '>=18.0.0'} + expect-type@1.2.0: resolution: {integrity: sha512-80F22aiJ3GLyVnS/B3HzgR6RelZVumzj9jkL0Rhz4h0xYbNW9PjlQz5h3J/SShErbXBc295vseR4/MIbVmUbeA==} engines: {node: '>=12.0.0'} + express-rate-limit@7.5.0: + resolution: {integrity: sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==} + engines: {node: '>= 16'} + peerDependencies: + express: ^4.11 || 5 || ^5.0.0-beta.1 + + express@5.1.0: + resolution: {integrity: sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==} + engines: {node: '>= 18'} + extend@3.0.2: resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} @@ -2379,6 +2569,10 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} + finalhandler@2.1.0: + resolution: {integrity: sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==} + engines: {node: '>= 0.8'} + find-root@1.1.0: resolution: {integrity: sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==} @@ -2427,6 +2621,14 @@ packages: resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} engines: {node: '>=0.4.x'} + forwarded@0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + + fresh@2.0.0: + resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==} + engines: {node: '>= 0.8'} + fs-minipass@2.1.0: resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} engines: {node: '>= 8'} @@ -2515,8 +2717,8 @@ packages: engines: {node: '>=0.4.7'} hasBin: true - happy-dom@17.4.4: - resolution: {integrity: sha512-/Pb0ctk3HTZ5xEL3BZ0hK1AqDSAUuRQitOmROPHhfUYEWpmTImwfD8vFDGADmMAX0JYgbcgxWoLFKtsWhcpuVA==} + happy-dom@17.4.6: + resolution: {integrity: sha512-OEV1hDe9i2rFr66+WZNiwy1S8rAJy6bRXmXql68YJDjdfHBRbN76om+qVh68vQACf6y5Bcr90e/oK53RQxsDdg==} engines: {node: '>=18.0.0'} has-bigints@1.1.0: @@ -2576,12 +2778,37 @@ packages: html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + html-parse-stringify@3.0.1: + resolution: {integrity: sha512-KknJ50kTInJ7qIScF3jeaFRpMpE8/lfiTdzf/twXyPBLAGrLRTmkz3AdTnKeh40X8k9L2fdYwEp/42WGXIRGcg==} + html-to-image@1.11.13: resolution: {integrity: sha512-cuOPoI7WApyhBElTTb9oqsawRvZ0rHhaHwghRLlTuffoD1B2aDemlCruLeZrUIIdvG7gs9xeELEPm6PhuASqrg==} html-url-attributes@3.0.1: resolution: {integrity: sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==} + http-errors@2.0.0: + resolution: {integrity: 
sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} + engines: {node: '>= 0.8'} + + i18next-browser-languagedetector@8.1.0: + resolution: {integrity: sha512-mHZxNx1Lq09xt5kCauZ/4bsXOEA2pfpwSoU11/QTJB+pD94iONFwp+ohqi///PwiFvjFOxe1akYCdHyFo1ng5Q==} + + i18next-http-backend@3.0.2: + resolution: {integrity: sha512-PdlvPnvIp4E1sYi46Ik4tBYh/v/NbYfFFgTjkwFl0is8A18s7/bx9aXqsrOax9WUbeNS6mD2oix7Z0yGGf6m5g==} + + i18next@25.1.2: + resolution: {integrity: sha512-SP63m8LzdjkrAjruH7SCI3ndPSgjt4/wX7ouUUOzCW/eY+HzlIo19IQSfYA9X3qRiRP1SYtaTsg/Oz/PGsfD8w==} + peerDependencies: + typescript: ^5 + peerDependenciesMeta: + typescript: + optional: true + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + ignore@5.3.2: resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} engines: {node: '>= 4'} @@ -2598,6 +2825,9 @@ packages: resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} engines: {node: '>=8'} + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + inline-style-parser@0.2.4: resolution: {integrity: sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==} @@ -2609,6 +2839,10 @@ packages: resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} engines: {node: '>=12'} + ipaddr.js@1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + is-alphabetical@1.0.4: resolution: {integrity: sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==} @@ -2715,6 +2949,9 @@ packages: resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} + is-promise@4.0.0: + resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + is-regex@1.2.1: resolution: {integrity: sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==} engines: {node: '>= 0.4'} @@ -2831,6 +3068,10 @@ packages: json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + jsonc-eslint-parser@2.4.0: + resolution: {integrity: sha512-WYDyuc/uFcGp6YtM2H0uKmUwieOuzeE/5YocFJLnLfclZ4inf3mRn8ZVy1s7Hxji7Jxm6Ss8gqpexD/GlKoGgg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + jsonpointer@5.0.1: resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} engines: {node: '>=0.10.0'} @@ -2965,9 +3206,17 @@ packages: mdast-util-to-string@4.0.0: resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==} + media-typer@1.1.0: + resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==} + engines: {node: '>= 0.8'} + memoize-one@6.0.0: resolution: {integrity: sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==} + 
merge-descriptors@2.0.0: + resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==} + engines: {node: '>=18'} + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -3064,10 +3313,18 @@ packages: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} + mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} + mime-types@3.0.1: + resolution: {integrity: sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==} + engines: {node: '>= 0.6'} + min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} @@ -3118,8 +3375,8 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - msw@2.7.3: - resolution: {integrity: sha512-+mycXv8l2fEAjFZ5sjrtjJDmm2ceKGjrNbBr1durRg6VkU9fNUE/gsmQ51hWbHqs+l35W1iM+ZsmOD9Fd6lspw==} + msw@2.7.6: + resolution: {integrity: sha512-P+rwn43ktxN8ghcl8q+hSAUlEi0PbJpDhGmDkw4zeUnRj3hBCVynWD+dTu38yLYKCE9ZF1OYcvpy7CTBRcqkZA==} engines: {node: '>=18'} hasBin: true peerDependencies: @@ -3144,6 +3401,10 @@ packages: resolution: {integrity: sha512-kKHJhxwpR/Okycz4HhQKKlhWe4ASEfPgkSWNmKFHd7+ezuQlxkA5cM3+XkBPvm1gmHen3w53qsYAv+8GwRrBlg==} engines: {node: '>=18'} + negotiator@1.0.0: + resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} + engines: {node: '>= 0.6'} + neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} @@ -3168,6 +3429,10 @@ packages: node-releases@2.0.19: resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} + node-sql-parser@5.3.10: + resolution: {integrity: sha512-cf+iXXJ9Foz4hBIu+eNNeg207ac6XruA9I9DXEs+jCxeS9t/k9T0GZK8NZngPwkv+P26i3zNFj9jxJU2v3pJnw==} + engines: {node: '>=8'} + normalize-package-data@2.5.0: resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} @@ -3211,6 +3476,13 @@ packages: ohash@1.1.6: resolution: {integrity: sha512-TBu7PtV8YkAZn0tSxobKY2n2aAQva936lhRrj6957aDaCf9IEtqsKbgMzXE/F/sjqYOwmrukeORHNLe5glk7Cg==} + on-finished@2.4.1: + resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} + engines: {node: '>= 0.8'} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + openapi-merge-cli@1.3.2: resolution: {integrity: sha512-f2F5SRwZpcxvEpMm776Lh2XW9XqNuRedMzlCA0usx9JgQmZUhtC24px4zFYc5J/ZYXvNG6mdqdj/D0BnfEy6AA==} hasBin: true @@ -3266,6 +3538,10 @@ packages: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} + parseurl@1.3.3: + resolution: {integrity: 
sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} + path-browserify@1.0.1: resolution: {integrity: sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==} @@ -3291,6 +3567,10 @@ packages: path-to-regexp@6.3.0: resolution: {integrity: sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==} + path-to-regexp@8.2.0: + resolution: {integrity: sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==} + engines: {node: '>=16'} + path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} @@ -3322,6 +3602,10 @@ packages: resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} engines: {node: '>=12'} + pkce-challenge@5.0.0: + resolution: {integrity: sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==} + engines: {node: '>=16.20.0'} + pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} @@ -3374,6 +3658,10 @@ packages: property-information@7.0.0: resolution: {integrity: sha512-7D/qOz/+Y4X/rzSB6jKxKUsQnphO046ei8qxG59mtM3RG3DHgTK81HrxrmoDVINJb8NKT5ZsRbwHvQ6B68Iyhg==} + proxy-addr@2.0.7: + resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} + engines: {node: '>= 0.10'} + proxy-compare@3.0.1: resolution: {integrity: sha512-V9plBAt3qjMlS1+nC8771KNf6oJ12gExvaxnNzN/9yVRLdTv/lc+oJlnSzrdYDAvBfTStPCoiaCOTmTs0adv7Q==} @@ -3393,12 +3681,24 @@ packages: pure-color@1.3.0: resolution: {integrity: sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==} + qs@6.14.0: + resolution: {integrity: sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==} + engines: {node: '>=0.6'} + querystringify@2.2.0: resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + range-parser@1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} + + raw-body@3.0.0: + resolution: {integrity: sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==} + engines: {node: '>= 0.8'} + rc9@2.1.2: resolution: {integrity: sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==} @@ -3416,8 +3716,8 @@ packages: peerDependencies: react: ^18.3.1 - react-hook-form@7.54.2: - resolution: {integrity: sha512-eHpAUgUjWbZocoQYUHposymRb4ZP6d0uwUnooL2uOybA9/3tPUvoAKqEWK1WaSiTxxOfTpffNZP7QwlnM3/gEg==} + react-hook-form@7.56.2: + resolution: {integrity: sha512-vpfuHuQMF/L6GpuQ4c3ZDo+pRYxIi40gQqsCmmfUBwm+oqvBhKhwghCuj2o00YCgSfU6bR9KC/xnQGWm3Gr08A==} engines: {node: '>=18.0.0'} peerDependencies: react: ^16.8.0 || ^17 || ^18 || ^19 @@ -3428,6 +3728,22 @@ packages: react: '>=16.8.1' react-dom: '>=16.8.1' + react-i18next@15.5.1: + resolution: {integrity: sha512-C8RZ7N7H0L+flitiX6ASjq9p5puVJU1Z8VyL3OgM/QOMRf40BMZX+5TkpxzZVcTmOLPX5zlti4InEX5pFyiVeA==} + 
peerDependencies: + i18next: '>= 23.2.3' + react: '>= 16.8.0' + react-dom: '*' + react-native: '*' + typescript: ^5 + peerDependenciesMeta: + react-dom: + optional: true + react-native: + optional: true + typescript: + optional: true + react-icons@5.5.0: resolution: {integrity: sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw==} peerDependencies: @@ -3560,6 +3876,10 @@ packages: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} + requireindex@1.1.0: + resolution: {integrity: sha512-LBnkqsDE7BZKvqylbmn7lTIVdpx4K/QCduRATpO5R+wtPmky/a8pN1bO2D6wXppn1497AJF9mNjqAXr6bdl9jg==} + engines: {node: '>=0.10.5'} + requires-port@1.0.0: resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} @@ -3583,11 +3903,15 @@ packages: robust-predicates@3.0.2: resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} - rollup@4.39.0: - resolution: {integrity: sha512-thI8kNc02yNvnmJp8dr3fNWJ9tCONDhp6TV35X6HkKGGs9E6q7YWCHbe5vKiTa7TAiNcFEmXKj3X/pG2b3ci0g==} + rollup@4.40.1: + resolution: {integrity: sha512-C5VvvgCCyfyotVITIAv+4efVytl5F7wt+/I2i9q9GZcEXW9BP52YYOXC58igUi+LFZVHukErIIqQSWwv/M3WRw==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + router@2.2.0: + resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} + engines: {node: '>= 18'} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -3595,6 +3919,9 @@ packages: resolution: {integrity: sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==} engines: {node: '>=0.4'} + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + safe-push-apply@1.0.0: resolution: {integrity: sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==} engines: {node: '>= 0.4'} @@ -3603,6 +3930,9 @@ packages: resolution: {integrity: sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==} engines: {node: '>= 0.4'} + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + scheduler@0.23.2: resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} @@ -3619,6 +3949,14 @@ packages: engines: {node: '>=10'} hasBin: true + send@1.2.0: + resolution: {integrity: sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==} + engines: {node: '>= 18'} + + serve-static@2.2.0: + resolution: {integrity: sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==} + engines: {node: '>= 18'} + set-function-length@1.2.2: resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} engines: {node: '>= 0.4'} @@ -3634,6 +3972,9 @@ packages: setimmediate@1.0.5: resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==} + setprototypeof@1.2.0: + resolution: {integrity: 
sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} @@ -3781,8 +4122,12 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} - synckit@0.9.2: - resolution: {integrity: sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw==} + synckit@0.11.4: + resolution: {integrity: sha512-Q/XQKRaJiLiFIBNN+mndW7S/RHxvwzuZS6ZwmRzUBqJBv/5QIKCEwkBC8GBf8EQJKYnaFs0wOZbKTXBPj8L9oQ==} + engines: {node: ^14.18.0 || >=16.0.0} + + synckit@0.11.8: + resolution: {integrity: sha512-+XZ+r1XGIJGeQk3VvXhT6xx/VpbHsRzsTkGgF6E5RX9TTXD0118l87puaEBZ566FhqblC6U0d4XnubznJDm30A==} engines: {node: ^14.18.0 || >=16.0.0} tar@6.2.1: @@ -3819,6 +4164,10 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} + toidentifier@1.0.1: + resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} + engines: {node: '>=0.6'} + tough-cookie@4.1.4: resolution: {integrity: sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==} engines: {node: '>=6'} @@ -3867,6 +4216,10 @@ packages: resolution: {integrity: sha512-S/5/0kFftkq27FPNye0XM1e2NsnoD/3FS+pBmbjmmtLT6I+i344KoOf7pvXreaFsDamWeaJX55nczA1m5PsBDg==} engines: {node: '>=16'} + type-is@2.0.1: + resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} + engines: {node: '>= 0.6'} + typed-array-buffer@1.0.3: resolution: {integrity: sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==} engines: {node: '>= 0.4'} @@ -3883,15 +4236,15 @@ packages: resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} engines: {node: '>= 0.4'} - typescript-eslint@8.27.0: - resolution: {integrity: sha512-ZZ/8+Y0rRUMuW1gJaPtLWe4ryHbsPLzzibk5Sq+IFa2aOH1Vo0gPr1fbA6pOnzBke7zC2Da4w8AyCgxKXo3lqA==} + typescript-eslint@8.32.0: + resolution: {integrity: sha512-UMq2kxdXCzinFFPsXc9o2ozIpYCCOiEC46MG3yEh5Vipq6BO27otTtEBZA1fQ66DulEUgE97ucQ/3YY66CPg0A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - typescript@5.5.4: - resolution: {integrity: sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==} + typescript@5.8.3: + resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} engines: {node: '>=14.17'} hasBin: true @@ -3911,8 +4264,8 @@ packages: resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} engines: {node: '>= 0.4'} - undici-types@6.20.0: - resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} unified@11.0.5: resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} @@ -3936,6 
+4289,10 @@ packages: resolution: {integrity: sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==} engines: {node: '>= 4.0.0'} + unpipe@1.0.0: + resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} + engines: {node: '>= 0.8'} + update-browserslist-db@1.1.3: resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} hasBin: true @@ -4001,6 +4358,10 @@ packages: validate-npm-package-license@3.0.4: resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} + vary@1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + vfile-message@4.0.2: resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==} @@ -4017,8 +4378,8 @@ packages: peerDependencies: vite: '>2.0.0-0' - vite@5.4.17: - resolution: {integrity: sha512-5+VqZryDj4wgCs55o9Lp+p8GE78TLVg0lasCH5xFZ4jacZjtqZa6JUw9/p0WeAojaOfncSM6v77InkFPGnvPvg==} + vite@5.4.19: + resolution: {integrity: sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==} engines: {node: ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: @@ -4073,6 +4434,10 @@ packages: jsdom: optional: true + void-elements@3.1.0: + resolution: {integrity: sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==} + engines: {node: '>=0.10.0'} + w3c-keyname@2.2.8: resolution: {integrity: sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==} @@ -4141,6 +4506,9 @@ packages: resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} engines: {node: '>=12'} + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} @@ -4156,6 +4524,11 @@ packages: resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} engines: {node: '>= 6'} + yaml@2.8.0: + resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==} + engines: {node: '>= 14.6'} + hasBin: true + yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} @@ -4172,6 +4545,14 @@ packages: resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==} engines: {node: '>=18'} + zod-to-json-schema@3.24.5: + resolution: {integrity: sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==} + peerDependencies: + zod: ^3.24.1 + + zod@3.24.4: + resolution: {integrity: sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg==} + zustand@4.5.6: resolution: {integrity: sha512-ibr/n1hBzLLj5Y+yUcU7dYw8p6WnIVzdJbnX+1YpaScvZVF2ziugqHs+LAmHw4lWO9c/zRj+K1ncgWDQuthEdQ==} engines: {node: '>=12.7.0'} @@ -4187,8 +4568,8 @@ packages: react: optional: true - zustand@5.0.3: - resolution: 
{integrity: sha512-14fwWQtU3pH4dE0dOpdMiWjddcH+QzKIgk1cl8epwSE7yag43k/AD/m4L6+K7DytAOr9gGBe3/EXj9g7cdostg==} + zustand@5.0.4: + resolution: {integrity: sha512-39VFTN5InDtMd28ZhjLyuTnlytDr9HfwO512Ai4I8ZABCoyAj4F1+sr7sD1jP/+p7k77Iko0Pb5NhgBFDCX0kQ==} engines: {node: '>=12.20.0'} peerDependencies: '@types/react': '>=18.0.0' @@ -4210,13 +4591,13 @@ packages: snapshots: - '@7nohe/openapi-react-query-codegen@1.6.2(commander@12.1.0)(glob@11.0.0)(magicast@0.3.5)(ts-morph@23.0.0)(typescript@5.5.4)': + '@7nohe/openapi-react-query-codegen@1.6.2(commander@12.1.0)(glob@11.0.0)(magicast@0.3.5)(ts-morph@23.0.0)(typescript@5.8.3)': dependencies: - '@hey-api/openapi-ts': 0.52.0(magicast@0.3.5)(typescript@5.5.4) + '@hey-api/openapi-ts': 0.52.0(magicast@0.3.5)(typescript@5.8.3) commander: 12.1.0 glob: 11.0.0 ts-morph: 23.0.0 - typescript: 5.5.4 + typescript: 5.8.3 transitivePeerDependencies: - magicast @@ -4233,62 +4614,66 @@ snapshots: '@types/json-schema': 7.0.15 js-yaml: 4.1.0 - '@ark-ui/react@5.5.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@internationalized/date': 3.7.0 - '@zag-js/accordion': 1.8.2 - '@zag-js/anatomy': 1.8.2 - '@zag-js/auto-resize': 1.8.2 - '@zag-js/avatar': 1.8.2 - '@zag-js/carousel': 1.8.2 - '@zag-js/checkbox': 1.8.2 - '@zag-js/clipboard': 1.8.2 - '@zag-js/collapsible': 1.8.2 - '@zag-js/collection': 1.8.2 - '@zag-js/color-picker': 1.8.2 - '@zag-js/color-utils': 1.8.2 - '@zag-js/combobox': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/date-picker': 1.8.2(@internationalized/date@3.7.0) - '@zag-js/date-utils': 1.8.2(@internationalized/date@3.7.0) - '@zag-js/dialog': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/editable': 1.8.2 - '@zag-js/file-upload': 1.8.2 - '@zag-js/file-utils': 1.8.1 - '@zag-js/focus-trap': 1.8.2 - '@zag-js/highlight-word': 1.8.2 - '@zag-js/hover-card': 1.8.2 - '@zag-js/i18n-utils': 1.8.2 - '@zag-js/menu': 1.8.2 - '@zag-js/number-input': 1.8.2 - '@zag-js/pagination': 1.8.2 - '@zag-js/pin-input': 1.8.2 - '@zag-js/popover': 1.8.2 - '@zag-js/presence': 1.8.2 - '@zag-js/progress': 1.8.2 - '@zag-js/qr-code': 1.8.2 - '@zag-js/radio-group': 1.8.2 - '@zag-js/rating-group': 1.8.2 - '@zag-js/react': 1.8.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@zag-js/select': 1.8.2 - '@zag-js/signature-pad': 1.8.2 - '@zag-js/slider': 1.8.2 - '@zag-js/splitter': 1.8.2 - '@zag-js/steps': 1.8.2 - '@zag-js/switch': 1.8.2 - '@zag-js/tabs': 1.8.2 - '@zag-js/tags-input': 1.8.2 - '@zag-js/time-picker': 1.8.2(@internationalized/date@3.7.0) - '@zag-js/timer': 1.8.2 - '@zag-js/toast': 1.8.2 - '@zag-js/toggle': 1.8.2 - '@zag-js/toggle-group': 1.8.2 - '@zag-js/tooltip': 1.8.2 - '@zag-js/tour': 1.8.2 - '@zag-js/tree-view': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@ark-ui/react@5.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@internationalized/date': 3.8.1 + '@zag-js/accordion': 1.15.0 + '@zag-js/anatomy': 1.15.0 + '@zag-js/angle-slider': 1.15.0 + '@zag-js/auto-resize': 1.15.0 + '@zag-js/avatar': 1.15.0 + '@zag-js/carousel': 1.15.0 + '@zag-js/checkbox': 1.15.0 + '@zag-js/clipboard': 1.15.0 + '@zag-js/collapsible': 1.15.0 + '@zag-js/collection': 1.15.0 + '@zag-js/color-picker': 1.15.0 + '@zag-js/color-utils': 1.15.0 + '@zag-js/combobox': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/date-picker': 1.15.0(@internationalized/date@3.8.1) + '@zag-js/date-utils': 1.15.0(@internationalized/date@3.8.1) + '@zag-js/dialog': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/editable': 1.15.0 + '@zag-js/file-upload': 1.15.0 + '@zag-js/file-utils': 
1.15.0 + '@zag-js/floating-panel': 1.15.0 + '@zag-js/focus-trap': 1.15.0 + '@zag-js/highlight-word': 1.15.0 + '@zag-js/hover-card': 1.15.0 + '@zag-js/i18n-utils': 1.15.0 + '@zag-js/listbox': 1.15.0 + '@zag-js/menu': 1.15.0 + '@zag-js/number-input': 1.15.0 + '@zag-js/pagination': 1.15.0 + '@zag-js/password-input': 1.15.0 + '@zag-js/pin-input': 1.15.0 + '@zag-js/popover': 1.15.0 + '@zag-js/presence': 1.15.0 + '@zag-js/progress': 1.15.0 + '@zag-js/qr-code': 1.15.0 + '@zag-js/radio-group': 1.15.0 + '@zag-js/rating-group': 1.15.0 + '@zag-js/react': 1.15.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@zag-js/select': 1.15.0 + '@zag-js/signature-pad': 1.15.0 + '@zag-js/slider': 1.15.0 + '@zag-js/splitter': 1.15.0 + '@zag-js/steps': 1.15.0 + '@zag-js/switch': 1.15.0 + '@zag-js/tabs': 1.15.0 + '@zag-js/tags-input': 1.15.0 + '@zag-js/time-picker': 1.15.0(@internationalized/date@3.8.1) + '@zag-js/timer': 1.15.0 + '@zag-js/toast': 1.15.0 + '@zag-js/toggle': 1.15.0 + '@zag-js/toggle-group': 1.15.0 + '@zag-js/tooltip': 1.15.0 + '@zag-js/tour': 1.15.0 + '@zag-js/tree-view': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) @@ -4298,6 +4683,12 @@ snapshots: js-tokens: 4.0.0 picocolors: 1.1.1 + '@babel/code-frame@7.27.1': + dependencies: + '@babel/helper-validator-identifier': 7.27.1 + js-tokens: 4.0.0 + picocolors: 1.1.1 + '@babel/generator@7.17.7': dependencies: '@babel/types': 7.17.0 @@ -4340,6 +4731,8 @@ snapshots: '@babel/helper-validator-identifier@7.25.9': {} + '@babel/helper-validator-identifier@7.27.1': {} + '@babel/parser@7.26.10': dependencies: '@babel/types': 7.26.10 @@ -4408,15 +4801,15 @@ snapshots: '@chakra-ui/anatomy@2.3.4': {} - '@chakra-ui/react@3.15.1(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@chakra-ui/react@3.20.0(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - '@ark-ui/react': 5.5.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@ark-ui/react': 5.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@emotion/is-prop-valid': 1.3.1 '@emotion/react': 11.14.0(@types/react@18.3.19)(react@18.3.1) '@emotion/serialize': 1.3.3 '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@18.3.1) '@emotion/utils': 1.4.2 - '@pandacss/is-valid-prop': 0.41.0 + '@pandacss/is-valid-prop': 0.53.6 csstype: 3.1.3 fast-safe-stringify: 2.1.1 react: 18.3.1 @@ -4429,13 +4822,6 @@ snapshots: '@codemirror/view': 6.36.4 '@lezer/common': 1.2.3 - '@codemirror/commands@6.8.0': - dependencies: - '@codemirror/language': 6.11.0 - '@codemirror/state': 6.5.2 - '@codemirror/view': 6.36.4 - '@lezer/common': 1.2.3 - '@codemirror/commands@6.8.1': dependencies: '@codemirror/language': 6.11.0 @@ -4623,18 +5009,23 @@ snapshots: '@esbuild/win32-x64@0.21.5': optional: true - '@eslint-community/eslint-utils@4.5.1(eslint@9.23.0(jiti@1.21.7))': + '@eslint-community/eslint-utils@4.5.1(eslint@9.26.0(jiti@1.21.7))': + dependencies: + eslint: 9.26.0(jiti@1.21.7) + eslint-visitor-keys: 3.4.3 + + '@eslint-community/eslint-utils@4.7.0(eslint@9.26.0(jiti@1.21.7))': dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) eslint-visitor-keys: 3.4.3 '@eslint-community/regexpp@4.12.1': {} - '@eslint/compat@1.2.7(eslint@9.23.0(jiti@1.21.7))': + '@eslint/compat@1.2.9(eslint@9.26.0(jiti@1.21.7))': optionalDependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) - '@eslint/config-array@0.19.2': + 
'@eslint/config-array@0.20.0': dependencies: '@eslint/object-schema': 2.1.6 debug: 4.4.0 @@ -4642,9 +5033,9 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/config-helpers@0.2.0': {} + '@eslint/config-helpers@0.2.2': {} - '@eslint/core@0.12.0': + '@eslint/core@0.13.0': dependencies: '@types/json-schema': 7.0.15 @@ -4662,34 +5053,43 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/js@9.23.0': {} + '@eslint/js@9.26.0': {} '@eslint/object-schema@2.1.6': {} - '@eslint/plugin-kit@0.2.7': + '@eslint/plugin-kit@0.2.8': dependencies: - '@eslint/core': 0.12.0 + '@eslint/core': 0.13.0 levn: 0.4.1 '@floating-ui/core@1.6.9': dependencies: '@floating-ui/utils': 0.2.9 + '@floating-ui/core@1.7.1': + dependencies: + '@floating-ui/utils': 0.2.9 + '@floating-ui/dom@1.6.13': dependencies: '@floating-ui/core': 1.6.9 '@floating-ui/utils': 0.2.9 + '@floating-ui/dom@1.7.1': + dependencies: + '@floating-ui/core': 1.7.1 + '@floating-ui/utils': 0.2.9 + '@floating-ui/utils@0.2.9': {} - '@hey-api/openapi-ts@0.52.0(magicast@0.3.5)(typescript@5.5.4)': + '@hey-api/openapi-ts@0.52.0(magicast@0.3.5)(typescript@5.8.3)': dependencies: '@apidevtools/json-schema-ref-parser': 11.6.4 c12: 1.11.1(magicast@0.3.5) camelcase: 8.0.0 commander: 12.1.0 handlebars: 4.7.8 - typescript: 5.5.4 + typescript: 5.8.3 transitivePeerDependencies: - magicast @@ -4706,17 +5106,17 @@ snapshots: '@humanwhocodes/retry@0.4.2': {} - '@inquirer/confirm@5.1.8(@types/node@22.13.11)': + '@inquirer/confirm@5.1.8(@types/node@22.15.14)': dependencies: - '@inquirer/core': 10.1.9(@types/node@22.13.11) - '@inquirer/type': 3.0.5(@types/node@22.13.11) + '@inquirer/core': 10.1.9(@types/node@22.15.14) + '@inquirer/type': 3.0.5(@types/node@22.15.14) optionalDependencies: - '@types/node': 22.13.11 + '@types/node': 22.15.14 - '@inquirer/core@10.1.9(@types/node@22.13.11)': + '@inquirer/core@10.1.9(@types/node@22.15.14)': dependencies: '@inquirer/figures': 1.0.11 - '@inquirer/type': 3.0.5(@types/node@22.13.11) + '@inquirer/type': 3.0.5(@types/node@22.15.14) ansi-escapes: 4.3.2 cli-width: 4.1.0 mute-stream: 2.0.0 @@ -4724,19 +5124,19 @@ snapshots: wrap-ansi: 6.2.0 yoctocolors-cjs: 2.1.2 optionalDependencies: - '@types/node': 22.13.11 + '@types/node': 22.15.14 '@inquirer/figures@1.0.11': {} - '@inquirer/type@3.0.5(@types/node@22.13.11)': + '@inquirer/type@3.0.5(@types/node@22.15.14)': optionalDependencies: - '@types/node': 22.13.11 + '@types/node': 22.15.14 - '@internationalized/date@3.7.0': + '@internationalized/date@3.8.1': dependencies: '@swc/helpers': 0.5.15 - '@internationalized/number@3.6.0': + '@internationalized/number@3.6.2': dependencies: '@swc/helpers': 0.5.15 @@ -4790,6 +5190,21 @@ snapshots: '@marijn/find-cluster-break@1.0.2': {} + '@modelcontextprotocol/sdk@1.11.0': + dependencies: + content-type: 1.0.5 + cors: 2.8.5 + cross-spawn: 7.0.6 + eventsource: 3.0.6 + express: 5.1.0 + express-rate-limit: 7.5.0(express@5.1.0) + pkce-challenge: 5.0.0 + raw-body: 3.0.0 + zod: 3.24.4 + zod-to-json-schema: 3.24.5(zod@3.24.4) + transitivePeerDependencies: + - supports-color + '@mswjs/interceptors@0.37.6': dependencies: '@open-draft/deferred-promise': 2.2.0 @@ -4820,79 +5235,79 @@ snapshots: '@open-draft/until@2.1.0': {} - '@pandacss/is-valid-prop@0.41.0': {} + '@pandacss/is-valid-prop@0.53.6': {} '@pkgjs/parseargs@0.11.0': optional: true - '@pkgr/core@0.1.2': {} + '@pkgr/core@0.2.4': {} '@remix-run/router@1.23.0': {} - '@rollup/rollup-android-arm-eabi@4.39.0': + '@rollup/rollup-android-arm-eabi@4.40.1': optional: true - 
'@rollup/rollup-android-arm64@4.39.0': + '@rollup/rollup-android-arm64@4.40.1': optional: true - '@rollup/rollup-darwin-arm64@4.39.0': + '@rollup/rollup-darwin-arm64@4.40.1': optional: true - '@rollup/rollup-darwin-x64@4.39.0': + '@rollup/rollup-darwin-x64@4.40.1': optional: true - '@rollup/rollup-freebsd-arm64@4.39.0': + '@rollup/rollup-freebsd-arm64@4.40.1': optional: true - '@rollup/rollup-freebsd-x64@4.39.0': + '@rollup/rollup-freebsd-x64@4.40.1': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.39.0': + '@rollup/rollup-linux-arm-gnueabihf@4.40.1': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.39.0': + '@rollup/rollup-linux-arm-musleabihf@4.40.1': optional: true - '@rollup/rollup-linux-arm64-gnu@4.39.0': + '@rollup/rollup-linux-arm64-gnu@4.40.1': optional: true - '@rollup/rollup-linux-arm64-musl@4.39.0': + '@rollup/rollup-linux-arm64-musl@4.40.1': optional: true - '@rollup/rollup-linux-loongarch64-gnu@4.39.0': + '@rollup/rollup-linux-loongarch64-gnu@4.40.1': optional: true - '@rollup/rollup-linux-powerpc64le-gnu@4.39.0': + '@rollup/rollup-linux-powerpc64le-gnu@4.40.1': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.39.0': + '@rollup/rollup-linux-riscv64-gnu@4.40.1': optional: true - '@rollup/rollup-linux-riscv64-musl@4.39.0': + '@rollup/rollup-linux-riscv64-musl@4.40.1': optional: true - '@rollup/rollup-linux-s390x-gnu@4.39.0': + '@rollup/rollup-linux-s390x-gnu@4.40.1': optional: true - '@rollup/rollup-linux-x64-gnu@4.39.0': + '@rollup/rollup-linux-x64-gnu@4.40.1': optional: true - '@rollup/rollup-linux-x64-musl@4.39.0': + '@rollup/rollup-linux-x64-musl@4.40.1': optional: true - '@rollup/rollup-win32-arm64-msvc@4.39.0': + '@rollup/rollup-win32-arm64-msvc@4.40.1': optional: true - '@rollup/rollup-win32-ia32-msvc@4.39.0': + '@rollup/rollup-win32-ia32-msvc@4.40.1': optional: true - '@rollup/rollup-win32-x64-msvc@4.39.0': + '@rollup/rollup-win32-x64-msvc@4.40.1': optional: true - '@stylistic/eslint-plugin@2.13.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@stylistic/eslint-plugin@2.13.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@typescript-eslint/utils': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.27.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + eslint: 9.26.0(jiti@1.21.7) eslint-visitor-keys: 4.2.0 espree: 10.3.0 estraverse: 5.3.0 @@ -4901,52 +5316,51 @@ snapshots: - supports-color - typescript - '@swc/core-darwin-arm64@1.11.12': + '@swc/core-darwin-arm64@1.11.24': optional: true - '@swc/core-darwin-x64@1.11.12': + '@swc/core-darwin-x64@1.11.24': optional: true - '@swc/core-linux-arm-gnueabihf@1.11.12': + '@swc/core-linux-arm-gnueabihf@1.11.24': optional: true - '@swc/core-linux-arm64-gnu@1.11.12': + '@swc/core-linux-arm64-gnu@1.11.24': optional: true - '@swc/core-linux-arm64-musl@1.11.12': + '@swc/core-linux-arm64-musl@1.11.24': optional: true - '@swc/core-linux-x64-gnu@1.11.12': + '@swc/core-linux-x64-gnu@1.11.24': optional: true - '@swc/core-linux-x64-musl@1.11.12': + '@swc/core-linux-x64-musl@1.11.24': optional: true - '@swc/core-win32-arm64-msvc@1.11.12': + '@swc/core-win32-arm64-msvc@1.11.24': optional: true - '@swc/core-win32-ia32-msvc@1.11.12': + '@swc/core-win32-ia32-msvc@1.11.24': optional: true - '@swc/core-win32-x64-msvc@1.11.12': + '@swc/core-win32-x64-msvc@1.11.24': optional: true - '@swc/core@1.11.12(@swc/helpers@0.5.15)': + '@swc/core@1.11.24': dependencies: '@swc/counter': 0.1.3 - '@swc/types': 0.1.19 + '@swc/types': 0.1.21 optionalDependencies: - 
'@swc/core-darwin-arm64': 1.11.12 - '@swc/core-darwin-x64': 1.11.12 - '@swc/core-linux-arm-gnueabihf': 1.11.12 - '@swc/core-linux-arm64-gnu': 1.11.12 - '@swc/core-linux-arm64-musl': 1.11.12 - '@swc/core-linux-x64-gnu': 1.11.12 - '@swc/core-linux-x64-musl': 1.11.12 - '@swc/core-win32-arm64-msvc': 1.11.12 - '@swc/core-win32-ia32-msvc': 1.11.12 - '@swc/core-win32-x64-msvc': 1.11.12 - '@swc/helpers': 0.5.15 + '@swc/core-darwin-arm64': 1.11.24 + '@swc/core-darwin-x64': 1.11.24 + '@swc/core-linux-arm-gnueabihf': 1.11.24 + '@swc/core-linux-arm64-gnu': 1.11.24 + '@swc/core-linux-arm64-musl': 1.11.24 + '@swc/core-linux-x64-gnu': 1.11.24 + '@swc/core-linux-x64-musl': 1.11.24 + '@swc/core-win32-arm64-msvc': 1.11.24 + '@swc/core-win32-ia32-msvc': 1.11.24 + '@swc/core-win32-x64-msvc': 1.11.24 '@swc/counter@0.1.3': {} @@ -4954,36 +5368,44 @@ snapshots: dependencies: tslib: 2.8.1 - '@swc/types@0.1.19': + '@swc/types@0.1.21': dependencies: '@swc/counter': 0.1.3 - '@tanstack/eslint-plugin-query@5.68.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@tanstack/eslint-plugin-query@5.74.7(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@typescript-eslint/utils': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.28.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + eslint: 9.26.0(jiti@1.21.7) transitivePeerDependencies: - supports-color - typescript - '@tanstack/query-core@5.69.0': {} + '@tanstack/query-core@5.75.4': {} - '@tanstack/react-query@5.69.0(react@18.3.1)': + '@tanstack/react-query@5.75.4(react@18.3.1)': dependencies: - '@tanstack/query-core': 5.69.0 + '@tanstack/query-core': 5.75.4 react: 18.3.1 - '@tanstack/react-table@8.21.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@tanstack/react-table@8.21.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - '@tanstack/table-core': 8.21.2 + '@tanstack/table-core': 8.21.3 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@tanstack/table-core@8.21.2': {} + '@tanstack/react-virtual@3.13.8(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@tanstack/virtual-core': 3.13.8 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + + '@tanstack/table-core@8.21.3': {} + + '@tanstack/virtual-core@3.13.8': {} '@testing-library/dom@10.4.0': dependencies: - '@babel/code-frame': 7.26.2 + '@babel/code-frame': 7.27.1 '@babel/runtime': 7.26.10 '@types/aria-query': 5.0.4 aria-query: 5.3.0 @@ -5002,7 +5424,7 @@ snapshots: lodash: 4.17.21 redent: 3.0.0 - '@testing-library/react@16.2.0(@testing-library/dom@10.4.0)(@types/react-dom@18.3.5(@types/react@18.3.19))(@types/react@18.3.19)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@testing-library/react@16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@18.3.5(@types/react@18.3.19))(@types/react@18.3.19)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@babel/runtime': 7.26.10 '@testing-library/dom': 10.4.0 @@ -5120,14 +5542,16 @@ snapshots: '@types/ms@2.1.0': {} - '@types/node@22.13.11': + '@types/node@22.15.14': dependencies: - undici-types: 6.20.0 + undici-types: 6.21.0 '@types/normalize-package-data@2.4.4': {} '@types/parse-json@4.0.2': {} + '@types/pegjs@0.10.6': {} + '@types/prop-types@15.7.14': {} '@types/react-dom@18.3.5(@types/react@18.3.19)': @@ -5155,32 +5579,32 @@ snapshots: '@types/unist@3.0.3': {} - '@typescript-eslint/eslint-plugin@8.27.0(@typescript-eslint/parser@8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4))(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + 
'@typescript-eslint/eslint-plugin@8.32.0(@typescript-eslint/parser@8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/scope-manager': 8.27.0 - '@typescript-eslint/type-utils': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/utils': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/visitor-keys': 8.27.0 - eslint: 9.23.0(jiti@1.21.7) + '@typescript-eslint/parser': 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/scope-manager': 8.32.0 + '@typescript-eslint/type-utils': 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.32.0 + eslint: 9.26.0(jiti@1.21.7) graphemer: 1.4.0 ignore: 5.3.2 natural-compare: 1.4.0 - ts-api-utils: 2.1.0(typescript@5.5.4) - typescript: 5.5.4 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@typescript-eslint/parser@8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@typescript-eslint/scope-manager': 8.27.0 - '@typescript-eslint/types': 8.27.0 - '@typescript-eslint/typescript-estree': 8.27.0(typescript@5.5.4) - '@typescript-eslint/visitor-keys': 8.27.0 + '@typescript-eslint/scope-manager': 8.32.0 + '@typescript-eslint/types': 8.32.0 + '@typescript-eslint/typescript-estree': 8.32.0(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.32.0 debug: 4.4.0 - eslint: 9.23.0(jiti@1.21.7) - typescript: 5.5.4 + eslint: 9.26.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color @@ -5194,14 +5618,19 @@ snapshots: '@typescript-eslint/types': 8.28.0 '@typescript-eslint/visitor-keys': 8.28.0 - '@typescript-eslint/type-utils@8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@typescript-eslint/scope-manager@8.32.0': + dependencies: + '@typescript-eslint/types': 8.32.0 + '@typescript-eslint/visitor-keys': 8.32.0 + + '@typescript-eslint/type-utils@8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@typescript-eslint/typescript-estree': 8.27.0(typescript@5.5.4) - '@typescript-eslint/utils': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) + '@typescript-eslint/typescript-estree': 8.32.0(typescript@5.8.3) + '@typescript-eslint/utils': 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) debug: 4.4.0 - eslint: 9.23.0(jiti@1.21.7) - ts-api-utils: 2.1.0(typescript@5.5.4) - typescript: 5.5.4 + eslint: 9.26.0(jiti@1.21.7) + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 transitivePeerDependencies: - supports-color @@ -5209,7 +5638,9 @@ snapshots: '@typescript-eslint/types@8.28.0': {} - '@typescript-eslint/typescript-estree@8.27.0(typescript@5.5.4)': + '@typescript-eslint/types@8.32.0': {} + + '@typescript-eslint/typescript-estree@8.27.0(typescript@5.8.3)': dependencies: '@typescript-eslint/types': 8.27.0 '@typescript-eslint/visitor-keys': 8.27.0 @@ -5218,12 +5649,12 @@ snapshots: is-glob: 4.0.3 minimatch: 9.0.5 semver: 7.7.1 - ts-api-utils: 2.1.0(typescript@5.5.4) - typescript: 5.5.4 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/typescript-estree@8.28.0(typescript@5.5.4)': + 
'@typescript-eslint/typescript-estree@8.28.0(typescript@5.8.3)': dependencies: '@typescript-eslint/types': 8.28.0 '@typescript-eslint/visitor-keys': 8.28.0 @@ -5232,30 +5663,55 @@ snapshots: is-glob: 4.0.3 minimatch: 9.0.5 semver: 7.7.1 - ts-api-utils: 2.1.0(typescript@5.5.4) - typescript: 5.5.4 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/typescript-estree@8.32.0(typescript@5.8.3)': + dependencies: + '@typescript-eslint/types': 8.32.0 + '@typescript-eslint/visitor-keys': 8.32.0 + debug: 4.4.0 + fast-glob: 3.3.3 + is-glob: 4.0.3 + minimatch: 9.0.5 + semver: 7.7.1 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@typescript-eslint/utils@8.27.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@eslint-community/eslint-utils': 4.5.1(eslint@9.23.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.5.1(eslint@9.26.0(jiti@1.21.7)) '@typescript-eslint/scope-manager': 8.27.0 '@typescript-eslint/types': 8.27.0 - '@typescript-eslint/typescript-estree': 8.27.0(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) - typescript: 5.5.4 + '@typescript-eslint/typescript-estree': 8.27.0(typescript@5.8.3) + eslint: 9.26.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4)': + '@typescript-eslint/utils@8.28.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@eslint-community/eslint-utils': 4.5.1(eslint@9.23.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.5.1(eslint@9.26.0(jiti@1.21.7)) '@typescript-eslint/scope-manager': 8.28.0 '@typescript-eslint/types': 8.28.0 - '@typescript-eslint/typescript-estree': 8.28.0(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) - typescript: 5.5.4 + '@typescript-eslint/typescript-estree': 8.28.0(typescript@5.8.3) + eslint: 9.26.0(jiti@1.21.7) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3)': + dependencies: + '@eslint-community/eslint-utils': 4.7.0(eslint@9.26.0(jiti@1.21.7)) + '@typescript-eslint/scope-manager': 8.32.0 + '@typescript-eslint/types': 8.32.0 + '@typescript-eslint/typescript-estree': 8.32.0(typescript@5.8.3) + eslint: 9.26.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color @@ -5269,353 +5725,358 @@ snapshots: '@typescript-eslint/types': 8.28.0 eslint-visitor-keys: 4.2.0 - '@uiw/codemirror-extensions-basic-setup@4.23.10(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/commands@6.8.0)(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@typescript-eslint/visitor-keys@8.32.0': + dependencies: + '@typescript-eslint/types': 8.32.0 + eslint-visitor-keys: 4.2.0 + + '@uiw/codemirror-extensions-basic-setup@4.23.12(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/commands@6.8.1)(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: '@codemirror/autocomplete': 
6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3) - '@codemirror/commands': 6.8.0 + '@codemirror/commands': 6.8.1 '@codemirror/language': 6.11.0 '@codemirror/lint': 6.8.2 '@codemirror/search': 6.5.6 '@codemirror/state': 6.5.2 '@codemirror/view': 6.36.4 - '@uiw/codemirror-theme-abcdef@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-abcdef@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-abyss@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-abyss@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-androidstudio@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-androidstudio@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-andromeda@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-andromeda@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-atomone@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-atomone@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-aura@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-aura@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 
4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-basic@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-basic@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-bbedit@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-bbedit@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-bespin@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-bespin@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-console@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-console@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-copilot@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-copilot@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-darcula@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-darcula@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 
4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-dracula@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-dracula@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-duotone@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-duotone@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-eclipse@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-eclipse@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-github@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-github@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-gruvbox-dark@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-gruvbox-dark@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-kimbie@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-kimbie@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 
4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-material@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-material@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-monokai-dimmed@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-monokai-dimmed@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-monokai@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-monokai@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-noctis-lilac@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-noctis-lilac@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-nord@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-nord@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-okaidia@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-okaidia@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 
4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-quietlight@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-quietlight@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-red@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-red@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-solarized@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-solarized@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-sublime@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-sublime@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-tokyo-night-day@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-tokyo-night-day@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-tokyo-night-storm@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-tokyo-night-storm@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 
4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-tokyo-night@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-tokyo-night@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-tomorrow-night-blue@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-tomorrow-night-blue@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-vscode@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-vscode@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-white@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-white@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-xcode@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-theme-xcode@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-themes-all@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': - dependencies: - '@uiw/codemirror-theme-abcdef': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-abyss': 
4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-androidstudio': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-andromeda': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-atomone': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-aura': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-basic': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-bbedit': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-bespin': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-console': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-copilot': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-darcula': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-dracula': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-duotone': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-eclipse': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-github': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-gruvbox-dark': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-kimbie': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-material': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-monokai': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-monokai-dimmed': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-noctis-lilac': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-nord': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-okaidia': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-quietlight': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-red': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-solarized': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-sublime': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-tokyo-night': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-tokyo-night-day': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - 
'@uiw/codemirror-theme-tokyo-night-storm': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-tomorrow-night-blue': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-vscode': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-white': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-theme-xcode': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) - '@uiw/codemirror-themes': 4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes-all@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + dependencies: + '@uiw/codemirror-theme-abcdef': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-abyss': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-androidstudio': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-andromeda': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-atomone': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-aura': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-basic': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-bbedit': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-bespin': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-console': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-copilot': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-darcula': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-dracula': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-duotone': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-eclipse': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-github': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-gruvbox-dark': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-kimbie': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-material': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-monokai': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-monokai-dimmed': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-noctis-lilac': 
4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-nord': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-okaidia': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-quietlight': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-red': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-solarized': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-sublime': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-tokyo-night': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-tokyo-night-day': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-tokyo-night-storm': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-tomorrow-night-blue': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-vscode': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-white': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-theme-xcode': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-themes': 4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-themes@4.23.10(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': + '@uiw/codemirror-themes@4.23.12(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)': dependencies: '@codemirror/language': 6.11.0 '@codemirror/state': 6.5.2 '@codemirror/view': 6.36.4 - '@uiw/react-codemirror@4.23.10(@babel/runtime@7.26.10)(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.36.4)(codemirror@6.0.1(@lezer/common@1.2.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@uiw/react-codemirror@4.23.12(@babel/runtime@7.26.10)(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.36.4)(codemirror@6.0.1(@lezer/common@1.2.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@babel/runtime': 7.26.10 - '@codemirror/commands': 6.8.0 + '@codemirror/commands': 6.8.1 '@codemirror/state': 6.5.2 '@codemirror/theme-one-dark': 6.1.2 '@codemirror/view': 6.36.4 - '@uiw/codemirror-extensions-basic-setup': 
4.23.10(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/commands@6.8.0)(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) + '@uiw/codemirror-extensions-basic-setup': 4.23.12(@codemirror/autocomplete@6.18.2(@codemirror/language@6.11.0)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4)(@lezer/common@1.2.3))(@codemirror/commands@6.8.1)(@codemirror/language@6.11.0)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.6)(@codemirror/state@6.5.2)(@codemirror/view@6.36.4) codemirror: 6.0.1(@lezer/common@1.2.3) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) @@ -5681,14 +6142,14 @@ snapshots: d3-time-format: 4.1.0 internmap: 2.0.3 - '@vitejs/plugin-react-swc@3.8.1(@swc/helpers@0.5.15)(vite@5.4.17(@types/node@22.13.11))': + '@vitejs/plugin-react-swc@3.9.0(vite@5.4.19(@types/node@22.15.14))': dependencies: - '@swc/core': 1.11.12(@swc/helpers@0.5.15) - vite: 5.4.17(@types/node@22.13.11) + '@swc/core': 1.11.24 + vite: 5.4.19(@types/node@22.15.14) transitivePeerDependencies: - '@swc/helpers' - '@vitest/coverage-v8@2.1.9(vitest@2.1.9(@types/node@22.13.11)(happy-dom@17.4.4)(msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4)))': + '@vitest/coverage-v8@2.1.9(vitest@2.1.9(@types/node@22.15.14)(happy-dom@17.4.6)(msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3)))': dependencies: '@ampproject/remapping': 2.3.0 '@bcoe/v8-coverage': 0.2.3 @@ -5702,7 +6163,7 @@ snapshots: std-env: 3.8.1 test-exclude: 7.0.1 tinyrainbow: 1.2.0 - vitest: 2.1.9(@types/node@22.13.11)(happy-dom@17.4.4)(msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4)) + vitest: 2.1.9(@types/node@22.15.14)(happy-dom@17.4.6)(msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3)) transitivePeerDependencies: - supports-color @@ -5713,14 +6174,14 @@ snapshots: chai: 5.2.0 tinyrainbow: 1.2.0 - '@vitest/mocker@2.1.9(msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4))(vite@5.4.17(@types/node@22.13.11))': + '@vitest/mocker@2.1.9(msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3))(vite@5.4.19(@types/node@22.15.14))': dependencies: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 magic-string: 0.30.17 optionalDependencies: - msw: 2.7.3(@types/node@22.13.11)(typescript@5.5.4) - vite: 5.4.17(@types/node@22.13.11) + msw: 2.7.6(@types/node@22.15.14)(typescript@5.8.3) + vite: 5.4.19(@types/node@22.15.14) '@vitest/pretty-format@2.1.9': dependencies: @@ -5768,481 +6229,509 @@ snapshots: d3-selection: 3.0.0 d3-zoom: 3.0.0 - '@zag-js/accordion@1.8.2': + '@zag-js/accordion@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/anatomy@1.8.2': {} + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/aria-hidden@1.8.2': {} + '@zag-js/anatomy@1.15.0': {} - '@zag-js/auto-resize@1.8.2': + '@zag-js/angle-slider@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/rect-utils': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/aria-hidden@1.15.0': {} - '@zag-js/avatar@1.8.2': + '@zag-js/auto-resize@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/dom-query': 1.15.0 - '@zag-js/carousel@1.8.2': + 
'@zag-js/avatar@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/scroll-snap': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/checkbox@1.8.2': + '@zag-js/carousel@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/focus-visible': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/scroll-snap': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/clipboard@1.8.2': + '@zag-js/checkbox@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-visible': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/collapsible@1.8.2': + '@zag-js/clipboard@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/collection@1.8.2': + '@zag-js/collapsible@1.15.0': dependencies: - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/color-picker@1.8.2': + '@zag-js/collection@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/color-utils': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/utils': 1.15.0 - '@zag-js/color-utils@1.8.2': + '@zag-js/color-picker@1.15.0': dependencies: - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/color-utils': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/combobox@1.8.2': + '@zag-js/color-utils@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/aria-hidden': 1.8.2 - '@zag-js/collection': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/utils': 1.15.0 - '@zag-js/core@1.8.2': + '@zag-js/combobox@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/aria-hidden': 1.15.0 + '@zag-js/collection': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/date-picker@1.8.2(@internationalized/date@3.7.0)': + '@zag-js/core@1.15.0': dependencies: - '@internationalized/date': 3.7.0 - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/date-utils': 1.8.2(@internationalized/date@3.7.0) - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/live-region': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/dom-query': 1.15.0 + '@zag-js/utils': 1.15.0 - 
'@zag-js/date-utils@1.8.2(@internationalized/date@3.7.0)': + '@zag-js/date-picker@1.15.0(@internationalized/date@3.8.1)': dependencies: - '@internationalized/date': 3.7.0 + '@internationalized/date': 3.8.1 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/date-utils': 1.15.0(@internationalized/date@3.8.1) + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/live-region': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/dialog@1.8.2': + '@zag-js/date-utils@1.15.0(@internationalized/date@3.8.1)': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/aria-hidden': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/focus-trap': 1.8.2 - '@zag-js/remove-scroll': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@internationalized/date': 3.8.1 - '@zag-js/dismissable@1.8.2': + '@zag-js/dialog@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 - '@zag-js/interact-outside': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/aria-hidden': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-trap': 1.15.0 + '@zag-js/remove-scroll': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/dom-query@1.8.1': + '@zag-js/dismissable@1.15.0': dependencies: - '@zag-js/types': 1.8.1 + '@zag-js/dom-query': 1.15.0 + '@zag-js/interact-outside': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/dom-query@1.8.2': + '@zag-js/dom-query@1.15.0': dependencies: - '@zag-js/types': 1.8.2 + '@zag-js/types': 1.15.0 - '@zag-js/editable@1.8.2': + '@zag-js/editable@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/interact-outside': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/interact-outside': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/file-upload@1.8.2': + '@zag-js/file-upload@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/file-utils': 1.8.2 - '@zag-js/i18n-utils': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/file-utils': 1.15.0 + '@zag-js/i18n-utils': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/file-utils@1.8.1': + '@zag-js/file-utils@1.15.0': dependencies: - '@zag-js/i18n-utils': 1.8.1 + '@zag-js/i18n-utils': 1.15.0 - '@zag-js/file-utils@1.8.2': + '@zag-js/floating-panel@1.15.0': dependencies: - '@zag-js/i18n-utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/rect-utils': 1.15.0 + '@zag-js/store': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/focus-trap@1.8.2': + '@zag-js/focus-trap@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 + '@zag-js/dom-query': 1.15.0 - '@zag-js/focus-visible@1.8.2': + '@zag-js/focus-visible@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 + '@zag-js/dom-query': 1.15.0 - '@zag-js/highlight-word@1.8.2': {} + '@zag-js/highlight-word@1.15.0': {} - '@zag-js/hover-card@1.8.2': + '@zag-js/hover-card@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/popper': 
1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/i18n-utils@1.8.1': + '@zag-js/i18n-utils@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.1 + '@zag-js/dom-query': 1.15.0 - '@zag-js/i18n-utils@1.8.2': + '@zag-js/interact-outside@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 + '@zag-js/dom-query': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/interact-outside@1.8.2': + '@zag-js/listbox@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/collection': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-visible': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/live-region@1.8.2': {} + '@zag-js/live-region@1.15.0': {} - '@zag-js/menu@1.8.2': + '@zag-js/menu@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/rect-utils': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/rect-utils': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/number-input@1.8.2': + '@zag-js/number-input@1.15.0': dependencies: - '@internationalized/number': 3.6.0 - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@internationalized/number': 3.6.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/pagination@1.8.2': + '@zag-js/pagination@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/pin-input@1.8.2': + '@zag-js/password-input@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/popover@1.8.2': + '@zag-js/pin-input@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/aria-hidden': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/focus-trap': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/remove-scroll': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/popper@1.8.2': + '@zag-js/popover@1.15.0': dependencies: - '@floating-ui/dom': 1.6.13 - '@zag-js/dom-query': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/aria-hidden': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-trap': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/remove-scroll': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/popper@1.15.0': + dependencies: + '@floating-ui/dom': 
1.7.1 + '@zag-js/dom-query': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/presence@1.8.2': + '@zag-js/presence@1.15.0': dependencies: - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 - '@zag-js/progress@1.8.2': + '@zag-js/progress@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/qr-code@1.8.2': + '@zag-js/qr-code@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 proxy-memoize: 3.0.1 uqr: 0.1.2 - '@zag-js/radio-group@1.8.2': + '@zag-js/radio-group@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/focus-visible': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-visible': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/rating-group@1.8.2': + '@zag-js/rating-group@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/react@1.8.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@zag-js/react@1.15.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - '@zag-js/core': 1.8.2 - '@zag-js/store': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/core': 1.15.0 + '@zag-js/store': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@zag-js/rect-utils@1.8.2': {} + '@zag-js/rect-utils@1.15.0': {} - '@zag-js/remove-scroll@1.8.2': + '@zag-js/remove-scroll@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 + '@zag-js/dom-query': 1.15.0 - '@zag-js/scroll-snap@1.8.2': + '@zag-js/scroll-snap@1.15.0': dependencies: - '@zag-js/dom-query': 1.8.2 + '@zag-js/dom-query': 1.15.0 - '@zag-js/select@1.8.2': + '@zag-js/select@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/collection': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/collection': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/signature-pad@1.8.2': + '@zag-js/signature-pad@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 perfect-freehand: 1.2.2 - '@zag-js/slider@1.8.2': + '@zag-js/slider@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - 
'@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/splitter@1.8.2': + '@zag-js/splitter@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/steps@1.8.2': + '@zag-js/steps@1.15.0': dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 - '@zag-js/store@1.8.2': + '@zag-js/store@1.15.0': dependencies: proxy-compare: 3.0.1 - '@zag-js/switch@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/focus-visible': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/tabs@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/tags-input@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/auto-resize': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/interact-outside': 1.8.2 - '@zag-js/live-region': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/time-picker@1.8.2(@internationalized/date@3.7.0)': - dependencies: - '@internationalized/date': 3.7.0 - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/timer@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/toast@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/toggle-group@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/toggle@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/tooltip@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/focus-visible': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/store': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/tour@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dismissable': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/focus-trap': 1.8.2 - '@zag-js/interact-outside': 1.8.2 - '@zag-js/popper': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/tree-view@1.8.2': - dependencies: - '@zag-js/anatomy': 1.8.2 - '@zag-js/collection': 1.8.2 - '@zag-js/core': 1.8.2 - '@zag-js/dom-query': 1.8.2 - '@zag-js/types': 1.8.2 - '@zag-js/utils': 1.8.2 - - '@zag-js/types@1.8.1': + '@zag-js/switch@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-visible': 1.15.0 + 
'@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/tabs@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/tags-input@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/auto-resize': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/interact-outside': 1.15.0 + '@zag-js/live-region': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/time-picker@1.15.0(@internationalized/date@3.8.1)': + dependencies: + '@internationalized/date': 3.8.1 + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/timer@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/toast@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/toggle-group@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/toggle@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/tooltip@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-visible': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/store': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/tour@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dismissable': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/focus-trap': 1.15.0 + '@zag-js/interact-outside': 1.15.0 + '@zag-js/popper': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/tree-view@1.15.0': + dependencies: + '@zag-js/anatomy': 1.15.0 + '@zag-js/collection': 1.15.0 + '@zag-js/core': 1.15.0 + '@zag-js/dom-query': 1.15.0 + '@zag-js/types': 1.15.0 + '@zag-js/utils': 1.15.0 + + '@zag-js/types@1.15.0': dependencies: csstype: 3.1.3 - '@zag-js/types@1.8.2': - dependencies: - csstype: 3.1.3 + '@zag-js/utils@1.15.0': {} - '@zag-js/utils@1.8.2': {} + accepts@2.0.0: + dependencies: + mime-types: 3.0.1 + negotiator: 1.0.0 acorn-jsx@5.3.2(acorn@8.14.1): dependencies: @@ -6388,8 +6877,24 @@ snapshots: base16@1.0.0: {} + big-integer@1.6.52: {} + binary-extensions@2.3.0: {} + body-parser@2.2.0: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 4.4.0 + http-errors: 2.0.0 + iconv-lite: 0.6.3 + on-finished: 2.4.1 + qs: 6.14.0 + raw-body: 3.0.0 + type-is: 2.0.1 + transitivePeerDependencies: + - supports-color + brace-expansion@1.1.11: dependencies: balanced-match: 1.0.2 @@ -6412,6 +6917,8 @@ snapshots: builtin-modules@3.3.0: {} + bytes@3.1.2: {} + c12@1.11.1(magicast@0.3.5): dependencies: chokidar: 3.6.0 @@ -6464,9 +6971,9 @@ snapshots: loupe: 3.1.3 pathval: 2.0.0 - 
chakra-react-select@6.1.0(@chakra-ui/react@3.15.1(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.19)(next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + chakra-react-select@6.1.0(@chakra-ui/react@3.20.0(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.19)(next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - '@chakra-ui/react': 3.15.1(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@chakra-ui/react': 3.20.0(@emotion/react@11.14.0(@types/react@18.3.19)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) next-themes: 0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-select: 5.10.1(@types/react@18.3.19)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -6499,13 +7006,13 @@ snapshots: character-reference-invalid@2.0.1: {} - chart.js@4.4.8: + chart.js@4.4.9: dependencies: '@kurkle/color': 0.3.4 - chartjs-plugin-annotation@3.1.0(chart.js@4.4.8): + chartjs-plugin-annotation@3.1.0(chart.js@4.4.9): dependencies: - chart.js: 4.4.8 + chart.js: 4.4.9 check-error@2.1.1: {} @@ -6583,14 +7090,27 @@ snapshots: consola@3.4.2: {} + content-disposition@1.0.0: + dependencies: + safe-buffer: 5.2.1 + + content-type@1.0.5: {} + convert-source-map@1.9.0: {} + cookie-signature@1.2.2: {} + cookie@0.7.2: {} core-js-compat@3.41.0: dependencies: browserslist: 4.24.4 + cors@2.8.5: + dependencies: + object-assign: 4.1.1 + vary: 1.1.2 + cosmiconfig@7.1.0: dependencies: '@types/parse-json': 4.0.2 @@ -6607,6 +7127,12 @@ snapshots: transitivePeerDependencies: - encoding + cross-fetch@4.0.0: + dependencies: + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + cross-spawn@7.0.6: dependencies: path-key: 3.1.1 @@ -6745,6 +7271,8 @@ snapshots: delayed-stream@1.0.0: {} + depd@2.0.0: {} + dequal@2.0.3: {} destr@2.0.3: {} @@ -6776,6 +7304,8 @@ snapshots: eastasianwidth@0.2.0: {} + ee-first@1.1.1: {} + electron-to-chromium@1.5.123: {} elkjs@0.10.0: {} @@ -6784,6 +7314,8 @@ snapshots: emoji-regex@9.2.2: {} + encodeurl@2.0.0: {} + error-ex@1.3.2: dependencies: is-arrayish: 0.2.1 @@ -6918,17 +7450,49 @@ snapshots: escalade@3.2.0: {} + escape-html@1.0.3: {} + escape-string-regexp@1.0.5: {} escape-string-regexp@4.0.0: {} escape-string-regexp@5.0.0: {} - eslint-config-prettier@10.1.1(eslint@9.23.0(jiti@1.21.7)): + eslint-compat-utils@0.6.5(eslint@9.26.0(jiti@1.21.7)): + dependencies: + eslint: 9.26.0(jiti@1.21.7) + semver: 7.7.1 + + eslint-config-prettier@10.1.2(eslint@9.26.0(jiti@1.21.7)): + dependencies: + eslint: 9.26.0(jiti@1.21.7) + + eslint-json-compat-utils@0.2.1(eslint@9.26.0(jiti@1.21.7))(jsonc-eslint-parser@2.4.0): + dependencies: + eslint: 9.26.0(jiti@1.21.7) + esquery: 1.6.0 + jsonc-eslint-parser: 2.4.0 + + eslint-plugin-i18next@6.1.1: + dependencies: + lodash: 4.17.21 + requireindex: 1.1.0 + + eslint-plugin-jsonc@2.20.1(eslint@9.26.0(jiti@1.21.7)): dependencies: - eslint: 9.23.0(jiti@1.21.7) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.26.0(jiti@1.21.7)) + eslint: 9.26.0(jiti@1.21.7) + eslint-compat-utils: 0.6.5(eslint@9.26.0(jiti@1.21.7)) + eslint-json-compat-utils: 0.2.1(eslint@9.26.0(jiti@1.21.7))(jsonc-eslint-parser@2.4.0) + espree: 10.3.0 + graphemer: 1.4.0 + jsonc-eslint-parser: 2.4.0 + natural-compare: 1.4.0 + synckit: 0.11.8 + 
transitivePeerDependencies: + - '@eslint/json' - eslint-plugin-jsx-a11y@6.10.2(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-jsx-a11y@6.10.2(eslint@9.26.0(jiti@1.21.7)): dependencies: aria-query: 5.3.2 array-includes: 3.1.8 @@ -6938,7 +7502,7 @@ snapshots: axobject-query: 4.1.0 damerau-levenshtein: 1.0.8 emoji-regex: 9.2.2 - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) hasown: 2.0.2 jsx-ast-utils: 3.3.5 language-tags: 1.0.9 @@ -6947,34 +7511,34 @@ snapshots: safe-regex-test: 1.1.0 string.prototype.includes: 2.0.1 - eslint-plugin-perfectionist@4.10.1(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4): + eslint-plugin-perfectionist@4.12.3(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3): dependencies: - '@typescript-eslint/types': 8.28.0 - '@typescript-eslint/utils': 8.28.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) + '@typescript-eslint/types': 8.32.0 + '@typescript-eslint/utils': 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + eslint: 9.26.0(jiti@1.21.7) natural-orderby: 5.0.0 transitivePeerDependencies: - supports-color - typescript - eslint-plugin-prettier@5.2.3(eslint-config-prettier@10.1.1(eslint@9.23.0(jiti@1.21.7)))(eslint@9.23.0(jiti@1.21.7))(prettier@3.5.3): + eslint-plugin-prettier@5.4.0(eslint-config-prettier@10.1.2(eslint@9.26.0(jiti@1.21.7)))(eslint@9.26.0(jiti@1.21.7))(prettier@3.5.3): dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) prettier: 3.5.3 prettier-linter-helpers: 1.0.0 - synckit: 0.9.2 + synckit: 0.11.4 optionalDependencies: - eslint-config-prettier: 10.1.1(eslint@9.23.0(jiti@1.21.7)) + eslint-config-prettier: 10.1.2(eslint@9.26.0(jiti@1.21.7)) - eslint-plugin-react-hooks@4.6.2(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-react-hooks@4.6.2(eslint@9.26.0(jiti@1.21.7)): dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) - eslint-plugin-react-refresh@0.4.19(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-react-refresh@0.4.20(eslint@9.26.0(jiti@1.21.7)): dependencies: - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) - eslint-plugin-react@7.37.4(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-react@7.37.5(eslint@9.26.0(jiti@1.21.7)): dependencies: array-includes: 3.1.8 array.prototype.findlast: 1.2.5 @@ -6982,7 +7546,7 @@ snapshots: array.prototype.tosorted: 1.1.4 doctrine: 2.1.0 es-iterator-helpers: 1.2.1 - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) estraverse: 5.3.0 hasown: 2.0.2 jsx-ast-utils: 3.3.5 @@ -6996,14 +7560,14 @@ snapshots: string.prototype.matchall: 4.0.12 string.prototype.repeat: 1.0.0 - eslint-plugin-unicorn@55.0.0(eslint@9.23.0(jiti@1.21.7)): + eslint-plugin-unicorn@55.0.0(eslint@9.26.0(jiti@1.21.7)): dependencies: '@babel/helper-validator-identifier': 7.25.9 - '@eslint-community/eslint-utils': 4.5.1(eslint@9.23.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.5.1(eslint@9.26.0(jiti@1.21.7)) ci-info: 4.2.0 clean-regexp: 1.0.0 core-js-compat: 3.41.0 - eslint: 9.23.0(jiti@1.21.7) + eslint: 9.26.0(jiti@1.21.7) esquery: 1.6.0 globals: 15.15.0 indent-string: 4.0.0 @@ -7025,20 +7589,21 @@ snapshots: eslint-visitor-keys@4.2.0: {} - eslint@9.23.0(jiti@1.21.7): + eslint@9.26.0(jiti@1.21.7): dependencies: - '@eslint-community/eslint-utils': 4.5.1(eslint@9.23.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.5.1(eslint@9.26.0(jiti@1.21.7)) '@eslint-community/regexpp': 4.12.1 - '@eslint/config-array': 0.19.2 - '@eslint/config-helpers': 0.2.0 - '@eslint/core': 0.12.0 + '@eslint/config-array': 0.20.0 + '@eslint/config-helpers': 0.2.2 + 
'@eslint/core': 0.13.0 '@eslint/eslintrc': 3.3.1 - '@eslint/js': 9.23.0 - '@eslint/plugin-kit': 0.2.7 + '@eslint/js': 9.26.0 + '@eslint/plugin-kit': 0.2.8 '@humanfs/node': 0.16.6 '@humanwhocodes/module-importer': 1.0.1 '@humanwhocodes/retry': 0.4.2 - '@types/estree': 1.0.6 + '@modelcontextprotocol/sdk': 1.11.0 + '@types/estree': 1.0.7 '@types/json-schema': 7.0.15 ajv: 6.12.6 chalk: 4.1.2 @@ -7062,6 +7627,7 @@ snapshots: minimatch: 3.1.2 natural-compare: 1.4.0 optionator: 0.9.4 + zod: 3.24.4 optionalDependencies: jiti: 1.21.7 transitivePeerDependencies: @@ -7073,6 +7639,12 @@ snapshots: acorn-jsx: 5.3.2(acorn@8.14.1) eslint-visitor-keys: 4.2.0 + espree@9.6.1: + dependencies: + acorn: 8.14.1 + acorn-jsx: 5.3.2(acorn@8.14.1) + eslint-visitor-keys: 3.4.3 + esprima@4.0.1: {} esquery@1.6.0: @@ -7093,8 +7665,52 @@ snapshots: esutils@2.0.3: {} + etag@1.8.1: {} + + eventsource-parser@3.0.1: {} + + eventsource@3.0.6: + dependencies: + eventsource-parser: 3.0.1 + expect-type@1.2.0: {} + express-rate-limit@7.5.0(express@5.1.0): + dependencies: + express: 5.1.0 + + express@5.1.0: + dependencies: + accepts: 2.0.0 + body-parser: 2.2.0 + content-disposition: 1.0.0 + content-type: 1.0.5 + cookie: 0.7.2 + cookie-signature: 1.2.2 + debug: 4.4.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 2.1.0 + fresh: 2.0.0 + http-errors: 2.0.0 + merge-descriptors: 2.0.0 + mime-types: 3.0.1 + on-finished: 2.4.1 + once: 1.4.0 + parseurl: 1.3.3 + proxy-addr: 2.0.7 + qs: 6.14.0 + range-parser: 1.2.1 + router: 2.2.0 + send: 1.2.0 + serve-static: 2.2.0 + statuses: 2.0.1 + type-is: 2.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + extend@3.0.2: {} fast-deep-equal@3.1.3: {} @@ -7151,6 +7767,17 @@ snapshots: dependencies: to-regex-range: 5.0.1 + finalhandler@2.1.0: + dependencies: + debug: 4.4.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.1 + transitivePeerDependencies: + - supports-color + find-root@1.1.0: {} find-up@4.1.0: @@ -7198,6 +7825,10 @@ snapshots: format@0.2.2: {} + forwarded@0.2.0: {} + + fresh@2.0.0: {} + fs-minipass@2.1.0: dependencies: minipass: 3.3.6 @@ -7306,7 +7937,7 @@ snapshots: optionalDependencies: uglify-js: 3.19.3 - happy-dom@17.4.4: + happy-dom@17.4.6: dependencies: webidl-conversions: 7.0.0 whatwg-mimetype: 3.0.0 @@ -7381,10 +8012,42 @@ snapshots: html-escaper@2.0.2: {} + html-parse-stringify@3.0.1: + dependencies: + void-elements: 3.1.0 + html-to-image@1.11.13: {} html-url-attributes@3.0.1: {} + http-errors@2.0.0: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.1 + toidentifier: 1.0.1 + + i18next-browser-languagedetector@8.1.0: + dependencies: + '@babel/runtime': 7.26.10 + + i18next-http-backend@3.0.2: + dependencies: + cross-fetch: 4.0.0 + transitivePeerDependencies: + - encoding + + i18next@25.1.2(typescript@5.8.3): + dependencies: + '@babel/runtime': 7.26.10 + optionalDependencies: + typescript: 5.8.3 + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + ignore@5.3.2: {} import-fresh@3.3.1: @@ -7396,6 +8059,8 @@ snapshots: indent-string@4.0.0: {} + inherits@2.0.4: {} + inline-style-parser@0.2.4: {} internal-slot@1.1.0: @@ -7406,6 +8071,8 @@ snapshots: internmap@2.0.3: {} + ipaddr.js@1.9.1: {} + is-alphabetical@1.0.4: {} is-alphabetical@2.0.1: {} @@ -7510,6 +8177,8 @@ snapshots: is-plain-obj@4.1.0: {} + is-promise@4.0.0: {} + is-regex@1.2.1: dependencies: call-bound: 1.0.4 @@ -7629,6 +8298,13 @@ snapshots: json-stable-stringify-without-jsonify@1.0.1: {} + 
jsonc-eslint-parser@2.4.0: + dependencies: + acorn: 8.14.1 + eslint-visitor-keys: 3.4.3 + espree: 9.6.1 + semver: 7.7.1 + jsonpointer@5.0.1: {} jsx-ast-utils@3.3.5: @@ -7863,8 +8539,12 @@ snapshots: dependencies: '@types/mdast': 4.0.4 + media-typer@1.1.0: {} + memoize-one@6.0.0: {} + merge-descriptors@2.0.0: {} + merge2@1.4.1: {} micromark-core-commonmark@2.0.3: @@ -8065,10 +8745,16 @@ snapshots: mime-db@1.52.0: {} + mime-db@1.54.0: {} + mime-types@2.1.35: dependencies: mime-db: 1.52.0 + mime-types@3.0.1: + dependencies: + mime-db: 1.54.0 + min-indent@1.0.1: {} minimatch@10.0.1: @@ -8111,12 +8797,12 @@ snapshots: ms@2.1.3: {} - msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4): + msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3): dependencies: '@bundled-es-modules/cookie': 2.0.1 '@bundled-es-modules/statuses': 1.0.1 '@bundled-es-modules/tough-cookie': 0.1.6 - '@inquirer/confirm': 5.1.8(@types/node@22.13.11) + '@inquirer/confirm': 5.1.8(@types/node@22.15.14) '@mswjs/interceptors': 0.37.6 '@open-draft/deferred-promise': 2.2.0 '@open-draft/until': 2.1.0 @@ -8132,7 +8818,7 @@ snapshots: type-fest: 4.37.0 yargs: 17.7.2 optionalDependencies: - typescript: 5.5.4 + typescript: 5.8.3 transitivePeerDependencies: - '@types/node' @@ -8144,6 +8830,8 @@ snapshots: natural-orderby@5.0.0: {} + negotiator@1.0.0: {} + neo-async@2.6.2: {} next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): @@ -8159,6 +8847,11 @@ snapshots: node-releases@2.0.19: {} + node-sql-parser@5.3.10: + dependencies: + '@types/pegjs': 0.10.6 + big-integer: 1.6.52 + normalize-package-data@2.5.0: dependencies: hosted-git-info: 2.8.9 @@ -8215,6 +8908,14 @@ snapshots: ohash@1.1.6: {} + on-finished@2.4.1: + dependencies: + ee-first: 1.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + openapi-merge-cli@1.3.2: dependencies: ajv: 6.12.6 @@ -8299,6 +9000,8 @@ snapshots: json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 + parseurl@1.3.3: {} + path-browserify@1.0.1: {} path-exists@4.0.0: {} @@ -8319,6 +9022,8 @@ snapshots: path-to-regexp@6.3.0: {} + path-to-regexp@8.2.0: {} + path-type@4.0.0: {} pathe@1.1.2: {} @@ -8337,6 +9042,8 @@ snapshots: picomatch@4.0.2: {} + pkce-challenge@5.0.0: {} + pkg-types@1.3.1: dependencies: confbox: 0.1.8 @@ -8387,6 +9094,11 @@ snapshots: property-information@7.0.0: {} + proxy-addr@2.0.7: + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + proxy-compare@3.0.1: {} proxy-from-env@1.1.0: {} @@ -8403,10 +9115,23 @@ snapshots: pure-color@1.3.0: {} + qs@6.14.0: + dependencies: + side-channel: 1.1.0 + querystringify@2.2.0: {} queue-microtask@1.2.3: {} + range-parser@1.2.1: {} + + raw-body@3.0.0: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.0 + iconv-lite: 0.6.3 + unpipe: 1.0.0 + rc9@2.1.2: dependencies: defu: 6.1.4 @@ -8419,9 +9144,9 @@ snapshots: lodash.flow: 3.5.0 pure-color: 1.3.0 - react-chartjs-2@5.3.0(chart.js@4.4.8)(react@18.3.1): + react-chartjs-2@5.3.0(chart.js@4.4.9)(react@18.3.1): dependencies: - chart.js: 4.4.8 + chart.js: 4.4.9 react: 18.3.1 react-dom@18.3.1(react@18.3.1): @@ -8430,7 +9155,7 @@ snapshots: react: 18.3.1 scheduler: 0.23.2 - react-hook-form@7.54.2(react@18.3.1): + react-hook-form@7.56.2(react@18.3.1): dependencies: react: 18.3.1 @@ -8439,6 +9164,16 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) + react-i18next@15.5.1(i18next@25.1.2(typescript@5.8.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.8.3): + dependencies: + '@babel/runtime': 7.26.10 + html-parse-stringify: 3.0.1 + i18next: 25.1.2(typescript@5.8.3) + react: 18.3.1 + 
optionalDependencies: + react-dom: 18.3.1(react@18.3.1) + typescript: 5.8.3 + react-icons@5.5.0(react@18.3.1): dependencies: react: 18.3.1 @@ -8642,6 +9377,8 @@ snapshots: require-directory@2.1.1: {} + requireindex@1.1.0: {} + requires-port@1.0.0: {} resolve-from@4.0.0: {} @@ -8662,32 +9399,42 @@ snapshots: robust-predicates@3.0.2: {} - rollup@4.39.0: + rollup@4.40.1: dependencies: '@types/estree': 1.0.7 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.39.0 - '@rollup/rollup-android-arm64': 4.39.0 - '@rollup/rollup-darwin-arm64': 4.39.0 - '@rollup/rollup-darwin-x64': 4.39.0 - '@rollup/rollup-freebsd-arm64': 4.39.0 - '@rollup/rollup-freebsd-x64': 4.39.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.39.0 - '@rollup/rollup-linux-arm-musleabihf': 4.39.0 - '@rollup/rollup-linux-arm64-gnu': 4.39.0 - '@rollup/rollup-linux-arm64-musl': 4.39.0 - '@rollup/rollup-linux-loongarch64-gnu': 4.39.0 - '@rollup/rollup-linux-powerpc64le-gnu': 4.39.0 - '@rollup/rollup-linux-riscv64-gnu': 4.39.0 - '@rollup/rollup-linux-riscv64-musl': 4.39.0 - '@rollup/rollup-linux-s390x-gnu': 4.39.0 - '@rollup/rollup-linux-x64-gnu': 4.39.0 - '@rollup/rollup-linux-x64-musl': 4.39.0 - '@rollup/rollup-win32-arm64-msvc': 4.39.0 - '@rollup/rollup-win32-ia32-msvc': 4.39.0 - '@rollup/rollup-win32-x64-msvc': 4.39.0 + '@rollup/rollup-android-arm-eabi': 4.40.1 + '@rollup/rollup-android-arm64': 4.40.1 + '@rollup/rollup-darwin-arm64': 4.40.1 + '@rollup/rollup-darwin-x64': 4.40.1 + '@rollup/rollup-freebsd-arm64': 4.40.1 + '@rollup/rollup-freebsd-x64': 4.40.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.40.1 + '@rollup/rollup-linux-arm-musleabihf': 4.40.1 + '@rollup/rollup-linux-arm64-gnu': 4.40.1 + '@rollup/rollup-linux-arm64-musl': 4.40.1 + '@rollup/rollup-linux-loongarch64-gnu': 4.40.1 + '@rollup/rollup-linux-powerpc64le-gnu': 4.40.1 + '@rollup/rollup-linux-riscv64-gnu': 4.40.1 + '@rollup/rollup-linux-riscv64-musl': 4.40.1 + '@rollup/rollup-linux-s390x-gnu': 4.40.1 + '@rollup/rollup-linux-x64-gnu': 4.40.1 + '@rollup/rollup-linux-x64-musl': 4.40.1 + '@rollup/rollup-win32-arm64-msvc': 4.40.1 + '@rollup/rollup-win32-ia32-msvc': 4.40.1 + '@rollup/rollup-win32-x64-msvc': 4.40.1 fsevents: 2.3.3 + router@2.2.0: + dependencies: + debug: 4.4.0 + depd: 2.0.0 + is-promise: 4.0.0 + parseurl: 1.3.3 + path-to-regexp: 8.2.0 + transitivePeerDependencies: + - supports-color + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 @@ -8700,6 +9447,8 @@ snapshots: has-symbols: 1.1.0 isarray: 2.0.5 + safe-buffer@5.2.1: {} + safe-push-apply@1.0.0: dependencies: es-errors: 1.3.0 @@ -8711,6 +9460,8 @@ snapshots: es-errors: 1.3.0 is-regex: 1.2.1 + safer-buffer@2.1.2: {} + scheduler@0.23.2: dependencies: loose-envify: 1.4.0 @@ -8721,6 +9472,31 @@ snapshots: semver@7.7.1: {} + send@1.2.0: + dependencies: + debug: 4.4.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 2.0.0 + http-errors: 2.0.0 + mime-types: 3.0.1 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + transitivePeerDependencies: + - supports-color + + serve-static@2.2.0: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 1.2.0 + transitivePeerDependencies: + - supports-color + set-function-length@1.2.2: dependencies: define-data-property: 1.1.4 @@ -8745,6 +9521,8 @@ snapshots: setimmediate@1.0.5: {} + setprototypeof@1.2.0: {} + shebang-command@2.0.0: dependencies: shebang-regex: 3.0.0 @@ -8916,11 +9694,15 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} - synckit@0.9.2: + synckit@0.11.4: dependencies: - '@pkgr/core': 
0.1.2 + '@pkgr/core': 0.2.4 tslib: 2.8.1 + synckit@0.11.8: + dependencies: + '@pkgr/core': 0.2.4 + tar@6.2.1: dependencies: chownr: 2.0.0 @@ -8952,6 +9734,8 @@ snapshots: dependencies: is-number: 7.0.0 + toidentifier@1.0.1: {} + tough-cookie@4.1.4: dependencies: psl: 1.15.0 @@ -8965,9 +9749,9 @@ snapshots: trough@2.2.0: {} - ts-api-utils@2.1.0(typescript@5.5.4): + ts-api-utils@2.1.0(typescript@5.8.3): dependencies: - typescript: 5.5.4 + typescript: 5.8.3 ts-is-present@1.2.2: {} @@ -8990,6 +9774,12 @@ snapshots: type-fest@4.37.0: {} + type-is@2.0.1: + dependencies: + content-type: 1.0.5 + media-typer: 1.1.0 + mime-types: 3.0.1 + typed-array-buffer@1.0.3: dependencies: call-bound: 1.0.4 @@ -9023,17 +9813,17 @@ snapshots: possible-typed-array-names: 1.1.0 reflect.getprototypeof: 1.0.10 - typescript-eslint@8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4): + typescript-eslint@8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.27.0(@typescript-eslint/parser@8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4))(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/parser': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - '@typescript-eslint/utils': 8.27.0(eslint@9.23.0(jiti@1.21.7))(typescript@5.5.4) - eslint: 9.23.0(jiti@1.21.7) - typescript: 5.5.4 + '@typescript-eslint/eslint-plugin': 8.32.0(@typescript-eslint/parser@8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.32.0(eslint@9.26.0(jiti@1.21.7))(typescript@5.8.3) + eslint: 9.26.0(jiti@1.21.7) + typescript: 5.8.3 transitivePeerDependencies: - supports-color - typescript@5.5.4: {} + typescript@5.8.3: {} ua-parser-js@1.0.40: {} @@ -9049,7 +9839,7 @@ snapshots: has-symbols: 1.1.0 which-boxed-primitive: 1.1.1 - undici-types@6.20.0: {} + undici-types@6.21.0: {} unified@11.0.5: dependencies: @@ -9086,6 +9876,8 @@ snapshots: universalify@0.2.0: {} + unpipe@1.0.0: {} + update-browserslist-db@1.1.3(browserslist@4.24.4): dependencies: browserslist: 4.24.4 @@ -9142,6 +9934,8 @@ snapshots: spdx-correct: 3.2.0 spdx-expression-parse: 3.0.1 + vary@1.1.2: {} + vfile-message@4.0.2: dependencies: '@types/unist': 3.0.3 @@ -9152,13 +9946,13 @@ snapshots: '@types/unist': 3.0.3 vfile-message: 4.0.2 - vite-node@2.1.9(@types/node@22.13.11): + vite-node@2.1.9(@types/node@22.15.14): dependencies: cac: 6.7.14 debug: 4.4.0 es-module-lexer: 1.6.0 pathe: 1.1.2 - vite: 5.4.17(@types/node@22.13.11) + vite: 5.4.19(@types/node@22.15.14) transitivePeerDependencies: - '@types/node' - less @@ -9170,23 +9964,23 @@ snapshots: - supports-color - terser - vite-plugin-css-injected-by-js@3.5.2(vite@5.4.17(@types/node@22.13.11)): + vite-plugin-css-injected-by-js@3.5.2(vite@5.4.19(@types/node@22.15.14)): dependencies: - vite: 5.4.17(@types/node@22.13.11) + vite: 5.4.19(@types/node@22.15.14) - vite@5.4.17(@types/node@22.13.11): + vite@5.4.19(@types/node@22.15.14): dependencies: esbuild: 0.21.5 postcss: 8.5.3 - rollup: 4.39.0 + rollup: 4.40.1 optionalDependencies: - '@types/node': 22.13.11 + '@types/node': 22.15.14 fsevents: 2.3.3 - vitest@2.1.9(@types/node@22.13.11)(happy-dom@17.4.4)(msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4)): + vitest@2.1.9(@types/node@22.15.14)(happy-dom@17.4.6)(msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3)): dependencies: '@vitest/expect': 2.1.9 - '@vitest/mocker': 
2.1.9(msw@2.7.3(@types/node@22.13.11)(typescript@5.5.4))(vite@5.4.17(@types/node@22.13.11)) + '@vitest/mocker': 2.1.9(msw@2.7.6(@types/node@22.15.14)(typescript@5.8.3))(vite@5.4.19(@types/node@22.15.14)) '@vitest/pretty-format': 2.1.9 '@vitest/runner': 2.1.9 '@vitest/snapshot': 2.1.9 @@ -9202,12 +9996,12 @@ snapshots: tinyexec: 0.3.2 tinypool: 1.0.2 tinyrainbow: 1.2.0 - vite: 5.4.17(@types/node@22.13.11) - vite-node: 2.1.9(@types/node@22.13.11) + vite: 5.4.19(@types/node@22.15.14) + vite-node: 2.1.9(@types/node@22.15.14) why-is-node-running: 2.3.0 optionalDependencies: - '@types/node': 22.13.11 - happy-dom: 17.4.4 + '@types/node': 22.15.14 + happy-dom: 17.4.6 transitivePeerDependencies: - less - lightningcss @@ -9219,6 +10013,8 @@ snapshots: - supports-color - terser + void-elements@3.1.0: {} + w3c-keyname@2.2.8: {} web-worker@1.5.0: {} @@ -9308,6 +10104,8 @@ snapshots: string-width: 5.1.2 strip-ansi: 7.1.0 + wrappy@1.0.2: {} + xtend@4.0.2: {} y18n@5.0.8: {} @@ -9316,6 +10114,8 @@ snapshots: yaml@1.10.2: {} + yaml@2.8.0: {} + yargs-parser@21.1.1: {} yargs@17.7.2: @@ -9332,6 +10132,12 @@ snapshots: yoctocolors-cjs@2.1.2: {} + zod-to-json-schema@3.24.5(zod@3.24.4): + dependencies: + zod: 3.24.4 + + zod@3.24.4: {} + zustand@4.5.6(@types/react@18.3.19)(react@18.3.1): dependencies: use-sync-external-store: 1.4.0(react@18.3.1) @@ -9339,7 +10145,7 @@ snapshots: '@types/react': 18.3.19 react: 18.3.1 - zustand@5.0.3(@types/react@18.3.19)(react@18.3.1)(use-sync-external-store@1.4.0(react@18.3.1)): + zustand@5.0.4(@types/react@18.3.19)(react@18.3.1)(use-sync-external-store@1.4.0(react@18.3.1)): optionalDependencies: '@types/react': 18.3.19 react: 18.3.1 diff --git a/airflow-core/src/airflow/ui/public/i18n/README.md b/airflow-core/src/airflow/ui/public/i18n/README.md new file mode 100644 index 0000000000000..4fa50122cf7a4 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/README.md @@ -0,0 +1,270 @@ + + +# Internationalization (i18n) Policy + +## 1. Purpose & scope + +This document outlines the policy for internationalization (i18n) in Apache Airflow, detailing the lifecycle of translations within the project. +This policy aims to avoid inconsistencies, maintenance issues, unclear ownership, and to ensure translation quality. + +### Scope + +This policy applies to: + +- Each supported locale included in `airflow-core/src/airflow/ui/public/i18n/locales`. +- Contributors making changes in the default locale (English). +- Contributors suggesting new locales to be added to the codebase. +- Maintainers of supported locales in any role defined below. +- Committers and PMC. +- Release managers. + +> [!NOTE] +> This policy currently applies only to changes made in Apache Airflow core, as i18n is not yet implemented for providers (including auth managers). When such support is added, this policy should be updated to reflect the expanded scope. + +## 2. Definitions + +**Internationalization (i18n)** - The process of designing a software application so that it can be adapted to various languages and regions without engineering changes (see also the [Wikipedia article](https://en.wikipedia.org/wiki/Internationalization_and_localization)). + +**Supported locale** - An officially accepted locale in `airflow-core/src/airflow/ui/public/i18n/locales`. + +**Default locale** - English (`en`), the primary locale and fallback for all other locales. + +**Translation owner** - Designated contributor responsible for maintaining a supported locale. 
+
+**Code owner** - Apache Airflow committer with write permissions, listed in `.github/CODEOWNERS`.
+
+**Translation sponsor** - Apache Airflow committer supporting a non-committer translation owner (e.g., by communicating in the dev list or merging Pull Requests on their behalf).
+
+**Engaged translator** - Active contributor participating in translation without formal ownership.
+
+**Inactive translation/code owner** - A translation/code owner is considered inactive if they meet either of the following criteria:
+
+- The locale under their responsibility has remained incomplete for at least 2 consecutive releases.
+- They have not participated in the Apache Airflow project for more than 12 months.
+
+**Dev list** - The Apache Airflow development mailing list: dev@airflow.apache.org.
+
+## 3. Wording/Phrasing
+
+- Unless explicitly stated otherwise, all references to directories and files in this document pertain to those in the `main` branch.
+- Where emphasised by capital letters, the keywords "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD",
+"SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be interpreted as described in RFC 2119.
+
+## 4. Roles & responsibilities
+
+### 4.1. Translation owner
+
+- Translation owners are responsible for the following, in their assigned supported locale, according to the established quality standards and procedures stated below:
+  - Ensuring the locale remains up-to-date with source code changes in the default locale.
+  - Reviewing the language aspects of translation-related Pull Requests (PRs).
+  - Resolving translation-related conflicts in PRs.
+  - Ensuring the translation reflects current language usage and terminology.
+  - Resolving translation-related GitHub issues and discussions.
+
+### 4.2. Code owner
+
+- Code owners are responsible for the following, in their assigned supported locale, according to the procedures stated below:
+  - Reviewing the technical aspects of translation-related PRs (e.g., linting, formatting, etc.).
+  - Merging translation-related PRs approved by the translation owner.
+  - Resolving translation-related conflicts in PRs when there is a conflict between translation owners.
+  - Managing translation-related GitHub issues and discussions when needed (e.g., closing GitHub issues).
+- Code owners who act as translation sponsors are also responsible for:
+  - Ensuring that the translation owner is active and able to maintain the translation.
+  - Acting according to section 6.4 when the translation owner relinquishes their role or becomes inactive.
+
+### 4.3. Engaged translator
+
+- Engaged translators do not have any formal responsibilities, but they are encouraged to contribute to supported locales by:
+  - Suggesting improvements.
+  - Reviewing PRs.
+  - Reporting issues or inconsistencies in translations.
+  - Participating in discussions related to translations.
+  - Assisting translation owners with their tasks.
+  - Acting as third-party reviewers for translation-related conflicts, when needed.
+- Engaged translators may be mentioned in a comment in the `.github/CODEOWNERS` file.
+- Suitable candidates for translation ownership may be suggested from among engaged translators, subject to their consent and to approval via the procedure in section 6.1.
+
+## 5. Requirements
+
+### 5.1. Translation ownership and code ownership
+
+- Each supported locale, except for the default language, MUST have at least one translation owner and at least one code owner assigned at all times, with these considerations:
+  - Ownership for both roles MUST be approved according to the process discussed in section 6.1.
+  - A single Apache Airflow committer MAY serve as both code owner and translation owner for the same locale.
+  - If none of the translation owners are code owners, a translation sponsor MAY be assigned as a code owner.
+- When the above is not met, the steps mentioned in section 6.4 SHOULD be taken by the appropriate roles.
+
+> [!NOTE]
+> It is welcomed and desired to have more than one translation owner to enable peer reviews and provide coverage during absences.
+
+### 5.2. Adding new locales
+
+To be accepted into the codebase, a new supported locale MUST be approved through the process discussed in section 6.2.
+
+### 5.3. Translation owner candidates
+
+- Translation owner candidates MUST declare and demonstrate a sufficient level of proficiency in the target language for translation purposes, including technical terminology (as detailed in section 6.5).
+- Translation owner candidates who are non-committers MUST also meet the following criteria:
+  - They are active long-term contributors to the Apache Airflow project at the time of request.
+  - They have basic skills in working with Git and GitHub, as well as in modifying JSON translation files in their target language.
+  - They have the support of an Apache Airflow committer who will act as a translation sponsor.
+
+### 5.4. Resolution of translation conflicts
+
+Translation conflicts MUST be resolved according to the procedures outlined in section 6.3.
+
+### 5.5. Adding or rephrasing terms
+
+- When new terms are added to the default locale, all translation owners SHOULD create a follow-up PR to comply with the changes in their assigned locale.
+- When existing terms are rephrased in the default language (the key is the same but the value changed), all translation owners SHOULD do the same as above, if the change in intent or meaning affects the translation.
+- In busy times with many parallel UI changes, it is acceptable to batch changes together. Differences SHOULD be resolved prior to a release at the latest.
+
+> [!NOTE]
+> Tooling for detecting missing terms is available (see Tools & Resources section below).
+
+### 5.6. Deprecating / refactoring terms
+
+- When existing terms are deprecated or refactored in the default locale (key renamed/relocated but value unchanged), **the contributor initiating the change is responsible for updating all relevant locale files, not the locale's owners**. When available, automation through Breeze tooling SHOULD be used.
+
+### 5.7. Merging of translation-related Pull Requests (PRs)
+
+- Before merging any translation-related PR, it MUST be:
+  - Approved by a translation owner of the respective locale for language aspects, according to the standards and guidelines.
+  - When a translation owner initiates a PR and is the only one assigned to the locale, they SHOULD instead ask for approval from a third party (e.g., an engaged translator) or, if none is available, declare their self-approval for the language aspects.
+  - Approved by a code owner, or another committer on their behalf, for technical aspects (e.g., linting, formatting, etc.).
+- Before merging a translation-related PR, the translation SHOULD be checked for completeness using the provided tools (see section 8).
+
+> [!WARNING]
+> In languages with a different word order than English, or in Right-To-Left (RTL) languages, it is important to validate that the changes are properly reflected in the UI.
+> If they are not, please raise a GitHub issue or a PR to fix it (separately from the translation PR).
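+To make the word-order and placeholder points concrete, the sketch below shows how i18next (the library used by the Airflow UI) resolves plural-suffixed keys and `{{count}}` substitutions at runtime. It is an illustrative, self-contained example rather than Airflow's actual setup: the flattened key layout and the English values are assumptions, while the Arabic values are taken from the `ar` locale files added in this PR.
+
+```typescript
+import i18next from "i18next";
+
+// Illustrative inline resources; the real UI loads them from
+// public/i18n/locales/<lang>/*.json instead.
+i18next
+  .init({
+    lng: "ar",
+    fallbackLng: "en",
+    resources: {
+      en: {
+        translation: {
+          // English only distinguishes the "one" and "other" plural forms.
+          deleteConnection_one: "Delete {{count}} connection",
+          deleteConnection_other: "Delete {{count}} connections",
+        },
+      },
+      ar: {
+        translation: {
+          // Arabic has six plural categories; i18next picks the suffix via
+          // Intl.PluralRules, so every form must exist in the locale file.
+          deleteConnection_zero: "لا يوجد أي موَّصل للحذف",
+          deleteConnection_one: "حذف موَّصل واحد",
+          deleteConnection_two: "حذف موَّصلين",
+          deleteConnection_few: "حذف {{count}} موَّصلات",
+          deleteConnection_many: "حذف {{count}} موَّصل",
+          deleteConnection_other: "حذف {{count}} موَّصل",
+        },
+      },
+    },
+  })
+  .then(() => {
+    // Selects the Arabic "few" form for count = 3 and interpolates the count
+    // wherever the translation placed the placeholder.
+    console.log(i18next.t("deleteConnection", { count: 3 }));
+  });
+```
+
+Because the `{{count}}` placeholder can sit anywhere in the translated string, keeping it intact is precisely what allows each language to use its own word order.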
+
+### 5.8. Version release
+
+- Release managers MUST follow the requirements for releasing changes in supported locales defined in the [Release Management Policy](../../../../../../dev/README_RELEASE_AIRFLOW.md).
+
+## 6. Procedures
+
+### 6.1. Approval of ownership candidates
+
+- The designated code owner should post a thread to the dev list, requesting the approval of:
+  - Introducing a new locale (including a link to the PR).
+  - Translation owner(s) in the suggested locale, in the case of non-committer (sponsored) candidates.
+- Within the thread, the code owner should demonstrate that the translation owner is suitable for the role, according to the requirements in section 5.3.
+- Approval of any translation owner who is not a committer requires at least one binding vote from a PMC member, and no objections from other committers/PMC members.
+- Approval of any translation owner who is also a code owner (committer) does not need to be voted on.
+
+### 6.2. Approval of a new locale
+
+The following steps outline the process for approving a new locale to be added to the supported locales:
+
+- Creating a PR for adding the suggested locale to the codebase ([see example](https://github.com/apache/airflow/pull/51258/files)), which includes:
+  - The locale files (translated according to the guidelines) in the `airflow-core/src/airflow/ui/public/i18n/locales/<lang>` directory, where `<lang>` is the code of the language according to the ISO 639-1 standard (e.g., `fr` for French). Languages with regional variants should be handled in separate directories, where the name is suffixed with `-<region>`, and `<region>` is the variant that follows ISO 3166-1 or UN M.49 codes in lowercase (e.g., `zh-tw` for Taiwanese Mandarin).
+  - Making the required modifications in `airflow-core/src/airflow/ui/src/i18n/config.ts` ([see example](https://github.com/apache/airflow/pull/51258/files#diff-bfb4d5fafd26d206fb4a545a41ba303f33d15a479d21e0a726fd743bdf9717ff)); an illustrative sketch of this step is shown after this section.
+  - Changes to the `.github/CODEOWNERS` file to include the designated code owner(s) and translation owner(s) for the new locale, considering the following:
+    - A code owner who is also a translation sponsor should be indicated in a comment as well.
+    - If the PR author is neither eligible nor willing to take on both of these roles, they should suggest relevant candidates for the missing role(s), or call for volunteers.
+- Applying the procedure in section 6.1 to approve the identities of the code owner(s) and the translation owner(s).
+- Only after the steps above are completed may the PR for the new translation be merged (per the requirements in section 5.7).
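+As an illustration of the `config.ts` step above, the following sketch shows how a new locale is typically wired into an i18next setup. It is a hedged approximation, not the actual contents of Airflow's `config.ts`: the option names come from the public i18next, react-i18next, and i18next-http-backend APIs, while the `supportedLanguages` list, the namespaces, and the load path are assumptions based on the repository layout.
+
+```typescript
+import i18n from "i18next";
+import LanguageDetector from "i18next-browser-languagedetector";
+import HttpBackend from "i18next-http-backend";
+import { initReactI18next } from "react-i18next";
+
+// Assumed list of locale codes; adding a new locale means appending its code
+// here once its JSON files exist under public/i18n/locales/<lang>/.
+const supportedLanguages = ["en", "ar", "fr", "zh-TW"];
+
+i18n
+  .use(HttpBackend) // fetch the JSON files at runtime
+  .use(LanguageDetector) // pick up the browser language when available
+  .use(initReactI18next)
+  .init({
+    fallbackLng: "en", // the default locale backs any missing key
+    supportedLngs: supportedLanguages,
+    ns: ["common", "admin", "assets", "browse"], // one namespace per JSON file
+    defaultNS: "common",
+    backend: { loadPath: "/i18n/locales/{{lng}}/{{ns}}.json" },
+  });
+
+export default i18n;
+```
+
+Without this registration step, the locale files would typically exist on disk while the new language never becomes selectable in the UI, which is why the PR checklist above calls it out explicitly.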
+
+### 6.3. Translation conflict resolution
+
+When a translation conflict arises in a locale-related PR, the following steps will be taken in order:
+
+- The involved parties should first try to reach a consensus through discussion in the PR.
+- If no consensus is reached, a translation owner may decide the outcome.
+- If multiple translation owners are involved and cannot reach consensus, the code owner will decide. If the code owner is sponsored,
+they should base their decision on a neutral source (e.g., a third-party opinion, translation tool, or LLM).
+- If the conflict is between code owners, a PMC member will be involved to resolve the conflict.
+
+### 6.4. Relinquishing translation/code ownership
+
+- When a code owner asks to relinquish their role, or they become inactive, any other committer should:
+  - Raise a PR for removal of the previous code owner from the `.github/CODEOWNERS` file.
+  - Post a thread in the dev list announcing that they are stepping in as the code owner (either as a translation sponsor, or as a translation owner per the steps discussed in section 6.1).
+- When a translation owner asks to relinquish their role, or they become inactive, and there are no other active translation owners, the code owner should:
+  - Raise a PR for removal of the translation owner from the `.github/CODEOWNERS` file.
+  - Post a thread in the dev list stating that they are looking to assign someone else as the translation owner within 30 days.
+  - If a replacement is found within this time, they should be approved according to section 6.1.
+  - Otherwise, the code owner should raise a vote in the dev list for the removal of the translation from the codebase (a 7-day vote; PMC and committer votes are counted as binding).
+
+### 6.5. Demonstrating language proficiency
+
+Language proficiency for translation owners can be demonstrated through any of the following means:
+
+- Communications in open-source projects, social media, mailing lists, forums, or any other platforms in the target language.
+- Direct communication with a proficient committer in the target language.
+- Official language certifications (this is not a mandatory requirement).
+
+## 7. Standards & guidelines
+
+> [!CAUTION]
+> Usage of language that defies Apache Airflow's [code of conduct](http://airflow.apache.org/code-of-conduct/) is prohibited under any circumstances.
+
+- Translations should be based on the default language (English). When translating into a language for which a similar translation is already supported
+(e.g., Portuguese vs. Spanish), the other language may be used as a reference, but the default language (English) should still be the primary source for translations.
+- Translations should be accurate, maintaining the original meaning and intent.
+- Translations should be complete, covering all terms and phrases in the default language.
+- Translation of technical terminology should be consistent (for example: Dag, Task, Operator, etc.).
+- Language should be polite and neutral in tone.
+- Local conventions should be considered (e.g., date formats, number formatting, formal vs. informal tone, etc.).
+  - If local conventions require deviation from any of these guidelines, exceptions may be requested via a PR or a thread in the dev list.
+- Formatting, placeholders, and variable substitutions must be preserved.
+
+## 8. Tools & resources
+
+### 8.1. Checking completeness of i18n files
+
+All files:
+
+```bash
+uv run dev/i18n/check_translations_completeness.py
+```
+
+Files for specific languages:
+
+```bash
+uv run dev/i18n/check_translations_completeness.py --language <lang>
+```
+
+Where `<lang>` is the code of the language you want to check, e.g., `en`, `fr`, `de`, etc.
+
+Adding missing translations (with `TODO: translate` prefix):
+
+```bash
+uv run dev/i18n/check_translations_completeness.py --language <lang> --add-missing
+```
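+To illustrate what the `--add-missing` pass produces, here is a hypothetical before/after fragment of a locale file, shown as TypeScript literals. The key names and the English value are invented for the example; per section 9, missing entries are filled in with a `TODO: translate: ENGLISH VERSION` placeholder so that they are easy to grep for and review before a release.
+
+```typescript
+// Before: "edit" exists in the default (English) locale but not in this one.
+const before = {
+  connections: {
+    add: "<already translated value>",
+  },
+};
+
+// After running the script with --add-missing: the English text is copied in
+// under a TODO prefix, marking the entry as pending translation.
+const after = {
+  connections: {
+    add: "<already translated value>",
+    edit: "TODO: translate: Edit Connection",
+  },
+};
+```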
+## 9. Compliance & enforcement
+
+> [!NOTE]
+> As of the time of writing, this policy is not enforced by any automated checks.
+> The following describes the desired future state of compliance and enforcement.
+
+- Automated checks SHOULD periodically verify that all languages have corresponding entries for new terms in the default language. When translations are missing, the relevant code owners should be notified.
+- Automated checks SHOULD allow a person doing translation to select the language and aid them in adding new translations so that they do not have to compare files manually. Possibly this can be done by adding `--add-missing` to the verification script so that it adds new entries with `TODO: translate: ENGLISH VERSION`, together with a pre-commit check that does not allow such `TODO:` entries to be committed.
+
+## 10. Exceptions
+
+If any exceptions to this policy are needed, they MUST be discussed and approved by voting in the dev list beforehand.
+
+## 11. Review and updates
+
+This policy will be reviewed and updated as needed to ensure it remains relevant and effective.
+Depending on the nature of the change, suggested updates might need to be discussed and approved by voting in the dev list.
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/admin.json
new file mode 100644
index 0000000000000..21e002ba0cd6a
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/admin.json
@@ -0,0 +1,199 @@
+{
+  "columns": {
+    "description": "الوصف",
+    "key": "المفتاح",
+    "name": "الاسم",
+    "value": "القيمة"
+  },
+  "config": {
+    "columns": {
+      "section": "القسم"
+    },
+    "title": "Airflow إعدادات"
+  },
+  "connections": {
+    "add": "إضافة موَّصل",
+    "columns": {
+      "connectionId": "معرِّف الموَّصل",
+      "connectionType": "نوع الموَّصل",
+      "host": "المضيف",
+      "port": "المنفذ"
+    },
+    "connection_few": "موَّصل",
+    "connection_many": "موَّصلات",
+    "connection_one": "موَّصل",
+    "connection_other": "موَّصلات",
+    "connection_two": "موَّصلان",
+    "connection_zero": "لا يوجد أي موصل",
+    "delete": {
+      "deleteConnection_few": "حذف {{count}} موَّصلات",
+      "deleteConnection_many": "حذف {{count}} موَّصل",
+      "deleteConnection_one": "حذف موَّصل واحد",
+      "deleteConnection_other": "حذف {{count}} موَّصل",
+      "deleteConnection_two": "حذف موَّصلين",
+      "deleteConnection_zero": "لا يوجد أي موَّصل للحذف",
+      "firstConfirmMessage_few": "أنت على وشك حذف الموَّصلات التالية:",
+      "firstConfirmMessage_many": "أنت على وشك حذف الموَّصلات التالية:",
+      "firstConfirmMessage_one": "أنت على وشك حذف الموَّصل التالي:",
+      "firstConfirmMessage_other": "أنت على وشك حذف الموَّصلات التالية:",
+      "firstConfirmMessage_two": "أنت على وشك حذف الموَّصلين التاليين:",
+      "firstConfirmMessage_zero": "لا يوجد أي موصل للحذف",
+      "title": "حذف الموَّصل"
+    },
+    "edit": "تعديل الموَّصل",
+    "form": {
+      "connectionIdRequired": "مطلوب معرِّف الموَّصل",
+      "connectionIdRequirement": "لا يمكن أن يحتوي معرِّف الموَّصل على مسافات فقط",
+      "connectionTypeRequired": "مطلوب نوع الموَّصل",
+      "extraFields": "حقول إضافية",
+      "extraFieldsJson": "JSON حقول إضافية بصيغة",
+      "helperText": "نوع الموَّصل مفقود؟ تأكد من تثبيت حزمة Airflow المناسبة.",
+      "helperTextForRedactedFields": "لن تتغير الحقول المحجوبة ('***') إذا لم يتم تعديلها.",
+      "selectConnectionType": "اختر نوع الموَّصل",
+      "standardFields": "حقول قياسية"
+    },
+    "nothingFound": {
+      "description": "الموصلات المعرّفة عبر متغيرات البيئة أو مدراء الأسرار غير مدرجة هنا.",
+      "documentationLink": "تعلم المزيد في وثائق Airflow.",
+      "learnMore": "يتم حل هذه المشاكل في وقت
التشغيل وهي غير مرئية في واجهة المستخدم", + "title": "لا توجد موَّصلات متاحة" + }, + "searchPlaceholder": "البحث عن موَّصلات", + "test": "اختبار الموَّصل", + "testDisabled": "اختبار الموَّصل معطل. تواصل مع المسؤول لتفعيله.", + "typeMeta": { + "error": "فشل في استرداد نوع الموَّصل", + "standardFields": { + "description": "الوصف", + "host": "المضيف", + "login": "معرف الدخول", + "password": "كلمة المرور", + "port": "المنفذ", + "url_schema": "المخطط" + } + } + }, + "deleteActions": { + "button": "حذف", + "modal": { + "confirmButton": "نعم، حذف الموَّصلات", + "secondConfirmMessage": "هذا الإجراء دائم و لا يمكن التراجع عنه.", + "thirdConfirmMessage": "هل أنت متأكد أنك تريد المواصلة؟" + }, + "selected": "المحدد", + "tooltip": "حذف الموَّصلات المحددة" + }, + "formActions": { + "reset": "إعادة تعيين", + "save": "حفظ" + }, + "plugins": { + "columns": { + "source": "المصدر" + }, + "importError_few": "خطأ في استيراد المكوّن الإضافي", + "importError_many": "أخطاء في استيراد المكوّن الإضافي", + "importError_one": "خطأ في استيراد المكوّن الإضافي", + "importError_other": "أخطاء في استيراد المكوّن الإضافي", + "importError_two": "خطأان في استيراد المكوّن الإضافي", + "importError_zero": "لا يوجد أي خطأ في استيراد المكوّن الإضافي", + "searchPlaceholder": "البحث عن الملفات" + }, + "pools": { + "add": "إضافة حصص", + "deferredSlotsIncluded": "شمل الحصص المؤجلة", + "delete": { + "title": "حذف مجموعة", + "warning": "هذا الإجراء سيؤدي إلى حذف المجموعة المحددة و من الممكن أن يؤثر على المهام الجارية." + }, + "edit": "تعديل المجموعة", + "form": { + "checkbox": "حدد لشمل المهام المؤجلة عند حساب حصص المجموعة المفتوحة", + "description": "الوصف", + "includeDeferred": "شمل المهام المؤجلة", + "nameMaxLength": "الاسم يمكن أن يحتوي على 250 حرف كحد أقصى", + "nameRequired": "الاسم مطلوب", + "slots": "حصص" + }, + "noPoolsFound": "لا توجد مجموعات موارد متاحة", + "pool_few": "مجموعات", + "pool_many": "مجموعة", + "pool_one": "مجموعة", + "pool_other": "مجموعة", + "pool_two": "مجموعتان", + "pool_zero": "لا يوجد أي مجموعة", + "searchPlaceholder": "البحث عن مجموعات", + "sort": { + "asc": "الاسم (A-Z)", + "desc": "الاسم (Z-A)", + "placeholder": "الفرز حسب" + } + }, + "providers": { + "columns": { + "packageName": "اسم الحزمة", + "version": "الإصدار" + } + }, + "variables": { + "add": "إضافة متغير", + "columns": { + "isEncrypted": "مُشفر" + }, + "delete": { + "deleteVariable_few": "حذف {{count}} متغيرات", + "deleteVariable_many": "حذف {{count}} متغير", + "deleteVariable_one": "حذف متغير واحد", + "deleteVariable_other": "حذف {{count}} متغير", + "deleteVariable_two": "حذف متغيرين", + "deleteVariable_zero": "لا يوجد أي متغير للحذف", + "firstConfirmMessage_few": "أنت على وشك حذف المتغيرات التالية:", + "firstConfirmMessage_many": "أنت على وشك حذف المتغيرات التالية:", + "firstConfirmMessage_one": "أنت على وشك حذف المتغير التالي:", + "firstConfirmMessage_other": "أنت على وشك حذف المتغيرات التالية:", + "firstConfirmMessage_two": "أنت على وشك حذف المتغيرين التاليين:", + "firstConfirmMessage_zero": "لا يوجد أي متغير للحذف", + "title": "حذف المتغير", + "tooltip": "حذف المتغيرات المحددة" + }, + "edit": "تعديل المتغير", + "export": "تصدير", + "exportTooltip": "تصدير المتغيرات المحددة", + "form": { + "invalidJson": "JSON غير صالح", + "keyMaxLength": "المفتاح يمكن أن يحتوي على 250 حرف كحد أقصى", + "keyRequired": "المفتاح مطلوب", + "valueRequired": "القيمة مطلوبة" + }, + "import": { + "button": "استيراد", + "conflictResolution": "اختيار حل تعارض المتغير", + "errorParsingJsonFile": "خطأ في تحليل ملف JSON: حمل ملف JSON يحتوي علي متغيرات (مثال: {\"key\": 
\"value\", ...})", + "options": { + "fail": { + "description": "يفشل في استيراد المتغير في حالة وجود مسبقاً.", + "title": "فشل" + }, + "overwrite": { + "description": "يستبدل المتغير في حالة وجود تعارض.", + "title": "استبدال" + }, + "skip": { + "description": "يتخطى المتغير في حالة وجود مسبقاً", + "title": "تخطي" + } + }, + "title": "استيراد المتغيرات", + "upload": "تحميل ملف JSON", + "uploadPlaceholder": "حمل ملف JSON يحتوي علي متغيرات (مثال: {\"key\": \"value\", ...})" + }, + "noRowsMessage": "لا توجد متغيرات", + "searchPlaceholder": "البحث عن متغيرات", + "variable_few": "متغيرات", + "variable_many": "متغيرات", + "variable_one": "متغير", + "variable_other": "متغيرات", + "variable_two": "متغيران", + "variable_zero": "لا يوجد أي متغير" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/assets.json new file mode 100644 index 0000000000000..2e1cff6cff333 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/assets.json @@ -0,0 +1,30 @@ +{ + "consumingDags": "Dags المستهلكة", + "createEvent": { + "button": "إنشاء حدث", + "manual": { + "description": "إنشاء الحدث لأصل مباشرةً", + "extra": "اضافات حدث الأصل", + "label": "يدوياً" + }, + "materialize": { + "description": "قم بتشغيل ال Dag السابقة من هذا الأصل", + "descriptionWithDag": "قم بتشغيل ال Dag السابقة من هذا الأصل: {{dagName}}", + "label": "تجسيد", + "unpauseDag": "إلغاء إيقاف {{dagName}} عند التشغيل" + }, + "success": { + "manualDescription": "تم إنشاء حدث الأصل يدوياً بنجاح.", + "manualTitle": "تم إنشاء حدث الأصل", + "materializeDescription": "تم تشغيل ال Dag السابقة {{dagId}} بنجاح.", + "materializeTitle": "تجسيد الأصل" + }, + "title": "إنشاء حدث أصل لـ {{name}}" + }, + "group": "المجموعة", + "lastAssetEvent": "آخر حدث أصل", + "name": "الاسم", + "producingTasks": "المهام المنتجة", + "scheduledDags": "Dags المجدولة", + "searchPlaceholder": "البحث عن الأصول" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/browse.json new file mode 100644 index 0000000000000..775230d598cc5 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "طي جميع الJSON الإضافية", + "expandAllExtra": "توسيع جميع الJSON الإضافية" + }, + "columns": { + "event": "اصل", + "extra": "إضافي", + "user": "مستخدم", + "when": "متى" + }, + "title": "سجل المراجعة" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "مفتاح", + "value": "قيمة" + }, + "title": "(XCom) إكس كوم" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/common.json new file mode 100644 index 0000000000000..09f41b97402d0 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/common.json @@ -0,0 +1,346 @@ +{ + "admin": { + "Config": "تكوينات", + "Connections": "وصلات", + "Plugins": "إضافات", + "Pools": "مجموعة موارد", + "Providers": "حُزم", + "Variables": "متغيرات" + }, + "asset_few": "أصول", + "asset_many": "أصول", + "asset_one": "أصل", + "asset_other": "أصول", + "asset_two": "أصلان", + "asset_zero": "لا يوجد أي أصل", + "assetEvent_few": "وقائعات أصول", + "assetEvent_many": "وقائعات أصول", + "assetEvent_one": "واقعة أصل", + "assetEvent_other": "وقائعات أصول", + "assetEvent_two": "واقعتا أصل", + "assetEvent_zero": "لا يوجد أي واقعة أصل", + "backfill_few": "إعادات ملء", + "backfill_many": "إعادات ملء", + "backfill_one": 
"إعادة ملء", + "backfill_other": "إعادات ملء", + "backfill_two": "إعادتا ملء", + "backfill_zero": "لا يوجد أي إعادة ملء", + "browse": { + "auditLog": "سجل المراجعة", + "requiredActions": "إجراءات مطلوبة", + "xcoms": "(XComs) إكس كوم" + }, + "collapseDetailsPanel": "طي لوحة التفاصيل", + "createdAssetEvent_few": "تم إنشاء واقعات أصل", + "createdAssetEvent_many": "تم إنشاء واقعات أصل", + "createdAssetEvent_one": "تم إنشاء واقعة أصل", + "createdAssetEvent_other": "تم إنشاء واقعات أصل", + "createdAssetEvent_two": "تم إنشاء واقعتا أصل", + "createdAssetEvent_zero": "لم يتم إنشاء أي واقعة أصل", + "dag_few": "Dags", + "dag_many": "Dags", + "dag_one": "Dag", + "dag_other": "Dags", + "dag_two": "2 Dags", + "dag_zero": "لا يوجد أي Dag", + "dagDetails": { + "catchup": "إلحاق", + "concurrency": "تزامن", + "dagRunTimeout": "مهلة تشغيل الDag", + "defaultArgs": "متغيرات افتراضية", + "description": "وصف", + "documentation": "توثيق الDag", + "fileLocation": "موقع الملف", + "hasTaskConcurrencyLimits": "لديه حدود تزامن المهام", + "lastExpired": "انتهت صلاحيته آخر مرة في", + "lastParsed": "تم تحليله آخر مرة في", + "latestDagVersion": "آخر إصدار لDag", + "latestRun": "آخر تشغيل", + "maxActiveRuns": "الحد الأقصى للتشغيلات النشطة", + "maxActiveTasks": "الحد الأقصى للمهام النشطة", + "maxConsecutiveFailedDagRuns": "الحد الأقصى للتشغيلات الفاشلة المتتالية للDag", + "nextRun": "التشغيل التالي", + "owner": "المالك", + "params": "معاملات", + "schedule": "جدولة", + "tags": "وسوم" + }, + "dagId": "معرف Dag", + "dagRun": { + "conf": "تكوين", + "dagVersions": "إصدار(ات) Dag", + "dataIntervalEnd": "نهاية فترة البيانات", + "dataIntervalStart": "بداية فترة البيانات", + "lastSchedulingDecision": "آخر قرار جدولة", + "queuedAt": "في الطابور في", + "runAfter": "تشغيل بعد", + "runType": "نوع التشغيل", + "sourceAssetEvent": "واقعة أصل مصدر", + "triggeredBy": "تم إطلاقه بواسطة", + "triggeringUser": "المستخدم صاحب الاطلاق" + }, + "dagRun_few": "مثيلات تشغيل لDag", + "dagRun_many": "مثيلات تشغيل لDag", + "dagRun_one": "مثيل تشغيل لDag", + "dagRun_other": "مثيلات تشغيل لDag", + "dagRun_two": "مثيلا تشغيل لDag", + "dagRun_zero": "لا يوجد أي مثيل تشغيل لDag", + "dagRunId": "معرف تشغيل الDag", + "dagWarnings": "تحذيرات الDag", + "defaultToGraphView": "عرض الرسم البياني تلقائياً", + "defaultToGridView": "عرض الشبكة تلقائياً", + "direction": "الاتجاه", + "docs": { + "documentation": "توثيق", + "githubRepo": "مستودع GitHub", + "restApiReference": "مرجع REST API" + }, + "duration": "المدة", + "endDate": "تاريخ الانتهاء", + "error": { + "back": "رجوع", + "defaultMessage": "حدث خطأ غير متوقع", + "home": "الصفحة الرئيسية", + "notFound": "غير موجود", + "title": "خطأ" + }, + "expand": { + "collapse": "طي", + "expand": "توسيع", + "hotkey": "e", + "tooltip": "اضغط {{hotkey}} للتوسيع" + }, + "expression": { + "all": "الكل", + "and": "و", + "any": "أي", + "or": "أو" + }, + "logicalDate": "التاريخ المنطقي", + "logout": "تسجيل الخروج", + "logoutConfirmation": "أنت على وشك تسجيل الخروج.", + "mapIndex": "فهرس الخريطة", + "modal": { + "cancel": "إلغاء", + "confirm": "تأكيد", + "delete": { + "button": "حذف", + "confirmation": "هل أنت متأكد من أنك تريد حذف {{resourceName}}؟ لا يمكن التراجع عن هذا الإجراء." 
+ } + }, + "nav": { + "admin": "إدارة", + "assets": "أصول", + "browse": "تصفح", + "dags": "Dags", + "docs": "وثائق", + "home": "الصفحة الرئيسية", + "legacyFabViews": "عروض قديمة", + "plugins": "إضافات", + "security": "الأمان" + }, + "noItemsFound": "لم يتم العثور على {{modelName}}", + "note": { + "add": "إضافة", + "dagRun": "تشغيل Dag", + "label": "وسم", + "placeholder": "أضف ملاحظة...", + "taskInstance": "مثيل المهمة" + }, + "pools": { + "deferred": "مؤجل", + "open": "مفتوح", + "pools_few": "مجمعات", + "pools_many": "مجمعات", + "pools_one": "مجمع", + "pools_other": "مجمعات", + "pools_two": "مجمعان", + "pools_zero": "لا يوجد أي مجمع", + "queued": "في الانتظار", + "running": "قيد التشغيل", + "scheduled": "مجدول" + }, + "runId": "معرف التشغيل", + "runTypes": { + "asset_triggered": "مُشغل بواسطة الأصل", + "backfill": "تعبئة رجعية", + "manual": "يدوي", + "scheduled": "مجدول" + }, + "scroll": { + "direction": { + "bottom": "الأسفل", + "top": "الأعلى" + }, + "tooltip": "اضغط {{hotkey}} للتمرير إلى {{direction}}" + }, + "seconds": "ثواني", + "security": { + "actions": "إجراءات", + "permissions": "صلاحيات", + "resources": "موارد", + "roles": "أدوار", + "users": "مستخدمون" + }, + "selectLanguage": "اختيار اللغة", + "showDetailsPanel": "إظهار لوحة التفاصيل", + "source": { + "hide": "إخفاء المصدر", + "hotkey": "s", + "show": "إظهار المصدر" + }, + "sourceAssetEvent_few": "واقعات أصل مصدر", + "sourceAssetEvent_many": "واقعات أصل مصدر", + "sourceAssetEvent_one": "واقعة أصل مصدر", + "sourceAssetEvent_other": "واقعات أصل مصدر", + "sourceAssetEvent_two": "واقعتا أصل مصدر", + "sourceAssetEvent_zero": "لا يوجد أي واقعة أصل مصدر", + "startDate": "تاريخ البدء", + "state": "الحالة", + "states": { + "deferred": "مؤجل", + "failed": "فشِل", + "no_status": "بلا حالة", + "none": "لا شيء", + "queued": "في الانتظار", + "removed": "محذوف", + "restarting": "إعادة التشغيل", + "running": "قيد التشغيل", + "scheduled": "مجدول", + "skipped": "متجاوز", + "success": "ناجح", + "up_for_reschedule": "جاهز لإعادة الجدولة", + "up_for_retry": "جاهز للمحاولة مرة أخرى", + "upstream_failed": "فشِل في المهام السابقة" + }, + "switchToDarkMode": "تبديل إلى الوضع الداكن", + "switchToLightMode": "تبديل إلى الوضع الفاتح", + "table": { + "completedAt": "اكتمل في", + "createdAt": "تم الإنشاء في", + "filterByTag": "تصفية الDags حسب الوسم", + "filterColumns": "تصفية أعمدة الجدول", + "filterReset_few": "مسح الفلاتر", + "filterReset_many": "مسح الفلاتر", + "filterReset_one": "مسح الفلتر", + "filterReset_other": "مسح الفلاتر", + "filterReset_two": "مسح الفلترين", + "filterReset_zero": "لا يوجد أي فلتر لمسحه", + "from": "من", + "maxActiveRuns": "الحد الأقصى للتشغيلات النشطة", + "noTagsFound": "لم يتم العثور على وسوم", + "tagMode": { + "all": "الكل", + "any": "اي شيء" + }, + "tagPlaceholder": "تصفية حسب الوسم", + "to": "إلى" + }, + "task": { + "documentation": "وثائق المهمة", + "lastInstance": "آخر مثيل", + "operator": "المشغل", + "triggerRule": "احكام التشغيل" + }, + "task_few": "مهام", + "task_many": "مهام", + "task_one": "مهمة", + "task_other": "مهام", + "task_two": "مهمتان", + "task_zero": "لا يوجد أي مهمة", + "taskId": "معرف المهمة", + "taskInstance": { + "dagVersion": "إصدار الDag", + "executor": "منفذ", + "executorConfig": "تكوين المنفذ", + "hostname": "اسم المضيف", + "maxTries": "الحد الأقصى للمحاولات", + "pid": "معرف العملية", + "pool": "مجموعة الموارد", + "poolSlots": "حصص مجموعة الموارد", + "priorityWeight": "وزن الأولوية", + "queue": "طابور", + "queuedWhen": "في الطابور في", + "scheduledWhen": "مجدول في", + "triggerer": { + "assigned": 
"مُطلِق مُخصص", + "class": "فئة المُطلِق", + "createdAt": "تاريخ إنشاء المُطلِق", + "id": "معرف المُطلِق", + "latestHeartbeat": "آخر نبضة للمُطلِق", + "title": "معلومات المُطلِق" + }, + "unixname": "اسم الUnix" + }, + "taskInstance_few": "مثيلات المهمة", + "taskInstance_many": "مثيلات المهمة", + "taskInstance_one": "مثيل المهمة", + "taskInstance_other": "مثيلات المهمة", + "taskInstance_two": "مثيلا المهمة", + "taskInstance_zero": "لا يوجد أي مثيل للمهمة", + "timeRange": { + "last12Hours": "آخر 12 ساعة", + "last24Hours": "آخر 24 ساعة", + "lastHour": "الساعة الماضية", + "pastWeek": "الأسبوع الماضي" + }, + "timestamp": { + "hide": "إخفاء الأختام الزمنية", + "hotkey": "t", + "show": "إظهار الأختام الزمنية" + }, + "timezone": "المنطقة الزمنية", + "timezoneModal": { + "current-timezone": "المنطقة الزمنية الحالية", + "placeholder": "اختر المنطقة الزمنية", + "title": "اختيار المنطقة الزمنية", + "utc": "UTC (التوقيت العالمي المنسق)" + }, + "toaster": { + "bulkDelete": { + "error": "فشل طلب حذف الرُزمة ل{{resourceName}}", + "success": { + "description": "تم حذف {{count}} {{resourceName}} بنجاح. المفاتيح: {{keys}}", + "title": "تم إرسال طلب الحذف المجمع ل{{resourceName}}" + } + }, + "create": { + "error": "فشل طلب الإنشاء {{resourceName}}", + "success": { + "description": "تم إنشاء {{resourceName}} بنجاح.", + "title": "تم إرسال طلب الإنشاء {{resourceName}}" + } + }, + "delete": { + "error": "فشل طلب حذف {{resourceName}}", + "success": { + "description": "تم حذف {{resourceName}} بنجاح.", + "title": "تم إرسال طلب الحذف {{resourceName}}" + } + }, + "import": { + "error": "فشل طلب استيراد {{resourceName}}", + "success": { + "description": "تم استيراد {{count}} {{resourceName}} بنجاح.", + "title": "تم إرسال طلب الاستيراد {{resourceName}}" + } + }, + "update": { + "error": "فشل طلب تحديث {{resourceName}}", + "success": { + "description": "تم تحديث {{resourceName}} بنجاح.", + "title": "تم إرسال طلب التحديث {{resourceName}}" + } + } + }, + "total": "إجمالي {{state}}", + "triggered": "مُشغل", + "tryNumber": "محاولة رقم", + "user": "المستخدم", + "wrap": { + "hotkey": "w", + "tooltip": "اضغط {{hotkey}} لتبديل الالتفاف", + "unwrap": "إلغاء التفاف", + "wrap": "التفاف" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/components.json new file mode 100644 index 0000000000000..ab1f4b4a36020 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/components.json @@ -0,0 +1,164 @@ +{ + "backfill": { + "affected_few": "{{count}} مهام ستُشغّل.", + "affected_many": "{{count}} مهمة ستُشغّل.", + "affected_one": "مهمة واحدة ستُشغّل.", + "affected_other": "{{count}} مهمة ستُشغل.", + "affected_two": "مهمتان ستُشغّلان.", + "affected_zero": "لا توجد أي مهمة ستُشغَّل.", + "affectedNone": "لا توجد مهام تطابق المعايير المحددة.", + "allRuns": "جميع التشغيلات", + "backwards": "تشغيل رجعي", + "dateRange": "نطاق التاريخ", + "dateRangeFrom": "من", + "dateRangeTo": "إلى", + "errorStartDateBeforeEndDate": "يجب أن يكون تاريخ البدء قبل تاريخ الانتهاء", + "maxRuns": "الحد الأقصى للتشغيلات النشطة", + "missingAndErroredRuns": "تشغيلات مفقودة وخاطئة", + "missingRuns": "تشغيلات مفقودة", + "reprocessBehavior": "اعادة معالجة السلوك", + "run": "تشغيل التعبئة الرجعية", + "selectDescription": "تشغيل هذا Dag لنطاق من التواريخ", + "selectLabel": "تعبئة رجعية", + "title": "تشغيل التعبئة الرجعية", + "toaster": { + "success": { + "description": "تم تشغيل مهام التعبئة الرجعية بنجاح.", + "title": "تم إنشاء التعبئة الرجعية" + } + }, + "tooltip": "التعبئة 
الرجعية تتطلب جدول زمني", + "unpause": "إلغاء إيقاف {{dag_display_name}} عند التشغيل", + "validation": { + "datesRequired": "يجب توفير بيانات كل من تاريخ بدء فترة وتاريخ الانتهاء.", + "startBeforeEnd": "بيانات فترة تاريخ البدء يجب ان تكون من او قبل تاريخ الانتهاء. " + } + }, + "banner": { + "backfillInProgress": "التعبئة الرجعية قيد التنفيذ", + "cancel": "إلغاء التعبئة الرجعية", + "pause": "إيقاف التعبئة الرجعية", + "unpause": "إلغاء إيقاف التعبئة الرجعية" + }, + "clipboard": { + "copy": "نسخ" + }, + "close": "إغلاق", + "configForm": { + "advancedOptions": "خيارات متقدمة", + "configJson": "تكوين JSON", + "invalidJson": "تنسيق JSON غير صالح: {{errorMessage}}" + }, + "dagWarnings": { + "error_few": "أخطاء", + "error_many": "أخطاء", + "error_one": "خطأ", + "error_other": "أخطاء", + "error_two": "خطآن", + "error_zero": "لا يوجد أي خطأ", + "errorAndWarning": "1 خطأ و{{warningText}}", + "warning_few": "{{count}} تحذيرات", + "warning_many": "{{count}} تحذير", + "warning_one": "1 تحذير", + "warning_other": "{{count}} تحذير", + "warning_two": "تحذيران", + "warning_zero": "لا يوجد أي تحذير" + }, + "durationChart": { + "duration": "المدة (بالثواني)", + "lastDagRun_few": "آخر {{count}} تشغيلات Dag", + "lastDagRun_many": "آخر {{count}} تشغيل Dag", + "lastDagRun_one": "آخر تشغيل Dag", + "lastDagRun_other": "آخر {{count}} تشغيل Dag", + "lastDagRun_two": "آخر تشغيلين Dag", + "lastDagRun_zero": "لا يوجد أي تشغيل Dag", + "lastTaskInstance_few": "آخر {{count}} مثيلات مهمة", + "lastTaskInstance_many": "آخر {{count}} مثيل مهمة", + "lastTaskInstance_one": "آخر مثيل مهمة", + "lastTaskInstance_other": "آخر {{count}} مثيل مهمة", + "lastTaskInstance_two": "آخر مثيلين مهمة", + "lastTaskInstance_zero": "لا يوجد أي مثيل مهمة", + "queuedDuration": "مدة الانتظار في الطابور", + "runAfter": "تشغيل بعد", + "runDuration": "مدة التشغيل" + }, + "fileUpload": { + "files_few": "{{count}} ملفات", + "files_many": "{{count}} ملف", + "files_one": "ملف", + "files_other": "{{count}} ملف", + "files_two": "ملفان", + "files_zero": "لا يوجد أي ملف" + }, + "flexibleForm": { + "placeholder": "اختر قيمة", + "placeholderArray": "أدخل كل سلسلة في سطر جديد", + "placeholderExamples": "ابدأ الكتابة لرؤية الخيارات", + "placeholderMulti": "اختر قيمة أو أكثر", + "validationErrorArrayNotArray": "القيمة يجب أن تكون مصفوفة.", + "validationErrorArrayNotNumbers": "جميع العناصر في المصفوفة يجب أن تكون أرقامًا.", + "validationErrorArrayNotObject": "جميع العناصر في المصفوفة يجب أن تكون كائنات.", + "validationErrorRequired": "هذا الحقل مطلوب" + }, + "graph": { + "directionDown": "من الأعلى إلى الأسفل", + "directionLeft": "من اليمين إلى اليسار", + "directionRight": "من اليسار إلى اليمين", + "directionUp": "من الأسفل إلى الأعلى", + "downloadImage": "تحميل صورة الرسم البياني", + "downloadImageError": "فشل تحميل صورة الرسم البياني.", + "downloadImageErrorTitle": "فشل التحميل", + "otherDagRuns": "+تشغيلات Dag أخرى", + "taskCount_few": "{{count}} مهام", + "taskCount_many": "{{count}} مهمة", + "taskCount_one": "{{count}} مهمة", + "taskCount_other": "{{count}} مهمة", + "taskCount_two": "{{count}} مهمتان", + "taskCount_zero": "لا يوجد أي مهمة", + "taskGroup": "مجموعة المهام" + }, + "limitedList": "+{{count}} المزيد", + "logs": { + "file": "ملف", + "location": "سطر {{line}} في {{name}}" + }, + "reparseDag": "إعادة تحليل Dag", + "sortedAscending": "الترتيب تصاعدي", + "sortedDescending": "الترتيب تنازلي", + "sortedUnsorted": "غير مرتب", + "taskTries": "محاولات المهمة", + "toggleCardView": "عرض البطاقة", + "toggleTableView": "عرض الجدول", + "triggerDag": { + "button": 
"تشغيل", + "loading": "جارٍ تحميل معلومات Dag...", + "loadingFailed": "فشل تحميل معلومات Dag. يرجى المحاولة مرة أخرى.", + "runIdHelp": "اختياري - سيتم توليده تلقائيًا إذا لم يتم توفيره.", + "selectDescription": "تشغيل عملية واحدة من هذا Dag", + "selectLabel": "تشغيلة واحدة", + "title": "تشغيل Dag", + "toaster": { + "success": { + "description": "تم تشغيل عملية Dag بنجاح.", + "title": "تم تشغيل Dag" + } + }, + "unpause": "إلغاء إيقاف {{dagDisplayName}} عند التشغيل" + }, + "trimText": { + "details": "التفاصيل", + "empty": "فارغ", + "noContent": "لا توجد محتويات متاحة." + }, + "versionDetails": { + "bundleLink": "رابط الحزمة", + "bundleName": "اسم الحزمة", + "bundleVersion": "إصدار الحزمة", + "createdAt": "تاريخ الإنشاء", + "versionId": "معرف الإصدار" + }, + "versionSelect": { + "dagVersion": "إصدار Dag", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/dag.json new file mode 100644 index 0000000000000..800daf360b7e2 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/dag.json @@ -0,0 +1,141 @@ +{ + "allRuns": "جميع التشغيلات", + "blockingDeps": { + "dependency": "التبعية", + "reason": "السبب", + "title": "التبعيات التي تمنع جدولة المهمة" + }, + "code": { + "bundleUrl": "حزمة URL", + "noCode": "'لا توجد 'كود", + "parsedAt": "تم تحليلها في:" + }, + "extraLinks": "روابط إضافية", + "grid": { + "buttons": { + "resetToLatest": "إعادة تعيين إلى الأحدث", + "toggleGroup": "تفعيل المجموعة" + } + }, + "header": { + "buttons": { + "advanced": "متقدم", + "dagDocs": "وثائق Dag" + } + }, + "logs": { + "allLevels": "جميع مستويات السجل", + "allSources": "جميع المصادر", + "critical": "حرِج", + "debug": "تصحيح", + "error": "خطأ", + "fullscreen": { + "button": "ملء الشاشة", + "tooltip": "اضغط على {{hotkey}} للدخول إلى وضع ملء الشاشة" + }, + "info": "معلومات", + "noTryNumber": "لا يوجد رقم محاولة", + "settings": "سجل الاعدادات", + "viewInExternal": "عرض السجلات في {{name}} (المحاولة {{attempt}})", + "warning": "تحذير" + }, + "navigation": { + "jump": "الانتقال: Shift+{{arrow}}", + "navigation": "التنقل: {{arrow}}", + "toggleGroup": "تبديل المجموعة: المسافة" + }, + "overview": { + "buttons": { + "failedRun_few": "فشل تشغيلات", + "failedRun_many": "فشل تشغيلات", + "failedRun_one": "فشل تشغيل", + "failedRun_other": "فشل تشغيلات", + "failedRun_two": "فشل تشغيلين", + "failedRun_zero": "لا يوجد أي فشل في التشغيل", + "failedTask_few": "فشل مهام", + "failedTask_many": "فشل مهام", + "failedTask_one": "فشل مهمة", + "failedTask_other": "فشل مهام", + "failedTask_two": "فشل مهمتين", + "failedTask_zero": "لا يوجد أي فشل في المهمة", + "failedTaskInstance_few": "فشل مثيلات مهمة", + "failedTaskInstance_many": "فشل مثيلات مهمة", + "failedTaskInstance_one": "فشل مثيل مهمة", + "failedTaskInstance_other": "فشل مثيلات مهمة", + "failedTaskInstance_two": "فشل مثيلين مهمة", + "failedTaskInstance_zero": "لا يوجد أي فشل في مثيلات مهمة" + }, + "charts": { + "assetEvent_few": "تم إنشاء واقعات أصل", + "assetEvent_many": "تم إنشاء واقعات أصل", + "assetEvent_one": "تم إنشاء واقعة أصل", + "assetEvent_other": "تم إنشاء واقعات أصل", + "assetEvent_two": "تم إنشاء واقعتين أصل", + "assetEvent_zero": "لا توجد أي واقعة أصل" + }, + "failedLogs": { + "title": "سجلات المهام الفاشلة الأخيرة", + "viewFullLogs": "عرض السجلات الكاملة" + } + }, + "panel": { + "buttons": { + "options": "خيارات", + "showGraph": "عرض الرسم البياني", + "showGrid": "عرض الشبكة" + }, + "dagRuns": { + "label": "عدد تشغيلات Dag" + }, + 
"dependencies": { + "label": "التبعيات", + "options": { + "allDagDependencies": "جميع تبعيات Dag", + "externalConditions": "الشروط الخارجية", + "onlyTasks": "المهام فقط" + }, + "placeholder": "التبعيات" + }, + "graphDirection": { + "label": "اتجاه الرسم البياني" + } + }, + "paramsFailed": "فشل في تحميل المعلمات", + "parse": { + "toaster": { + "error": { + "description": "فشل في إعادة تحليل Dag. يمكن أن يكون بسبب تحليل جاري حالياً", + "title": "فشل في إعادة تحليل Dag" + }, + "success": { + "description": "Dag سوف يعاد تحليله قريباً.", + "title": "تم إعادة تحليل Dag بنجاح" + } + } + }, + "tabs": { + "assetEvents": "أحداث الأصول", + "auditLog": "سجل التدقيق", + "backfills": "التعبئة الرجعية", + "code": "الكود", + "details": "التفاصيل", + "logs": "السجلات", + "mappedTaskInstances_few": "مثيلات مهمة [{{count}}]", + "mappedTaskInstances_many": "مثيل مهمة [{{count}}]", + "mappedTaskInstances_one": "مثيل مهمة [{{count}}]", + "mappedTaskInstances_other": "مثيل مهمة [{{count}}]", + "mappedTaskInstances_two": "مثيلان لمهمة [{{count}}]", + "mappedTaskInstances_zero": "لا توجد أي مثيل مهمة", + "overview": "نظرة عامة", + "renderedTemplates": "القوالب المعروضة", + "requiredActions": "إجراءات مطلوبة", + "runs": "تشغيلات", + "taskInstances": "مثيلات المهام", + "tasks": "المهام", + "xcom": "XCom" + }, + "taskGroups": { + "collapseAll": "طي جميع مجموعات المهام", + "expandAll": "توسيع جميع مجموعات المهام" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/dags.json new file mode 100644 index 0000000000000..b7f8e60bbd8b5 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/dags.json @@ -0,0 +1,97 @@ +{ + "assetSchedule": "تم تحديث {{count}} من أصل {{total}}", + "dagActions": { + "delete": { + "button": "حذف الDag", + "warning": "سيؤدي هذا إلى إزالة جميع البيانات الوصفية المرتبطة بالDag، بما في ذلك التشغيلات والمهام." + } + }, + "favoriteDag": "تفضيل الDag", + "filters": { + "allRunTypes": "جميع أنواع التشغيلات", + "allStates": "جميع الحالات", + "favorite": { + "all": "الكل", + "favorite": "تفضيل", + "unfavorite": "إلغاء التفضيل" + }, + "paused": { + "active": "مفعّل", + "all": "الكل", + "paused": "متوقف" + }, + "runIdPatternFilter": "بحث تشغيلات الDag", + "triggeringUserNameFilter": "البحث حسب المستخدم صاحب الاطلاق" + }, + "ownerLink": "رابط المالك لـ{{owner}}", + "runAndTaskActions": { + "affectedTasks": { + "noItemsFound": "لم يتم العثور على مهام.", + "title": "المهام المتأثرة: {{count}}" + }, + "clear": { + "button": "مسح {{type}}", + "buttonTooltip": "اضغط shift+c للمسح", + "error": "حدث خطأ أثناء المسح {{type}}", + "title": "مسح {{type}}" + }, + "delete": { + "button": "حذف {{type}}", + "dialog": { + "resourceName": "{{type}} {{id}}", + "title": "حذف {{type}}", + "warning": "سيؤدي هذا إلى إزالة جميع البيانات الوصفية المرتبطة بـ{{type}}." 
+ }, + "error": "حدث خطأ اثناء الحذف {{type}}", + "success": { + "description": "تم تنفيذ طلب الحذف {{type}} بنجاح.", + "title": "{{type}} حُذف بنجاح" + } + }, + "markAs": { + "button": "تمييز {{type}} كـ...", + "buttonTooltip": { + "failed": "اضغط shift+f لتمييز كفاشل", + "success": "اضغط shift+s لتمييز كناجح" + }, + "title": "تمييز {{type}} كـ{{state}}" + }, + "options": { + "downstream": "المهام اللاحقة", + "existingTasks": "مسح المهام الحالية", + "future": "المستقبل", + "onlyFailed": "مسح المهام الفاشلة فقط", + "past": "الماضي", + "queueNew": "إضافة مهام جديدة للانتظار", + "runOnLatestVersion": "تشغيل بآخر إصدار الحزمة", + "upstream": "المهام السابقة" + } + }, + "search": { + "advanced": "بحث متقدم", + "clear": " مسح", + "dags": "ابحث عن Dags", + "hotkey": "+K", + "tasks": "ابحث عن مهام" + }, + "sort": { + "displayName": { + "asc": "الترتيب حسب اسم العرض (A-Z)", + "desc": "الترتيب حسب اسم العرض (Z-A)" + }, + "lastRunStartDate": { + "asc": "الترتيب حسب تاريخ بداية آخر تشغيل (الأقدم-الأحدث)", + "desc": "الترتيب حسب تاريخ بداية آخر تشغيل (الأحدث-الأقدم)" + }, + "lastRunState": { + "asc": "الترتيب حسب حالة آخر تشغيل (أ-ي)", + "desc": "الترتيب حسب حالة آخر تشغيل (ي-أ)" + }, + "nextDagRun": { + "asc": "الترتيب حسب التشغيل التالي (الأقدم-الأحدث)", + "desc": "الترتيب حسب التشغيل التالي (الأحدث-الأقدم)" + }, + "placeholder": "الترتيب حسب" + }, + "unfavoriteDag": "إلغاء تفضيل الDag" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/dashboard.json new file mode 100644 index 0000000000000..e6fc009d0bc66 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/dashboard.json @@ -0,0 +1,53 @@ +{ + "favorite": { + "favoriteDags_few": "أول {{count}} Dags مفضلة", + "favoriteDags_many": "أول {{count}} Dags مفضلة", + "favoriteDags_one": "أول Dag مفضل", + "favoriteDags_other": "أول {{count}} Dags مفضلة", + "favoriteDags_two": "أول 2 Dags المفضلة", + "favoriteDags_zero": "لا توجد أي Dags مفضلة", + "noDagRuns": "لا توجد تشغيلات لهذا الDag", + "noFavoriteDags": "لا توجد مفضلات بعد. انقر على أيقونة النجمة بجانب Dag في القائمة لإضافته إلى مفضلاتك." 
+ }, + "group": "المجموعة", + "health": { + "dagProcessor": "معالج الDag", + "health": "الصحة", + "healthy": "سليم", + "lastHeartbeat": "آخر نبضة", + "metaDatabase": "قاعدة البيانات الوصفية", + "scheduler": "المُجَدْوِل", + "status": "الحالة", + "triggerer": "المُطلِق", + "unhealthy": "غير سليم" + }, + "history": "التاريخ", + "importErrors": { + "dagImportError_few": "أخطاء في استيراد Dag", + "dagImportError_many": "أخطاء في استيراد Dag", + "dagImportError_one": "خطأ واحد في استيراد Dag", + "dagImportError_other": "أخطاء في استيراد Dag", + "dagImportError_two": "خطآن في استيراد Dag", + "dagImportError_zero": "لا يوجد أي خطأ في استيراد Dag", + "searchByFile": "البحث بالملف", + "timestamp": "الختم الزمني" + }, + "managePools": "ادارة مجموعة الموارد", + "noAssetEvents": "لم يتم العثور على وقائع الأصول.", + "poolSlots": "حصص مجموعة الموارد", + "sortBy": { + "newestFirst": "الأحدث أولاً", + "oldestFirst": "الأقدم أولاً" + }, + "source": "المصدر", + "stats": { + "activeDags": "Dags النشطة", + "failedDags": "Dags الفاشلة", + "queuedDags": "Dags في الانتظار", + "requiredActions": "إجراءات مطلوبة", + "runningDags": "Dags قيد التشغيل", + "stats": "الإحصائيات" + }, + "uri": "Uri", + "welcome": "مرحباً" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ar/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/ar/hitl.json new file mode 100644 index 0000000000000..529387f85a14c --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ar/hitl.json @@ -0,0 +1,27 @@ +{ + "requiredAction_few": "إجراءات مطلوبة", + "requiredAction_many": "لإجراءات مطلوبة", + "requiredAction_one": "إجراء مطلوب", + "requiredAction_other": "إجراءات مطلوبة", + "requiredAction_two": "إجراءان مطلوبان", + "requiredAction_zero": "لا إجراءات مطلوبة", + "requiredActionState": "حالة الإجراء المطلوب", + "response": { + "error": "فشل في الاستجابة", + "optionsDescription": "اختر الخيارات لهذا مثيل المهمة", + "optionsLabel": "الخيارات", + "received": "تم استلام الاستجابة في ", + "respond": "الرد", + "success": "تمت الاستجابة بنجاح ل{{taskId}}", + "title": "مثيل مهمة يدوي - {{taskId}}" + }, + "state": { + "approvalReceived": "تم استلام الموافقة", + "approvalRequired": "تتطلب موافقة", + "choiceReceived": "تم استلام الاختيار", + "choiceRequired": "يتطلب اختيارًا", + "rejectionReceived": "تم استلام الرفض", + "responseReceived": "تم استلام الاستجابة", + "responseRequired": "تتطلب استجابة" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/README.md b/airflow-core/src/airflow/ui/public/i18n/locales/de/README.md new file mode 100644 index 0000000000000..f0e0767e70d26 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/README.md @@ -0,0 +1,124 @@ + + +# Deutsche UI Übersetzung für Apache Airflow + +Dieses Dokument beschreibt die Grundsätze der Übersetzung, die für die Deutsche +Sprache gewählt wurde. Es soll dokumentieren warum die Übersetzungen so gewählt +wurden und damit neben einer Begründung eine möglichst einheitliche Konsistenz +zukünftiger Übersetzungsiterationen ermöglichen. + +## Neutrale und formelle Anrede + +Im Deutschen wird im Vergleich zu der Englischen Sprache die förmliche- von der +normalen Anrede unterschieden +([Siehe: Wikipedia](https://de.wikipedia.org/wiki/Anrede)). Für die Deutsche +Übersetzung und unter der Annahme des "Nicht-Wissens" des Benutzerkreises wurde +die förmliche Anrede verwendet. 
+ +## Feststehende Terme + +Die folgenden Begriffe wurden bewusst nicht aus dem Englischen übersetzt: + +- Dag / Dags: Nach der Diskussion in der Devlist in + (["Airflow should deprecate the term "DAG" for end users"](https://lists.apache.org/thread/lktrzqkzrpvc1cyctxz7zxfmc0fwtq2j)) + und der globalen Umbenennung aller Nutzung von `DAG` zu `Dag` als neuem + feststehenden Begriff in + ([[LAZY CONSENSUS] rename DAGs to dags / Dags in docs](https://lists.apache.org/thread/24hs06s39l73gj2h4o8l5dr2czgg2gw0)) + ist es sinnvoll, diesen Begriff als markenähnlichen Begriff in Airflow + konsistent mit der weit verbreiteten Verwendung des Begriffs in der Dokumentation zu behalten. Die deutsche Übersetzung als + "Workflow" wäre vermutlich eher irreführend und es ist anzunehmen, dass die + Nutzer von Airflow den Begriff zuordnen können. + Der Begriff `Dag` wird in der deutschen Übersetzung im Neutrum verwendet. +- Log level "CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG" in dag.json + Abschnitt "logs": Diese Begriffe werden in den feststehenden Logs im Text + auch ausgegeben, deswegen werden sie nicht in das Deutsche übertragen. + +(Derzeit keine weiteren feststehenden Begriffe) + +## Definitionen von Übersetzungen von Airflow-Spezifischen Termen + +Für die Deutsche Übersetzung wurden die folgenden Terme wie folgt übersetzt +(in alphabetischer Reihenfolge): + +- `Asset`/`Assets` --> `Datenset (Asset)`/`Datensets (Assets)`: + Da der Begriff in Airflow 3 neu eingeführt wurde, steht er derzeit nicht fest. + Daher eignet er sich für eine inhaltlich passende Übersetzung. Um neue + Benutzer nicht zu verwirren, wird der durch Airflow definierte Originalterm in + Klammern wenn möglich mitgeführt. +- `Asset Event` --> `Ereignis zu Datenset (Asset)`: Logische Konsequenz der + Übersetzung von -->"Asset", ohne einen sperrigen Begriff wie + "Datensatz-Ereignis" zu erzeugen. +- `Backfill` --> `Auffüllen`: Der technisch geprägte Term im Programm passt zu + der direkten Übersetzung; auch im Deutschen werden Lücken wieder "aufgefüllt". +- `Bundle` --> `Bündel`: Die direkte Übersetzung passt zu dem Ziel der + Englischen Begriffsdefinition. +- `Catchup` --> `Nachholen`: Direkte Übersetzung. +- `Connections` --> `Verbindungen`: Ist zwar ein feststehender Begriff in + Airflow und ein technisches Konstrukt, das im Code zu finden ist, jedoch + direkt übersetzbar und erschließt sich damit neuen Benutzern direkt. +- `Dag ID`: Unübersetzt. "ID" sollte nach Duden favorisiert groß geschrieben + werden. +- `Dag Run` --> `Dag Lauf`: Auch wenn der Begriff Run im Code und in Logs oft + zu finden ist, ist eine Eindeutschung für das Gesamtbild im UI vorteilhaft - + abgesehen von dem feststehenden Begriff -->"Dag". +- `Deferred` (Status) --> `Delegiert`: Im Deutschen ist die Übersetzung nur + teilweise passend und der Begriff "Delegiert" ist am nächsten an der + Original-Bedeutung, da ein Task zu der Triggerer-Komponente weitergegeben wird. +- `Docs` --> `Doku`: Die direkte Übersetzung wäre eigentlich "Dokumentation" + gewesen, aber ohne Wort-Trennung wäre der übersetzte Begriff einige Pixel zu + breit für die Navigationsleiste. Deswegen wurde der im Deutschen auch + gängige Begriff gewählt. +- `Map Index` --> `Planungs-Index`: Da es hier keine direkt passende Übersetzung + gibt und der Begriff "Mapping" eigentlich übersetzbar ist - aber in dem + genutzten Kontext irreführend wäre, wurde hier auf die Task-Planung verwiesen, + in der ein Task aufgeplant wird.
- `Operator` --> `Operator`: Da es sich hier um den vor allem mathematisch- + technischen Begriff der Implementierung handelt, passt dieser Begriff am + ehesten. Alternativen wie "Betreiber-Implementierung" sind sehr sperrig. + Wir nutzen den Begriff weiter, weil er sich auch so im Programmcode + wiederfindet. +- `Plugins` --> `Plug-ins`: Nach Duden empfohlen. +- `Pools` (Unübersetzt): Der Englische Term ist so im Deutschen direkt + verständlich. Eine präzise Übersetzung als "Ressourcen-Pool" wäre zu sperrig + und andere Übersetzungsoptionen wie "Schwimmbad" irreführend. Insofern ist + "Pool" als Kurzform von "Ressourcen-Pool" anzusehen. +- `Provider` (Unübersetzt): Eine direkte Übersetzung in andere Begriffe + verbessert nicht das Verständnis. Der Term ist im Deutschen so gut + verständlich. +- `Scheduled` --> `Geplant`: Vor allem genutzt für zyklisch laufende Dags. +- `Tag` --> `Markierung`: Kennzeichnen von Dags zur besseren Ordnung. +- `Task ID`: Unübersetzt. "ID" sollte nach Duden favorisiert groß geschrieben + werden. +- `Task Instance` --> `Task Instanz`: Der Begriff Task wird im Deutschen + genutzt und passt zu der technischen Nutzung in Airflow. Alternativ wäre + "Aufgabe" eine mögliche Übersetzung gewesen. Da aber der Begriff Task auch in + Logs und Code zu finden ist, lag der Begriff etwas näher als "Aufgabe". +- `Trigger` (to) --> `Auslösen`: Genutzt für die Aktion, einen Lauf eines Dags zu + starten. Von allen Optionen der am ehesten passende Begriff, auch wenn es eine + direkte Nutzung des Begriffs "Triggern" im Deutschen gibt. Der Begriff + "Anstoßen" ist auch passend, aber im Zusammenhang mit Trigger Rule ist zur + Konsistenz "Auslösen" passender. +- `Trigger Rule` --> `Auslöse-Regel`: Im Ablauf eines Dags bestimmt die + Auslöse-Regel (in Kombination mit der Position) jedes Tasks, unter welchen + Bedingungen und zu welchem Zeitpunkt im Dag Lauf dieser Task gestartet werden + kann. +- `Try Number` --> `Versuch Nummer`: Direkte Übersetzung ist passend.
+ +(Andere klassische Begriffsübersetzungen nicht im Einzelnen aufgeführt) diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/admin.json new file mode 100644 index 0000000000000..a5299dc13a9d7 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/admin.json @@ -0,0 +1,167 @@ +{ + "columns": { + "description": "Beschreibung", + "key": "Name", + "name": "Name", + "value": "Wert" + }, + "config": { + "columns": { + "section": "Abschnitt" + }, + "title": "Airflow Konfiguration" + }, + "connections": { + "add": "Verbindung hinzufügen", + "columns": { + "connectionId": "Verbindungs-ID", + "connectionType": "Verbindungstyp", + "host": "Hostname", + "port": "Port" + }, + "connection_one": "Verbindung", + "connection_other": "Verbindungen", + "delete": { + "deleteConnection_one": "1 Verbindung löschen", + "deleteConnection_other": "{{count}} Verbindungen löschen", + "firstConfirmMessage_one": "Die folgende Verbindung wird gelöscht:", + "firstConfirmMessage_other": "Die folgenden Verbindungen werden gelöscht:", + "title": "Verbindung löschen" + }, + "edit": "Verbindung bearbeiten", + "form": { + "connectionIdRequired": "Verbindungs-ID ist erforderlich", + "connectionIdRequirement": "Die Verbindungs-ID kann nicht nur aus Leerzeichen bestehen", + "connectionTypeRequired": "Die Auswahl eines Verbindungstyps ist erforderlich", + "extraFields": "Weitere Felder", + "extraFieldsJson": "JSON Ansicht der weiteren Felder", + "helperText": "Fehlt ein Verbindungstyp? Stellen Sie sicher, dass die notwendigen Provider-Pakete installiert sind.", + "helperTextForRedactedFields": "Unkenntlich gemachte Felder ('***') bleiben unverändert, wenn sie nicht bearbeitet werden.", + "selectConnectionType": "Verbindungstyp auswählen", + "standardFields": "Standardfelder" + }, + "nothingFound": { + "description": "Verbindungen, die durch Secret-Managers oder Umgebungsvariablen definiert sind werden hier nicht gelistet.", + "documentationLink": "Mehr Details sind in der Airflow-Dokumentation beschrieben.", + "learnMore": "Diese Verbindungen werden erst eingelesen, wenn ein Task, der sie verwendet ausgeführt wird und sie sind daher in dieser Ansicht nicht sichtbar.", + "title": "Keine Verbindungen gefunden!" + }, + "searchPlaceholder": "Verbindungen suchen", + "test": "Verbindung testen", + "testDisabled": "Das Testen von Verbindungen ist deaktiviert. Der Administrator kann via Konfiguration das Testen freischalten.", + "typeMeta": { + "error": "Fehler beim Abrufen der Liste der Verbindungstypen", + "standardFields": { + "description": "Beschreibung", + "host": "Hostname", + "login": "Benutzername", + "password": "Passwort", + "port": "Port", + "url_schema": "Verbindungsschema" + } + } + }, + "deleteActions": { + "button": "Löschen", + "modal": { + "confirmButton": "Ja, löschen", + "secondConfirmMessage": "Diese Aktion kann nicht rückgängig gemacht werden. ", + "thirdConfirmMessage": "Sind Sie sich sicher?" 
+ }, + "selected": "Ausgewählt", + "tooltip": "Ausgewählte Verbindungen löschen" + }, + "formActions": { + "reset": "Zurücksetzen", + "save": "Speichern" + }, + "plugins": { + "columns": { + "source": "Quelle" + }, + "importError_one": "Fehler beim Laden eines Plug-ins", + "importError_other": "Fehler beim Laden von Plug-ins", + "searchPlaceholder": "Nach Datei suchen" + }, + "pools": { + "add": "Pool hinzufügen", + "deferredSlotsIncluded": "Delegierte Tasks einschließen", + "delete": { + "title": "Pool löschen", + "warning": "Dies wird alle Metadaten zu diesem Pool entfernen. Dies kann Tasks beeinträchtigen, die diesen Pool nutzen. Ein Task, der einem nicht existenten Pool zugewiesen ist, kann nicht geplant werden." + }, + "edit": "Pool bearbeiten", + "form": { + "checkbox": "Delegierte Tasks sollen in die Berechnung der Pool-Belegung berücksichtigt werden", + "description": "Beschreibung", + "includeDeferred": "Delegierte Tasks", + "nameMaxLength": "Der Name kann maximal 256 Zeichen lang sein", + "nameRequired": "Ein Name ist erforderlich", + "slots": "Plätze im Pool" + }, + "noPoolsFound": "Keine Pools gefunden", + "pool_one": "Pool", + "pool_other": "Pools", + "searchPlaceholder": "Pools suchen", + "sort": { + "asc": "Name (A-Z)", + "desc": "Name (Z-A)", + "placeholder": "Sortierung" + } + }, + "providers": { + "columns": { + "packageName": "Paketname", + "version": "Version" + } + }, + "variables": { + "add": "Variable hinzufügen", + "columns": { + "isEncrypted": "Wert ist verschlüsselt" + }, + "delete": { + "deleteVariable_one": "1 Variable löschen", + "deleteVariable_other": "{{count}} Variablen löschen", + "firstConfirmMessage_one": "Die folgende Variable wird gelöscht:", + "firstConfirmMessage_other": "Die folgenden Variablen werden gelöscht:", + "title": "Variable löschen", + "tooltip": "Ausgewählte Variablen löschen" + }, + "edit": "Variable bearbeiten", + "export": "Exportieren", + "exportTooltip": "Ausgewählte Variablen exportieren", + "form": { + "invalidJson": "JSON ist ungültig", + "keyMaxLength": "Der Name kann maximal 250 Zeichen lang sein", + "keyRequired": "Name ist erforderlich", + "valueRequired": "Wert ist erforderlich" + }, + "import": { + "button": "Importieren", + "conflictResolution": "Konfliktlösung für Import auswählen", + "errorParsingJsonFile": "JSON-Datei kann nicht gelesen werden: Die Datei muss Schlüssel-/Wertpaare enthalten (z.B., {\"name\": \"wert\", ...}).", + "options": { + "fail": { + "description": "Import abbrechen, wenn eine Variable mit dem gleichen Namen bereits existiert.", + "title": "Abbrechen" + }, + "overwrite": { + "description": "Variable überschreiben, wenn eine Variable mit dem gleichen Namen bereits existiert (der Wert der importierten Variable wird übernommen).", + "title": "Überschreiben" + }, + "skip": { + "description": "Variable überspringen, wenn eine Variable mit dem gleichen Namen bereits existiert (der Wert der bestehenden Variable wird nicht verändert).", + "title": "Überspringen" + } + }, + "title": "Variablen importieren", + "upload": "JSON-Datei hochladen", + "uploadPlaceholder": "JSON-Datei mit Variablen auswählen (z.B., {\"name\": \"wert\", ...})" + }, + "noRowsMessage": "Keine Variablen gefunden", + "searchPlaceholder": "Nach Namen suchen", + "variable_one": "Variable", + "variable_other": "Variablen" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/assets.json new file mode 100644 index 0000000000000..6804f32ffe598 --- /dev/null +++ 
b/airflow-core/src/airflow/ui/public/i18n/locales/de/assets.json @@ -0,0 +1,30 @@ +{ + "consumingDags": "Konsumierende Dags", + "createEvent": { + "button": "Ereignis erstellen", + "manual": { + "description": "Direkt ein Ereignis zum Datenset (Asset) erstellen", + "extra": "Extra Daten zu Ereignis", + "label": "Manuell" + }, + "materialize": { + "description": "Das Dag, das dieses Datenset (Asset) produziert, direkt auslösen", + "descriptionWithDag": "Das Dag, das dieses Datenset produziert, auslösen: {{dagName}}", + "label": "Materialisieren", + "unpauseDag": "Das Dag {{dagName}} beim Auslösen aktivieren" + }, + "success": { + "manualDescription": "Das manuelle Erzeugen eines Ereignisses zu dem Datenset (Asset) war erfolgreich.", + "manualTitle": "Ereignis zu Datenset erstellt", + "materializeDescription": "Produzierendes Dag {{dagId}} wurde erfolgreich ausgelöst.", + "materializeTitle": "Datenset wird materialisiert" + }, + "title": "Ereignis zu Datenset (Asset) {{name}} erstellen" + }, + "group": "Gruppe", + "lastAssetEvent": "Letztes Ereignis zu Datenset (Asset)", + "name": "Name", + "producingTasks": "Produzierende Tasks", + "scheduledDags": "Geplante Dags", + "searchPlaceholder": "Datenset (Asset) suchen" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/browse.json new file mode 100644 index 0000000000000..dc3b141bd872d --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "Alle Extra JSON Daten einklappen", + "expandAllExtra": "Alle Extra JSON Daten ausklappen" + }, + "columns": { + "event": "Ereignis", + "extra": "Extra JSON Daten", + "user": "Benutzer", + "when": "Datum und Uhrzeit" + }, + "title": "Prüf-Log" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "Name", + "value": "Wert" + }, + "title": "Task Kommunikation (XComs)" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/common.json new file mode 100644 index 0000000000000..2dda276a88537 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/common.json @@ -0,0 +1,302 @@ +{ + "admin": { + "Config": "Konfiguration", + "Connections": "Verbindungen", + "Plugins": "Plug-ins", + "Pools": "Pools", + "Providers": "Providers", + "Variables": "Variablen" + }, + "asset_one": "Datenset (Asset)", + "asset_other": "Datensets (Assets)", + "assetEvent_one": "Ereignis zu Datenset (Asset)", + "assetEvent_other": "Ereignisse zu Datensets (Assets)", + "backfill_one": "Auffüllung", + "backfill_other": "Auffüllungen", + "browse": { + "auditLog": "Prüf-Log", + "requiredActions": "Erforderliche Interaktionen", + "xcoms": "Task Kommunikation (XComs)" + }, + "collapseDetailsPanel": "Detailansicht ausblenden", + "createdAssetEvent_one": "Erstelltes Ereignis zu Datenset (Asset)", + "createdAssetEvent_other": "Erstellte Ereignisse zu Datensets (Assets)", + "dag_one": "Dag", + "dag_other": "Dags", + "dagDetails": { + "catchup": "Nachgeholt", + "concurrency": "Parallelität", + "dagRunTimeout": "Dag Lauf Zeitüberschreitung", + "defaultArgs": "Standard-Parameter", + "description": "Beschreibung", + "documentation": "Dag Dokumentation", + "fileLocation": "Ablagepfad", + "hasTaskConcurrencyLimits": "Hat der Task Limitierungen zur Parallelität", + "lastExpired": "Letztmaliger Ablauf", + "lastParsed": "Letztmalig Eingelesen", + "latestDagVersion": "Letzte Dag Version",
"latestRun": "Letzter Lauf", + "maxActiveRuns": "Maximal aktive Läufe", + "maxActiveTasks": "Maximal aktive Tasks", + "maxConsecutiveFailedDagRuns": "Maximal fortlaufende fehlerhafte Dag Läufe", + "nextRun": "Nächster Lauf", + "owner": "Eigentümer", + "params": "Parameter", + "schedule": "Zeitplan", + "tags": "Markierungen" + }, + "dagId": "Dag ID", + "dagRun": { + "conf": "Konfiguration", + "dagVersions": "Dag Versionen", + "dataIntervalEnd": "Datenintervall Ende", + "dataIntervalStart": "Datenintervall Start", + "lastSchedulingDecision": "Letzte Planungsentscheidung", + "queuedAt": "Wartend seit", + "runAfter": "Gelaufen ab", + "runType": "Typ des Laufs", + "sourceAssetEvent": "Ursprung des Datenset (Asset) Ereignisses", + "triggeredBy": "Ausgelöst durch", + "triggeringUser": "Benutzer der ausgelöst hat" + }, + "dagRun_one": "Dag Lauf", + "dagRun_other": "Dag Läufe", + "dagRunId": "Dag Lauf ID", + "dagWarnings": "Dag Warnungen und Fehler", + "defaultToGraphView": "Graph-Ansicht als Standard", + "defaultToGridView": "Gitter-Ansicht als Standard", + "direction": "Richtung", + "docs": { + "documentation": "Dokumentation", + "githubRepo": "GitHub Ablage", + "restApiReference": "REST API Referenz" + }, + "duration": "Laufzeit", + "endDate": "Enddatum", + "error": { + "back": "Zurück", + "defaultMessage": "Ein unerwarteter Fehler ist aufgetreten", + "home": "Start", + "notFound": "Seite nicht gefunden", + "title": "Fehler" + }, + "expand": { + "collapse": "Einklappen", + "expand": "Ausklappen", + "hotkey": "e", + "tooltip": "Tastenkombination {{hotkey}} zum Ein-/Ausklappen drücken" + }, + "expression": { + "all": "Alle", + "and": "UND", + "any": "Jeder", + "or": "ODER" + }, + "logicalDate": "Logisches Datum", + "logout": "Abmelden", + "logoutConfirmation": "Sie sind dabei sich von dem System abzumelden.", + "mapIndex": "Planungs-Index", + "modal": { + "cancel": "Abbrechen", + "confirm": "Bestätigen", + "delete": { + "button": "Löschen", + "confirmation": "Sind Sie sicher, dass Sie {{resourceName}} löschen wollen? Diese Aktion kann nicht rückgängig gemacht werden." 
+ } + }, + "nav": { + "admin": "Verwaltung", + "assets": "Datensets (Assets)", + "browse": "Browsen", + "dags": "Dags", + "docs": "Doku", + "home": "Start", + "legacyFabViews": "Alte Ansichten", + "plugins": "Plug-ins", + "security": "Sicherheit" + }, + "noItemsFound": "Kein Element vom Typ {{modelName}} gefunden", + "note": { + "add": "Eine Notiz hinzufügen", + "dagRun": "Notizen zum Dag Lauf", + "label": "Notiz", + "placeholder": "Eine Notiz hinzufügen...", + "taskInstance": "Notizen zu Task Instanzen" + }, + "pools": { + "deferred": "Delegiert", + "open": "Frei", + "pools_one": "Pool", + "pools_other": "Pools", + "queued": "Wartend", + "running": "Laufende", + "scheduled": "Geplant" + }, + "runId": "Lauf Id", + "runTypes": { + "asset_triggered": "Durch Datenset (Asset) ausgelöst", + "backfill": "Nachträglich Aufgefüllt", + "manual": "Manuell", + "scheduled": "Geplant" + }, + "scroll": { + "direction": { + "bottom": "unten", + "top": "oben" + }, + "tooltip": "Tastenkombination {{hotkey}} zum scrollen nach {{direction}}" + }, + "seconds": "{{count}}s", + "security": { + "actions": "Aktionen", + "permissions": "Berechtigungen", + "resources": "Ressourcen", + "roles": "Rollen", + "users": "Benutzer" + }, + "selectLanguage": "Sprache wählen", + "showDetailsPanel": "Detailansicht einblenden", + "source": { + "hide": "Log-Quelle ausblenden", + "hotkey": "s", + "show": "Log-Quelle einblenden" + }, + "sourceAssetEvent_one": "Urspung des Datenset (Asset) Ereignisses", + "sourceAssetEvent_other": "Urspung der Datenset (Asset) Ereignisse", + "startDate": "Startdatum", + "state": "Status", + "states": { + "deferred": "Delegiert", + "failed": "Fehlgeschlagen", + "no_status": "Kein Status", + "none": "Kein Status", + "queued": "Wartend", + "removed": "Entfernt", + "restarting": "Im Neustart", + "running": "Laufend", + "scheduled": "Geplant", + "skipped": "Übersprungen", + "success": "Erfolgreich", + "up_for_reschedule": "Wartet auf Neuplanung", + "up_for_retry": "Wartet auf neuen Versuch", + "upstream_failed": "Vorgelagerte fehlgeschlagen" + }, + "switchToDarkMode": "Zum Dunkelmodus wechseln", + "switchToLightMode": "Zum Hellmodus wechseln", + "table": { + "completedAt": "Abgeschlossen um", + "createdAt": "Erstellt um", + "filterByTag": "Dags nach Markierung filtern", + "filterColumns": "Tabellenspalten filtern", + "filterReset_one": "Filter zurücksetzen", + "filterReset_other": "Filter zurücksetzen", + "from": "Von", + "maxActiveRuns": "Maximal aktive Läufe", + "noTagsFound": "Keine Markierungen gefunden", + "tagMode": { + "all": "Alle", + "any": "Einer" + }, + "tagPlaceholder": "Dags nach Markierung filtern", + "to": "Zu" + }, + "task": { + "documentation": "Task Dokumentation", + "lastInstance": "Letzte Task Instanz", + "operator": "Operator", + "triggerRule": "Auslöse-Regel" + }, + "task_one": "Task", + "task_other": "Tasks", + "taskId": "Task ID", + "taskInstance": { + "dagVersion": "Dag Version", + "executor": "Ausführungsumgebung", + "executorConfig": "Konfiguration der Ausführungsumgebung", + "hostname": "Hostname", + "maxTries": "Maximale Versuche", + "pid": "PID", + "pool": "Pool", + "poolSlots": "Pool Belegung", + "priorityWeight": "Priorität", + "queue": "Warteschlange", + "queuedWhen": "Wartend seit", + "scheduledWhen": "Geplant ab", + "triggerer": { + "assigned": "Zugewiesene Abrufumgebung", + "class": "Abruf-Klasse", + "createdAt": "Zeitpunkt der Erstellung", + "id": "Abrufungs ID", + "latestHeartbeat": "Letztes Lebenszeichen", + "title": "Abrufumgebungs-Information" + }, + "unixname": 
"Unix Name" + }, + "taskInstance_one": "Task Instanz", + "taskInstance_other": "Task Instanzen", + "timeRange": { + "last12Hours": "Letzte 12 Stunden", + "last24Hours": "Letzte 24 Stunden", + "lastHour": "Letzte Stunde", + "pastWeek": "Letzte Woche" + }, + "timestamp": { + "hide": "Zeitstempel ausblenden", + "hotkey": "t", + "show": "Zeitstempel einblenden" + }, + "timezone": "Zeitzone", + "timezoneModal": { + "current-timezone": "Aktuelle Zeit in", + "placeholder": "Wählen Sie eine Zeitzone", + "title": "Auswahl der Zeitzone", + "utc": "UTC (Koordinierte Weltzeit)" + }, + "toaster": { + "bulkDelete": { + "error": "Massenlöschung von {{resourceName}} fehlgeschlagen", + "success": { + "description": "{{count}} von {{resourceName}} wurden erfolgreich gelöscht. Schlüsselwerte: {{keys}}", + "title": "Massenlöschung von {{resourceName}} gestartet" + } + }, + "create": { + "error": "Erstellung von {{resourceName}} fehlgeschlagen", + "success": { + "description": "{{resourceName}} wurde erfolgreich erstellt.", + "title": "Erstellung von {{resourceName}} gestartet" + } + }, + "delete": { + "error": "Löschen von {{resourceName}} fehlgeschlagen", + "success": { + "description": "{{resourceName}} wurde erfolgreich gelöscht.", + "title": "Löschung von {{resourceName}} gestartet" + } + }, + "import": { + "error": "Import von {{resourceName}} fehlgeschlagen", + "success": { + "description": "{{count}} {{resourceName}} wurden erfolgreich importiert.", + "title": "Import von {{resourceName}} gestartet" + } + }, + "update": { + "error": "Aktualisierung von {{resourceName}} fehlgeschlagen", + "success": { + "description": "{{resourceName}} wurde erfolgreich aktualisiert.", + "title": "Aktualisierung von {{resourceName}} gestartet" + } + } + }, + "total": "Gesamt {{state}}", + "triggered": "Ausgelöst", + "tryNumber": "Versuch Nummer", + "user": "Benutzer", + "wrap": { + "hotkey": "w", + "tooltip": "Buchstabe {{hotkey}} drücken um den Zeilenumbruch umzuschalten", + "unwrap": "Kein Zeilenumbruch", + "wrap": "Zeilenumbruch" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json new file mode 100644 index 0000000000000..95abe63ea6367 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json @@ -0,0 +1,136 @@ +{ + "backfill": { + "affected_one": "1 Lauf wird ausgelöst.", + "affected_other": "{{count}} Läufe werden ausgelöst.", + "affectedNone": "Keine Läufe entsprechen den Kriterien.", + "allRuns": "Alle Läufe", + "backwards": "Verarbeitung in Rückwärtiger Reihenfolge", + "dateRange": "Datumsbereich", + "dateRangeFrom": "Von", + "dateRangeTo": "Bis", + "errorStartDateBeforeEndDate": "Das Startdatum muss vor dem Enddatum liegen.", + "maxRuns": "Anzahl aktiver paralleler Läufe", + "missingAndErroredRuns": "Fehlende und fehlgeschlagene Läufe", + "missingRuns": "Fehlende Läufe", + "reprocessBehavior": "Auffüll-Modus", + "run": "Auffüllung starten", + "selectDescription": "Dieses Dag für einen Datumsbereich in der Vergangenheit ausführen", + "selectLabel": "Auffüllen", + "title": "Auffüllung starten", + "toaster": { + "success": { + "description": "Auffüllungsaufträge wurden erfolgreich angestoßen.", + "title": "Auffüllung gestartet" + } + }, + "tooltip": "Auffüllung benötigt eine Zeitplanung", + "unpause": "Dag {{dag_display_name}} beim Start der Auffüllung aktiv schalten", + "validation": { + "datesRequired": "Sowohl Start- als auch Enddatum müssen angegeben werden.", + "startBeforeEnd": 
"Das Startdatum muss vor oder auf dem Enddatum liegen." + } + }, + "banner": { + "backfillInProgress": "Auffüllung läuft", + "cancel": "Auffüllung abbrechen", + "pause": "Auffüllung pausieren", + "unpause": "Auffüllung weiterführen" + }, + "clipboard": { + "copy": "Kopieren" + }, + "close": "Schließen", + "configForm": { + "advancedOptions": "Erweiterte Optionen", + "configJson": "JSON der Konfiguration", + "invalidJson": "Ungültiges JSON Format: {{errorMessage}}" + }, + "dagWarnings": { + "error_one": "1 Fehler", + "error_other": "{{count}} Fehler", + "errorAndWarning": "1 Fehler und {{warningText}}", + "warning_one": "1 Warnung", + "warning_other": "{{count}} Warnungen" + }, + "durationChart": { + "duration": "Laufzeit (Sekunden)", + "lastDagRun_one": "Letzter Dag Lauf", + "lastDagRun_other": "Letzte {{count}} Dag Läufe", + "lastTaskInstance_one": "Letzte Task Instanz", + "lastTaskInstance_other": "Letzte {{count}} Task Instanzen", + "queuedDuration": "Zeit in der Warteschlange", + "runAfter": "Lauf ab", + "runDuration": "Laufzeit" + }, + "fileUpload": { + "files_one": "1 Datei", + "files_other": "{{count}} Dateien" + }, + "flexibleForm": { + "placeholder": "Wählen Sie einen Wert", + "placeholderArray": "Fügen Sie einen Eintrag pro Zeile um eine Liste zu erzeugen", + "placeholderExamples": "Tippen Sie ein paar Buchstaben für Vorschläge", + "placeholderMulti": "Wählen Sie einen oder mehrere Werte", + "validationErrorArrayNotArray": "Die Daten sind keine Liste.", + "validationErrorArrayNotNumbers": "Alle Elemente in der Liste müssen numerisch sein.", + "validationErrorArrayNotObject": "Alle Elemente in der Liste müssen Objekte (Dictionary) sein.", + "validationErrorRequired": "Dieses Feld ist ein Pflichtfeld und muss gefüllt werden" + }, + "graph": { + "directionDown": "Von Oben nach Unten", + "directionLeft": "Von Rechts nach Links", + "directionRight": "Von Links nach Rechts", + "directionUp": "Von Unten nach Oben", + "downloadImage": "Graph-Bild herunterladen", + "downloadImageError": "Herunterladn des Graph-Bild fehlgeschlagen.", + "downloadImageErrorTitle": "Herunterladen fehlgeschlagen", + "otherDagRuns": "+Weitere Dag Läufe", + "taskCount_one": "{{count}} Task", + "taskCount_other": "{{count}} Tasks", + "taskGroup": "Task Gruppe" + }, + "limitedList": "+{{count}} mehr", + "logs": { + "file": "Datei", + "location": "Zeile {{line}} in {{name}}" + }, + "reparseDag": "Dag neu parsen", + "sortedAscending": "aufsteigend sortier", + "sortedDescending": "absteigend sortier", + "sortedUnsorted": "unsortiert", + "taskTries": "Versuch des Tasks", + "toggleCardView": "Kachelansicht anzeigen", + "toggleTableView": "Tabellenansicht anzeigen", + "triggerDag": { + "button": "Auslösen", + "loading": "Lade DAG Information...", + "loadingFailed": "Das Laden der DAG Information fehlgeschlagen. Bitt versuchen Sie es noch einmal.", + "runIdHelp": "Optional - wird automatisch erzeugt wenn nicht angegeben", + "selectDescription": "Einen einzelnen Lauf dieses Dag auslösen", + "selectLabel": "Einzelner Lauf", + "title": "Dag Auslösen", + "toaster": { + "success": { + "description": "Dag-Lauf wurde erfolgreich ausgelöst.", + "title": "Dag-Lauf ausgelöst" + } + }, + "unpause": "Dag {{dagDisplayName}} beim Auslösen des Laufes aktiv schalten" + }, + "trimText": { + "details": "Details", + "empty": "Leer", + "noContent": "Kein Inhalt verfügbar." 
+ },
+ "versionDetails": {
+ "bundleLink": "Bündel-Link",
+ "bundleName": "Bündel-Name",
+ "bundleVersion": "Bündel-Version",
+ "createdAt": "Erstellt um",
+ "versionId": "Versions-ID"
+ },
+ "versionSelect": {
+ "dagVersion": "Dag Version",
+ "versionCode": "v{{versionCode}}"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/dag.json
new file mode 100644
index 0000000000000..9a22f7cd417e2
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/dag.json
@@ -0,0 +1,121 @@
+{
+ "allRuns": "Alle Läufe",
+ "blockingDeps": {
+ "dependency": "Abhängigkeit",
+ "reason": "Grund",
+ "title": "Abhängigkeiten, die die Planung des Tasks blockieren"
+ },
+ "code": {
+ "bundleUrl": "Bündel-URL",
+ "noCode": "Kein Programmcode gefunden",
+ "parsedAt": "Eingelesen um:"
+ },
+ "extraLinks": "Extra Links",
+ "grid": {
+ "buttons": {
+ "resetToLatest": "Auf den neuesten Stand zurücksetzen",
+ "toggleGroup": "Gruppen umschalten"
+ }
+ },
+ "header": {
+ "buttons": {
+ "advanced": "Erweitert",
+ "dagDocs": "Dag Dokumentation"
+ }
+ },
+ "logs": {
+ "allLevels": "Alle Protokoll-Stufen",
+ "allSources": "Alle Quellen",
+ "critical": "CRITICAL",
+ "debug": "DEBUG",
+ "error": "ERROR",
+ "fullscreen": {
+ "button": "Vollbild",
+ "tooltip": "Taste {{hotkey}} für Vollbildmodus"
+ },
+ "info": "INFO",
+ "noTryNumber": "Keine Versuchsnummer",
+ "settings": "Protokoll-Ansicht",
+ "viewInExternal": "Protokoll in {{name}} (Versuch {{attempt}}) ansehen",
+ "warning": "WARNING"
+ },
+ "navigation": {
+ "jump": "Springen: Umschalttaste+{{arrow}}",
+ "navigation": "Navigation: {{arrow}}",
+ "toggleGroup": "Gruppen umschalten: Leertaste"
+ },
+ "overview": {
+ "buttons": {
+ "failedRun_one": "Fehlgeschlagener Lauf",
+ "failedRun_other": "Fehlgeschlagene Läufe",
+ "failedTask_one": "Fehlgeschlagener Task",
+ "failedTask_other": "Fehlgeschlagene Tasks",
+ "failedTaskInstance_one": "Fehlgeschlagene Task Instanz",
+ "failedTaskInstance_other": "Fehlgeschlagene Task Instanzen"
+ },
+ "charts": {
+ "assetEvent_one": "Erstelltes Datenset-Ereignis",
+ "assetEvent_other": "Erstellte Datenset-Ereignisse"
+ },
+ "failedLogs": {
+ "title": "Protokolle kürzlich fehlgeschlagener Tasks",
+ "viewFullLogs": "Vollständige Protokolle ansehen"
+ }
+ },
+ "panel": {
+ "buttons": {
+ "options": "Optionen",
+ "showGraph": "Graph zeigen",
+ "showGrid": "Gitter zeigen"
+ },
+ "dagRuns": {
+ "label": "Anzahl von Dag Läufen"
+ },
+ "dependencies": {
+ "label": "Abhängigkeiten",
+ "options": {
+ "allDagDependencies": "Alle Dag Abhängigkeiten",
+ "externalConditions": "Externe Bedingungen",
+ "onlyTasks": "Nur Tasks"
+ },
+ "placeholder": "Abhängigkeiten"
+ },
+ "graphDirection": {
+ "label": "Richtung des Graphen"
+ }
+ },
+ "paramsFailed": "Laden der Parameter fehlgeschlagen",
+ "parse": {
+ "toaster": {
+ "error": {
+ "description": "Die Anfrage zum Parsen des Dags ist fehlgeschlagen. 
Eventuell gibt es schon bestehende Anfragen.",
+ "title": "Neues Parsen des Dags fehlgeschlagen"
+ },
+ "success": {
+ "description": "Das Dag sollte gleich neu geparst sein.",
+ "title": "Anfrage zum Neu-Parsen des Dags erfolgreich gestellt"
+ }
+ }
+ },
+ "tabs": {
+ "assetEvents": "Ereignisse zu Datensets (Asset)",
+ "auditLog": "Prüf-Log",
+ "backfills": "Auffüllungen",
+ "code": "Programmcode",
+ "details": "Details",
+ "logs": "Protokolle",
+ "mappedTaskInstances_one": "Task Instanz [{{count}}]",
+ "mappedTaskInstances_other": "Task Instanzen [{{count}}]",
+ "overview": "Übersicht",
+ "renderedTemplates": "Ausgefüllte Vorlagen",
+ "requiredActions": "Interaktionen",
+ "runs": "Läufe",
+ "taskInstances": "Task Instanzen",
+ "tasks": "Tasks",
+ "xcom": "Task Kommunikation (XComs)"
+ },
+ "taskGroups": {
+ "collapseAll": "Alle Task-Gruppen einklappen",
+ "expandAll": "Alle Task-Gruppen aufklappen"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/dags.json
new file mode 100644
index 0000000000000..a81349c6e17a6
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/dags.json
@@ -0,0 +1,97 @@
+{
+ "assetSchedule": "{{count}} von {{total}} Datensets (Assets) aktualisiert",
+ "dagActions": {
+ "delete": {
+ "button": "Dag löschen",
+ "warning": "Diese Aktion löscht alle Metadaten zu diesem Dag mit allen Läufen und Task Instanzen."
+ }
+ },
+ "favoriteDag": "Dag als Favorit hinzufügen",
+ "filters": {
+ "allRunTypes": "Alle Arten von Läufen",
+ "allStates": "Alle Status",
+ "favorite": {
+ "all": "Alle",
+ "favorite": "Favorisierte",
+ "unfavorite": "Nicht favorisierte"
+ },
+ "paused": {
+ "active": "Aktiv",
+ "all": "Alle",
+ "paused": "Pausiert"
+ },
+ "runIdPatternFilter": "Dag Läufe suchen",
+ "triggeringUserNameFilter": "Suche Läufe ausgelöst von..."
+ },
+ "ownerLink": "Besitzer-Link für {{owner}}",
+ "runAndTaskActions": {
+ "affectedTasks": {
+ "noItemsFound": "Keine Tasks gefunden.",
+ "title": "Betroffene Tasks: {{count}}"
+ },
+ "clear": {
+ "button": "{{type}} zurücksetzen",
+ "buttonTooltip": "Umschalttaste+C zum Zurücksetzen tippen",
+ "error": "Fehler beim Zurücksetzen von {{type}}",
+ "title": "{{type}} bereinigen und neu planen"
+ },
+ "delete": {
+ "button": "{{type}} löschen",
+ "dialog": {
+ "resourceName": "{{type}} {{id}}",
+ "title": "{{type}} löschen",
+ "warning": "Diese Aktion löscht alle Metadaten zu {{type}}." 
+ },
+ "error": "Fehler beim Löschen von {{type}}",
+ "success": {
+ "description": "Das Löschen von {{type}} war erfolgreich.",
+ "title": "{{type}} gelöscht"
+ }
+ },
+ "markAs": {
+ "button": "{{type}} markieren...",
+ "buttonTooltip": {
+ "failed": "Umschalttaste+F tippen, um als fehlgeschlagen zu markieren",
+ "success": "Umschalttaste+S tippen, um als erfolgreich zu markieren"
+ },
+ "title": "{{type}} auf den Status {{state}} setzen"
+ },
+ "options": {
+ "downstream": "Nachfolgende",
+ "existingTasks": "Bestehende Tasks bereinigen",
+ "future": "Zukünftige",
+ "onlyFailed": "Nur fehlgeschlagene Tasks bereinigen",
+ "past": "Vergangene",
+ "queueNew": "Neue Tasks einplanen",
+ "runOnLatestVersion": "Mit neuester Bundle-Version ausführen",
+ "upstream": "Vorangegangene"
+ }
+ },
+ "search": {
+ "advanced": "Erweiterte Suche",
+ "clear": "Suche zurücksetzen",
+ "dags": "Dags suchen",
+ "hotkey": "+K",
+ "tasks": "Tasks suchen"
+ },
+ "sort": {
+ "displayName": {
+ "asc": "Sortiert nach Anzeigename (A-Z)",
+ "desc": "Sortiert nach Anzeigename (Z-A)"
+ },
+ "lastRunStartDate": {
+ "asc": "Sortiert nach letztem Startdatum (Erster-Letzter)",
+ "desc": "Sortiert nach letztem Startdatum (Letzter-Erster)"
+ },
+ "lastRunState": {
+ "asc": "Sortiert nach dem Status des letzten Laufes (A-Z)",
+ "desc": "Sortiert nach dem Status des letzten Laufes (Z-A)"
+ },
+ "nextDagRun": {
+ "asc": "Sortiert nach nächstem Laufdatum (Erster-Letzter)",
+ "desc": "Sortiert nach nächstem Laufdatum (Letzter-Erster)"
+ },
+ "placeholder": "Sortieren nach"
+ },
+ "unfavoriteDag": "Von den Favoriten entfernen"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/dashboard.json
new file mode 100644
index 0000000000000..5818efc680935
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/dashboard.json
@@ -0,0 +1,45 @@
+{
+ "favorite": {
+ "favoriteDags_one": "Erster favorisierter Dag",
+ "favoriteDags_other": "Top {{count}} favorisierte Dags",
+ "noDagRuns": "Noch kein Lauf für dieses Dag.",
+ "noFavoriteDags": "Noch keine favorisierten Dags. Mit dem Stern-Symbol neben einem Dag kann man das Dag zu den Favoriten hinzufügen." 
+ },
+ "group": "Gruppe",
+ "health": {
+ "dagProcessor": "Dag Prozessor",
+ "health": "System-Gesundheit",
+ "healthy": "Gut",
+ "lastHeartbeat": "Letztes Lebenszeichen",
+ "metaDatabase": "Meta-Datenbank",
+ "scheduler": "Planer",
+ "status": "Status",
+ "triggerer": "Triggerer",
+ "unhealthy": "Nicht in Ordnung"
+ },
+ "history": "Historie",
+ "importErrors": {
+ "dagImportError_one": "Fehler beim Laden des Dags",
+ "dagImportError_other": "Fehler beim Laden der Dags",
+ "searchByFile": "Nach Datei suchen",
+ "timestamp": "Zeitstempel"
+ },
+ "managePools": "Pools verwalten",
+ "noAssetEvents": "Keine Ereignisse zu Datensets vorhanden.",
+ "poolSlots": "Pool Belegung",
+ "sortBy": {
+ "newestFirst": "Neueste zuerst",
+ "oldestFirst": "Älteste zuerst"
+ },
+ "source": "Quelle",
+ "stats": {
+ "activeDags": "Aktive Dags",
+ "failedDags": "Fehlgeschlagene Dags",
+ "queuedDags": "Dags in Warteschlange",
+ "requiredActions": "Erforderliche Interaktionen",
+ "runningDags": "Laufende Dags",
+ "stats": "Statistiken"
+ },
+ "uri": "Uri",
+ "welcome": "Willkommen"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/hitl.json
new file mode 100644
index 0000000000000..5ac8af954cb43
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/hitl.json
@@ -0,0 +1,24 @@
+{
+ "requiredAction_one": "Erforderliche Interaktion",
+ "requiredAction_other": "Erforderliche Interaktionen",
+ "requiredActionState": "Status der Interaktion",
+ "response": {
+ "error": "Senden der Antwort fehlgeschlagen",
+ "optionsDescription": "Wählen Sie Ihre Optionen für diesen Task",
+ "optionsLabel": "Optionen",
+ "received": "Antwort empfangen um ",
+ "respond": "Antworten",
+ "success": "{{taskId}} Interaktion erfolgreich",
+ "title": "Erforderliche Interaktion - {{taskId}}"
+ },
+ "state": {
+ "approvalReceived": "Genehmigung erhalten",
+ "approvalRequired": "Genehmigung erforderlich",
+ "choiceReceived": "Auswahl getroffen",
+ "choiceRequired": "Auswahl erforderlich",
+ "rejectionReceived": "Ablehnung erhalten",
+ "responseReceived": "Antwort erhalten",
+ "responseRequired": "Antwort erforderlich"
+ },
+ "subject": "Betreff"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/admin.json
new file mode 100644
index 0000000000000..0eb5df5b8a22d
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/admin.json
@@ -0,0 +1,167 @@
+{
+ "columns": {
+ "description": "Description",
+ "key": "Key",
+ "name": "Name",
+ "value": "Value"
+ },
+ "config": {
+ "columns": {
+ "section": "Section"
+ },
+ "title": "Airflow Configuration"
+ },
+ "connections": {
+ "add": "Add Connection",
+ "columns": {
+ "connectionId": "Connection ID",
+ "connectionType": "Connection Type",
+ "host": "Host",
+ "port": "Port"
+ },
+ "connection_one": "Connection",
+ "connection_other": "Connections",
+ "delete": {
+ "deleteConnection_one": "Delete 1 connection",
+ "deleteConnection_other": "Delete {{count}} connections",
+ "firstConfirmMessage_one": "You are about to delete the following connection:",
+ "firstConfirmMessage_other": "You are about to delete the following connections:",
+ "title": "Delete Connection"
+ },
+ "edit": "Edit Connection",
+ "form": {
+ "connectionIdRequired": "Connection ID is required",
+ "connectionIdRequirement": "Connection ID cannot contain only spaces",
+ "connectionTypeRequired": "Connection Type is required",
+ "extraFields": "Extra 
Fields", + "extraFieldsJson": "Extra Fields JSON", + "helperText": "Connection type missing? Make sure you have installed the corresponding Airflow Providers Package.", + "helperTextForRedactedFields": "Redacted fields ('***') will remain unchanged if not modified.", + "selectConnectionType": "Select Connection Type", + "standardFields": "Standard Fields" + }, + "nothingFound": { + "description": "Connections defined via environment variables or secrets managers are not listed here.", + "documentationLink": "Learn more in the Airflow documentation.", + "learnMore": "These are resolved at runtime and are not visible in the UI.", + "title": "No connection found!" + }, + "searchPlaceholder": "Search Connections", + "test": "Test Connection", + "testDisabled": "Test connection feature is disabled. Please contact an administrator to enable it.", + "typeMeta": { + "error": "Failed to retrieve Connection Type Meta", + "standardFields": { + "description": "Description", + "host": "Host", + "login": "Login", + "password": "Password", + "port": "Port", + "url_schema": "Schema" + } + } + }, + "deleteActions": { + "button": "Delete", + "modal": { + "confirmButton": "Yes, Delete", + "secondConfirmMessage": "This action is permanent and cannot be undone.", + "thirdConfirmMessage": " Are you sure you want to proceed?" + }, + "selected": "Selected", + "tooltip": "Delete selected connections" + }, + "formActions": { + "reset": "Reset", + "save": "Save" + }, + "plugins": { + "columns": { + "source": "Source" + }, + "importError_one": "Plugin Import Error", + "importError_other": "Plugin Import Errors", + "searchPlaceholder": "Search by file" + }, + "pools": { + "add": "Add Pool", + "deferredSlotsIncluded": "Deferred Slots Included", + "delete": { + "title": "Delete Pool", + "warning": "This will remove all metadata related to the pool and may affect tasks using this pool." 
+ }, + "edit": "Edit Pool", + "form": { + "checkbox": "Check to include deferred tasks when calculating open pool slots", + "description": "Description", + "includeDeferred": "Include Deferred", + "nameMaxLength": "Name can contain a maximum of 256 characters", + "nameRequired": "Name is required", + "slots": "Slots" + }, + "noPoolsFound": "No pools found", + "pool_one": "Pool", + "pool_other": "Pools", + "searchPlaceholder": "Search Pools", + "sort": { + "asc": "Name (A-Z)", + "desc": "Name (Z-A)", + "placeholder": "Sort by" + } + }, + "providers": { + "columns": { + "packageName": "Package Name", + "version": "Version" + } + }, + "variables": { + "add": "Add Variable", + "columns": { + "isEncrypted": "Is Encrypted" + }, + "delete": { + "deleteVariable_one": "Delete 1 Variable", + "deleteVariable_other": "Delete {{count}} Variables", + "firstConfirmMessage_one": "You are about to delete the following variable:", + "firstConfirmMessage_other": "You are about to delete the following variables:", + "title": "Delete Variable", + "tooltip": "Delete selected variables" + }, + "edit": "Edit Variable", + "export": "Export", + "exportTooltip": "Export selected variables", + "form": { + "invalidJson": "Invalid JSON", + "keyMaxLength": "Key can contain a maximum of 250 characters", + "keyRequired": "Key is required", + "valueRequired": "Value is required" + }, + "import": { + "button": "Import", + "conflictResolution": "Select Variable Conflict Resolution", + "errorParsingJsonFile": "Error Parsing JSON File: Upload a JSON file containing variables (e.g., {\"key\": \"value\", ...}).", + "options": { + "fail": { + "description": "Fails the import if any existing variables are detected.", + "title": "Fail" + }, + "overwrite": { + "description": "Overwrites the variable in case of a conflict.", + "title": "Overwrite" + }, + "skip": { + "description": "Skips importing variables that already exist.", + "title": "Skip" + } + }, + "title": "Import Variables", + "upload": "Upload a JSON File", + "uploadPlaceholder": "Upload a JSON file containing variables (e.g., {\"key\": \"value\", ...})" + }, + "noRowsMessage": "No variables found", + "searchPlaceholder": "Search Keys", + "variable_one": "Variable", + "variable_other": "Variables" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/assets.json new file mode 100644 index 0000000000000..f9af5aa8e5da8 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/assets.json @@ -0,0 +1,30 @@ +{ + "consumingDags": "Consuming Dags", + "createEvent": { + "button": "Create Event", + "manual": { + "description": "Manually create an Asset Event", + "extra": "Asset Event Extra", + "label": "Manual" + }, + "materialize": { + "description": "Trigger the Dag upstream of this asset", + "descriptionWithDag": "Trigger the Dag upstream of this asset: {{dagName}}", + "label": "Materialize", + "unpauseDag": "Unpause {{dagName}} on trigger" + }, + "success": { + "manualDescription": "Manual asset event creation was successful.", + "manualTitle": "Asset Event Created", + "materializeDescription": "Upstream Dag {{dagId}} was triggered successfully.", + "materializeTitle": "Materializing Asset" + }, + "title": "Create Asset Event for {{name}}" + }, + "group": "Group", + "lastAssetEvent": "Last Asset Event", + "name": "Name", + "producingTasks": "Producing Tasks", + "scheduledDags": "Scheduled Dags", + "searchPlaceholder": "Search Assets" +} diff --git 
a/airflow-core/src/airflow/ui/public/i18n/locales/en/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/browse.json new file mode 100644 index 0000000000000..4a2ab97354b16 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "Collapse all extra json", + "expandAllExtra": "Expand all extra json" + }, + "columns": { + "event": "Event", + "extra": "Extra", + "user": "User", + "when": "When" + }, + "title": "Audit Log" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "Key", + "value": "Value" + }, + "title": "XCom" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/common.json new file mode 100644 index 0000000000000..8dda576063e0b --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/common.json @@ -0,0 +1,302 @@ +{ + "admin": { + "Config": "Config", + "Connections": "Connections", + "Plugins": "Plugins", + "Pools": "Pools", + "Providers": "Providers", + "Variables": "Variables" + }, + "asset_one": "Asset", + "asset_other": "Assets", + "assetEvent_one": "Asset Event", + "assetEvent_other": "Asset Events", + "backfill_one": "Backfill", + "backfill_other": "Backfills", + "browse": { + "auditLog": "Audit Log", + "requiredActions": "Required Actions", + "xcoms": "XComs" + }, + "collapseDetailsPanel": "Collapse Details Panel", + "createdAssetEvent_one": "Created Asset Event", + "createdAssetEvent_other": "Created Asset Events", + "dag_one": "Dag", + "dag_other": "Dags", + "dagDetails": { + "catchup": "Catchup", + "concurrency": "Concurrency", + "dagRunTimeout": "Dag Run Timeout", + "defaultArgs": "Default Args", + "description": "Description", + "documentation": "Dag Documentation", + "fileLocation": "File Location", + "hasTaskConcurrencyLimits": "Has Task Concurrency Limits", + "lastExpired": "Last Expired", + "lastParsed": "Last Parsed", + "latestDagVersion": "Latest Dag Version", + "latestRun": "Latest Run", + "maxActiveRuns": "Max Active Runs", + "maxActiveTasks": "Max Active Tasks", + "maxConsecutiveFailedDagRuns": "Max Consecutive Failed Dag Runs", + "nextRun": "Next Run", + "owner": "Owner", + "params": "Params", + "schedule": "Schedule", + "tags": "Tags" + }, + "dagId": "Dag ID", + "dagRun": { + "conf": "Conf", + "dagVersions": "Dag Version(s)", + "dataIntervalEnd": "Data Interval End", + "dataIntervalStart": "Data Interval Start", + "lastSchedulingDecision": "Last Scheduling Decision", + "queuedAt": "Queued At", + "runAfter": "Run After", + "runType": "Run Type", + "sourceAssetEvent": "Source Asset Event", + "triggeredBy": "Triggered By", + "triggeringUser": "Triggering User Name" + }, + "dagRun_one": "Dag Run", + "dagRun_other": "Dag Runs", + "dagRunId": "Dag Run ID", + "dagWarnings": "Dag warnings/errors", + "defaultToGraphView": "Default to graph view", + "defaultToGridView": "Default to grid view", + "direction": "Direction", + "docs": { + "documentation": "Documentation", + "githubRepo": "GitHub Repo", + "restApiReference": "REST API Reference" + }, + "duration": "Duration", + "endDate": "End Date", + "error": { + "back": "Back", + "defaultMessage": "An unexpected error occurred", + "home": "Home", + "notFound": "Page Not Found", + "title": "Error" + }, + "expand": { + "collapse": "Collapse", + "expand": "Expand", + "hotkey": "e", + "tooltip": "Press {{hotkey}} to toggle expand" + }, + "expression": { + "all": "All", + "and": "AND", + "any": "Any", + "or": "OR" 
+ }, + "logicalDate": "Logical Date", + "logout": "Logout", + "logoutConfirmation": "You are about to logout from the application.", + "mapIndex": "Map Index", + "modal": { + "cancel": "Cancel", + "confirm": "Confirm", + "delete": { + "button": "Delete", + "confirmation": "Are you sure you want to delete {{resourceName}}? This action cannot be undone." + } + }, + "nav": { + "admin": "Admin", + "assets": "Assets", + "browse": "Browse", + "dags": "Dags", + "docs": "Docs", + "home": "Home", + "legacyFabViews": "Legacy Views", + "plugins": "Plugins", + "security": "Security" + }, + "noItemsFound": "No {{modelName}} found", + "note": { + "add": "Add a note", + "dagRun": "Dag Run Note", + "label": "Note", + "placeholder": "Add a note...", + "taskInstance": "Task Instance Note" + }, + "pools": { + "deferred": "Deferred", + "open": "Open", + "pools_one": "pool", + "pools_other": "pools", + "queued": "Queued", + "running": "Running", + "scheduled": "Scheduled" + }, + "runId": "Run ID", + "runTypes": { + "asset_triggered": "Asset Triggered", + "backfill": "Backfill", + "manual": "Manual", + "scheduled": "Scheduled" + }, + "scroll": { + "direction": { + "bottom": "bottom", + "top": "top" + }, + "tooltip": "Press {{hotkey}} to scroll to {{direction}}" + }, + "seconds": "{{count}}s", + "security": { + "actions": "Actions", + "permissions": "Permissions", + "resources": "Resources", + "roles": "Roles", + "users": "Users" + }, + "selectLanguage": "Select Language", + "showDetailsPanel": "Show Details Panel", + "source": { + "hide": "Hide Source", + "hotkey": "s", + "show": "Show Source" + }, + "sourceAssetEvent_one": "Source Asset Event", + "sourceAssetEvent_other": "Source Asset Events", + "startDate": "Start Date", + "state": "State", + "states": { + "deferred": "Deferred", + "failed": "Failed", + "no_status": "No Status", + "none": "No Status", + "queued": "Queued", + "removed": "Removed", + "restarting": "Restarting", + "running": "Running", + "scheduled": "Scheduled", + "skipped": "Skipped", + "success": "Success", + "up_for_reschedule": "Up For Reschedule", + "up_for_retry": "Up For Retry", + "upstream_failed": "Upstream Failed" + }, + "switchToDarkMode": "Switch to Dark Mode", + "switchToLightMode": "Switch to Light Mode", + "table": { + "completedAt": "Completed at", + "createdAt": "Created at", + "filterByTag": "Filter Dags by tag", + "filterColumns": "Filter table columns", + "filterReset_one": "Reset filter", + "filterReset_other": "Reset filters", + "from": "From", + "maxActiveRuns": "Max Active Runs", + "noTagsFound": "No tags found", + "tagMode": { + "all": "All", + "any": "Any" + }, + "tagPlaceholder": "Filter by tag", + "to": "To" + }, + "task": { + "documentation": "Task Documentation", + "lastInstance": "Last Instance", + "operator": "Operator", + "triggerRule": "Trigger Rule" + }, + "task_one": "Task", + "task_other": "Tasks", + "taskId": "Task ID", + "taskInstance": { + "dagVersion": "Dag Version", + "executor": "Executor", + "executorConfig": "Executor Config", + "hostname": "Hostname", + "maxTries": "Max Tries", + "pid": "PID", + "pool": "Pool", + "poolSlots": "Pool Slots", + "priorityWeight": "Priority Weight", + "queue": "Queue", + "queuedWhen": "Queued At", + "scheduledWhen": "Scheduled At", + "triggerer": { + "assigned": "Assigned triggerer", + "class": "Trigger class", + "createdAt": "Trigger creation time", + "id": "Trigger ID", + "latestHeartbeat": "Latest triggerer heartbeat", + "title": "Triggerer Info" + }, + "unixname": "Unix Name" + }, + "taskInstance_one": "Task 
Instance", + "taskInstance_other": "Task Instances", + "timeRange": { + "last12Hours": "Last 12 Hours", + "last24Hours": "Last 24 Hours", + "lastHour": "Last Hour", + "pastWeek": "Past Week" + }, + "timestamp": { + "hide": "Hide Timestamps", + "hotkey": "t", + "show": "Show Timestamps" + }, + "timezone": "Timezone", + "timezoneModal": { + "current-timezone": "Current time in", + "placeholder": "Select a timezone", + "title": "Select Timezone", + "utc": "UTC (Coordinated Universal Time)" + }, + "toaster": { + "bulkDelete": { + "error": "Bulk Delete {{resourceName}} Request Failed", + "success": { + "description": "{{count}} {{resourceName}} have been successfully deleted. Keys: {{keys}}", + "title": "Bulk Delete {{resourceName}} Request Submitted" + } + }, + "create": { + "error": "Create {{resourceName}} Request Failed", + "success": { + "description": "{{resourceName}} has been successfully created.", + "title": "Create {{resourceName}} Request Submitted" + } + }, + "delete": { + "error": "Delete {{resourceName}} Request Failed", + "success": { + "description": "{{resourceName}} has been successfully deleted.", + "title": "Delete {{resourceName}} Request Submitted" + } + }, + "import": { + "error": "Import {{resourceName}} Request Failed", + "success": { + "description": "{{count}} {{resourceName}} have been successfully imported.", + "title": "Import {{resourceName}} Request Submitted" + } + }, + "update": { + "error": "Update {{resourceName}} Request Failed", + "success": { + "description": "{{resourceName}} has been successfully updated.", + "title": "Update {{resourceName}} Request Submitted" + } + } + }, + "total": "Total {{state}}", + "triggered": "Triggered", + "tryNumber": "Try Number", + "user": "User", + "wrap": { + "hotkey": "w", + "tooltip": "Press {{hotkey}} to toggle wrap", + "unwrap": "Unwrap", + "wrap": "Wrap" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/components.json new file mode 100644 index 0000000000000..3a56657187a36 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/components.json @@ -0,0 +1,134 @@ +{ + "backfill": { + "affected_one": "1 run will be triggered.", + "affected_other": "{{count}} runs will be triggered.", + "affectedNone": "No runs matching selected criteria.", + "allRuns": "All Runs", + "backwards": "Run Backwards", + "dateRange": "Date Range", + "dateRangeFrom": "From", + "dateRangeTo": "To", + "errorStartDateBeforeEndDate": "Start Date must be before the End Date", + "maxRuns": "Max Active Runs", + "missingAndErroredRuns": "Missing and Errored Runs", + "missingRuns": "Missing Runs", + "reprocessBehavior": "Reprocess Behavior", + "run": "Run Backfill", + "selectDescription": "Run this Dag for a range of dates", + "selectLabel": "Backfill", + "title": "Run Backfill", + "toaster": { + "success": { + "description": "Backfill jobs have been successfully triggered.", + "title": "Backfill generated" + } + }, + "tooltip": "Backfill requires a schedule", + "unpause": "Unpause {{dag_display_name}} on trigger", + "validation": { + "datesRequired": "Both Data Interval Start Date and End Date must be provided.", + "startBeforeEnd": "Data Interval Start Date must be less than or equal to Data Interval End Date." 
+ } + }, + "banner": { + "backfillInProgress": "Backfill in progress", + "cancel": "Cancel backfill", + "pause": "Pause backfill", + "unpause": "Unpause backfill" + }, + "clipboard": { + "copy": "Copy" + }, + "close": "Close", + "configForm": { + "advancedOptions": "Advanced Options", + "configJson": "Configuration JSON", + "invalidJson": "Invalid JSON format: {{errorMessage}}" + }, + "dagWarnings": { + "error_one": "1 Error", + "errorAndWarning": "1 Error and {{warningText}}", + "warning_one": "1 Warning", + "warning_other": "{{count}} Warnings" + }, + "durationChart": { + "duration": "Duration (seconds)", + "lastDagRun_one": "Last Dag Run", + "lastDagRun_other": "Last {{count}} Dag Runs", + "lastTaskInstance_one": "Last Task Instance", + "lastTaskInstance_other": "Last {{count}} Task Instances", + "queuedDuration": "Queued Duration", + "runAfter": "Run After", + "runDuration": "Run Duration" + }, + "fileUpload": { + "files_other": "{{count}} files" + }, + "flexibleForm": { + "placeholder": "Select Value", + "placeholderArray": "Enter each string on a new line", + "placeholderExamples": "Start typing to see options", + "placeholderMulti": "Select one or multiple values", + "validationErrorArrayNotArray": "Value must be an array.", + "validationErrorArrayNotNumbers": "All elements in the array must be numbers.", + "validationErrorArrayNotObject": "All elements in the array must be objects.", + "validationErrorRequired": "This field is required" + }, + "graph": { + "directionDown": "Top to Bottom", + "directionLeft": "Right to Left", + "directionRight": "Left to Right", + "directionUp": "Bottom to Top", + "downloadImage": "Download graph image", + "downloadImageError": "Failed to download graph image.", + "downloadImageErrorTitle": "Download Failed", + "otherDagRuns": "+Other Dag Runs", + "taskCount_one": "{{count}} Task", + "taskCount_other": "{{count}} Tasks", + "taskGroup": "Task Group" + }, + "limitedList": "+{{count}} more", + "logs": { + "file": "File", + "location": "line {{line}} in {{name}}" + }, + "reparseDag": "Reparse Dag", + "sortedAscending": "sorted ascending", + "sortedDescending": "sorted descending", + "sortedUnsorted": "unsorted", + "taskTries": "Task Tries", + "toggleCardView": "Show card view", + "toggleTableView": "Show table view", + "triggerDag": { + "button": "Trigger", + "loading": "Loading Dag information...", + "loadingFailed": "Failed to load Dag information. Please try again.", + "runIdHelp": "Optional - will be generated if not provided", + "selectDescription": "Trigger a single run of this Dag", + "selectLabel": "Single Run", + "title": "Trigger Dag", + "toaster": { + "success": { + "description": "Dag run has been successfully triggered.", + "title": "Dag Run Triggered" + } + }, + "unpause": "Unpause {{dagDisplayName}} on trigger" + }, + "trimText": { + "details": "Details", + "empty": "Empty", + "noContent": "No content available." 
+ }, + "versionDetails": { + "bundleLink": "Bundle Link", + "bundleName": "Bundle Name", + "bundleVersion": "Bundle Version", + "createdAt": "Created At", + "versionId": "Version ID" + }, + "versionSelect": { + "dagVersion": "Dag Version", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/dag.json new file mode 100644 index 0000000000000..a3086158bc405 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/dag.json @@ -0,0 +1,121 @@ +{ + "allRuns": "All Runs", + "blockingDeps": { + "dependency": "Dependency", + "reason": "Reason", + "title": "Dependencies Blocking Task From Getting Scheduled" + }, + "code": { + "bundleUrl": "Bundle Url", + "noCode": "No Code Found", + "parsedAt": "Parsed at:" + }, + "extraLinks": "Extra Links", + "grid": { + "buttons": { + "resetToLatest": "Reset to latest", + "toggleGroup": "Toggle group" + } + }, + "header": { + "buttons": { + "advanced": "Advanced", + "dagDocs": "Dag Docs" + } + }, + "logs": { + "allLevels": "All Log Levels", + "allSources": "All Sources", + "critical": "CRITICAL", + "debug": "DEBUG", + "error": "ERROR", + "fullscreen": { + "button": "Full screen", + "tooltip": "Press {{hotkey}} for fullscreen" + }, + "info": "INFO", + "noTryNumber": "No try number", + "settings": "Log Settings", + "viewInExternal": "View logs in {{name}} (attempt {{attempt}})", + "warning": "WARNING" + }, + "navigation": { + "jump": "Jump: Shift+{{arrow}}", + "navigation": "Navigation: {{arrow}}", + "toggleGroup": "Toggle group: Space" + }, + "overview": { + "buttons": { + "failedRun_one": "Failed Run", + "failedRun_other": "Failed Runs", + "failedTask_one": "Failed Task", + "failedTask_other": "Failed Tasks", + "failedTaskInstance_one": "Failed Task Instance", + "failedTaskInstance_other": "Failed Task Instances" + }, + "charts": { + "assetEvent_one": "Created Asset Event", + "assetEvent_other": "Created Asset Events" + }, + "failedLogs": { + "title": "Recent Failed Task Logs", + "viewFullLogs": "View full logs" + } + }, + "panel": { + "buttons": { + "options": "Options", + "showGraph": "Show Graph", + "showGrid": "Show Grid" + }, + "dagRuns": { + "label": "Number of Dag Runs" + }, + "dependencies": { + "label": "Dependencies", + "options": { + "allDagDependencies": "All Dag Dependencies", + "externalConditions": "External conditions", + "onlyTasks": "Only tasks" + }, + "placeholder": "Dependencies" + }, + "graphDirection": { + "label": "Graph Direction" + } + }, + "paramsFailed": "Failed to load params", + "parse": { + "toaster": { + "error": { + "description": "Dag parsing request failed. 
There could be pending parsing requests yet to be processed.", + "title": "Dag Failed to Reparse" + }, + "success": { + "description": "Dag should reparse soon.", + "title": "Reparsing request submitted successfully" + } + } + }, + "tabs": { + "assetEvents": "Asset Events", + "auditLog": "Audit Log", + "backfills": "Backfills", + "code": "Code", + "details": "Details", + "logs": "Logs", + "mappedTaskInstances_one": "Task Instance [{{count}}]", + "mappedTaskInstances_other": "Task Instances [{{count}}]", + "overview": "Overview", + "renderedTemplates": "Rendered Templates", + "requiredActions": "Required Actions", + "runs": "Runs", + "taskInstances": "Task Instances", + "tasks": "Tasks", + "xcom": "XCom" + }, + "taskGroups": { + "collapseAll": "Collapse all task groups", + "expandAll": "Expand all task groups" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/dags.json new file mode 100644 index 0000000000000..1367ac6a929f8 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/dags.json @@ -0,0 +1,97 @@ +{ + "assetSchedule": "{{count}} of {{total}} assets updated", + "dagActions": { + "delete": { + "button": "Delete Dag", + "warning": "This will remove all metadata related to the Dag, including Runs and Tasks." + } + }, + "favoriteDag": "Favorite Dag", + "filters": { + "allRunTypes": "All Run Types", + "allStates": "All States", + "favorite": { + "all": "All", + "favorite": "Favorite", + "unfavorite": "Unfavorite" + }, + "paused": { + "active": "Active", + "all": "All", + "paused": "Paused" + }, + "runIdPatternFilter": "Search Dag Runs", + "triggeringUserNameFilter": "Search by Triggering User" + }, + "ownerLink": "Owner link for {{owner}}", + "runAndTaskActions": { + "affectedTasks": { + "noItemsFound": "No tasks found.", + "title": "Affected Tasks: {{count}}" + }, + "clear": { + "button": "Clear {{type}}", + "buttonTooltip": "Press shift+c to clear", + "error": "Failed to clear {{type}}", + "title": "Clear {{type}}" + }, + "delete": { + "button": "Delete {{type}}", + "dialog": { + "resourceName": "{{type}} {{id}}", + "title": "Delete {{type}}", + "warning": "This will remove all metadata related to the {{type}}." 
+ },
+ "error": "Error deleting {{type}}",
+ "success": {
+ "description": "The {{type}} deletion request was successful.",
+ "title": "{{type}} Deleted Successfully"
+ }
+ },
+ "markAs": {
+ "button": "Mark {{type}} as...",
+ "buttonTooltip": {
+ "failed": "Press shift+f to mark as failed",
+ "success": "Press shift+s to mark as success"
+ },
+ "title": "Mark {{type}} as {{state}}"
+ },
+ "options": {
+ "downstream": "Downstream",
+ "existingTasks": "Clear existing tasks",
+ "future": "Future",
+ "onlyFailed": "Clear only failed tasks",
+ "past": "Past",
+ "queueNew": "Queue up new tasks",
+ "runOnLatestVersion": "Run with latest bundle version",
+ "upstream": "Upstream"
+ }
+ },
+ "search": {
+ "advanced": "Advanced Search",
+ "clear": "Clear search",
+ "dags": "Search Dags",
+ "hotkey": "+K",
+ "tasks": "Search Tasks"
+ },
+ "sort": {
+ "displayName": {
+ "asc": "Sort by Display Name (A-Z)",
+ "desc": "Sort by Display Name (Z-A)"
+ },
+ "lastRunStartDate": {
+ "asc": "Sort by Latest Run Start Date (Earliest-Latest)",
+ "desc": "Sort by Latest Run Start Date (Latest-Earliest)"
+ },
+ "lastRunState": {
+ "asc": "Sort by Latest Run State (A-Z)",
+ "desc": "Sort by Latest Run State (Z-A)"
+ },
+ "nextDagRun": {
+ "asc": "Sort by Next Dag Run (Earliest-Latest)",
+ "desc": "Sort by Next Dag Run (Latest-Earliest)"
+ },
+ "placeholder": "Sort by"
+ },
+ "unfavoriteDag": "Unfavorite Dag"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/dashboard.json
new file mode 100644
index 0000000000000..1d90ff0b777a6
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/dashboard.json
@@ -0,0 +1,45 @@
+{
+ "favorite": {
+ "favoriteDags_one": "First {{count}} favorite Dag",
+ "favoriteDags_other": "First {{count}} favorite Dags",
+ "noDagRuns": "There is no Dag Run for this Dag yet.",
+ "noFavoriteDags": "No favorites yet. Click the star icon next to a Dag in the list to add it to your favorites." 
+ },
+ "group": "Group",
+ "health": {
+ "dagProcessor": "Dag Processor",
+ "health": "Health",
+ "healthy": "Healthy",
+ "lastHeartbeat": "Last Heartbeat",
+ "metaDatabase": "MetaDatabase",
+ "scheduler": "Scheduler",
+ "status": "Status",
+ "triggerer": "Triggerer",
+ "unhealthy": "Unhealthy"
+ },
+ "history": "History",
+ "importErrors": {
+ "dagImportError_one": "Dag Import Error",
+ "dagImportError_other": "Dag Import Errors",
+ "searchByFile": "Search by file",
+ "timestamp": "Timestamp"
+ },
+ "managePools": "Manage Pools",
+ "noAssetEvents": "No Asset Events found.",
+ "poolSlots": "Pool Slots",
+ "sortBy": {
+ "newestFirst": "Newest First",
+ "oldestFirst": "Oldest First"
+ },
+ "source": "Source",
+ "stats": {
+ "activeDags": "Active Dags",
+ "failedDags": "Failed Dags",
+ "queuedDags": "Queued Dags",
+ "requiredActions": "Required Actions",
+ "runningDags": "Running Dags",
+ "stats": "Stats"
+ },
+ "uri": "Uri",
+ "welcome": "Welcome"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/hitl.json
new file mode 100644
index 0000000000000..216f60668b0e0
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/hitl.json
@@ -0,0 +1,24 @@
+{
+ "requiredAction_one": "Required Action",
+ "requiredAction_other": "Required Actions",
+ "requiredActionState": "Required Action State",
+ "response": {
+ "error": "Response failed",
+ "optionsDescription": "Choose your options for this task instance",
+ "optionsLabel": "Options",
+ "received": "Response received at ",
+ "respond": "Respond",
+ "success": "{{taskId}} response successful",
+ "title": "Human Task Instance - {{taskId}}"
+ },
+ "state": {
+ "approvalReceived": "Approval Received",
+ "approvalRequired": "Approval Required",
+ "choiceReceived": "Choice Received",
+ "choiceRequired": "Choice Required",
+ "rejectionReceived": "Rejection Received",
+ "responseReceived": "Response Received",
+ "responseRequired": "Response Required"
+ },
+ "subject": "Subject"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/admin.json
new file mode 100644
index 0000000000000..f785cba3461f0
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/admin.json
@@ -0,0 +1,166 @@
+{
+ "columns": {
+ "description": "Descripción",
+ "key": "Clave",
+ "name": "Nombre",
+ "value": "Valor"
+ },
+ "config": {
+ "columns": {
+ "section": "Sección"
+ },
+ "title": "Configuración de Airflow"
+ },
+ "connections": {
+ "add": "Agregar Conexión",
+ "columns": {
+ "connectionId": "ID de la Conexión",
+ "connectionType": "Tipo de Conexión",
+ "host": "Host",
+ "port": "Puerto"
+ },
+ "connection_one": "Conexión",
+ "connection_other": "Conexiones",
+ "delete": {
+ "deleteConnection_one": "Eliminar 1 conexión",
+ "deleteConnection_other": "Eliminar {{count}} conexiones",
+ "firstConfirmMessage_one": "Estás a punto de eliminar la siguiente conexión:",
+ "firstConfirmMessage_other": "Estás a punto de eliminar las siguientes conexiones:",
+ "title": "Eliminar Conexión"
+ },
+ "edit": "Editar Conexión",
+ "form": {
+ "connectionIdRequired": "El ID de la conexión es requerido",
+ "connectionIdRequirement": "El ID de la conexión no puede contener solo espacios",
+ "connectionTypeRequired": "El Tipo de Conexión es requerido",
+ "extraFields": "Campos Extra",
+ "extraFieldsJson": "Campos Extra (tipo JSON)",
+ "helperText": "¿Falta el Tipo de conexión? 
Asegúrate de haber instalado el paquete de proveedores de Airflow correspondiente.",
+ "selectConnectionType": "Seleccionar Tipo de Conexión",
+ "standardFields": "Campos Estándar"
+ },
+ "nothingFound": {
+ "description": "Las conexiones definidas a través de variables de entorno o gestores de secretos no se muestran aquí.",
+ "documentationLink": "Aprende más en la documentación de Airflow.",
+ "learnMore": "Estas se resuelven en tiempo de ejecución y no se muestran en la interfaz de usuario.",
+ "title": "¡No se encontraron conexiones!"
+ },
+ "searchPlaceholder": "Buscar Conexiones",
+ "test": "Prueba de Conexión",
+ "testDisabled": "La función de prueba de conexión está desactivada. Por favor, contacta a un administrador para activarla.",
+ "typeMeta": {
+ "error": "Error al recuperar la Metadata del Tipo de Conexión",
+ "standardFields": {
+ "description": "Descripción",
+ "host": "Host",
+ "login": "Login",
+ "password": "Contraseña",
+ "port": "Puerto",
+ "url_schema": "Esquema"
+ }
+ }
+ },
+ "deleteActions": {
+ "button": "Eliminar",
+ "modal": {
+ "confirmButton": "Sí, Eliminar",
+ "secondConfirmMessage": "Esta acción es permanente y no se puede deshacer.",
+ "thirdConfirmMessage": "¿Confirmas que quieres proceder?"
+ },
+ "selected": "Seleccionado",
+ "tooltip": "Eliminar conexiones seleccionadas"
+ },
+ "formActions": {
+ "reset": "Restablecer",
+ "save": "Guardar"
+ },
+ "plugins": {
+ "columns": {
+ "source": "Origen"
+ },
+ "importError_one": "Error de Importación de Plugin",
+ "importError_other": "Errores de Importación de Plugins",
+ "searchPlaceholder": "Buscar por archivo"
+ },
+ "pools": {
+ "add": "Agregar Pool",
+ "deferredSlotsIncluded": "Slots Diferidos Incluidos",
+ "delete": {
+ "title": "Eliminar Pool",
+ "warning": "Esto eliminará toda la metadata relacionada con el pool y puede afectar a las tareas que usan este pool." 
+ },
+ "edit": "Editar Pool",
+ "form": {
+ "checkbox": "Marcar para incluir tareas diferidas cuando se calculan los slots abiertos del pool",
+ "description": "Descripción",
+ "includeDeferred": "Incluir diferidos",
+ "nameMaxLength": "El nombre puede contener un máximo de 256 caracteres",
+ "nameRequired": "El nombre es requerido",
+ "slots": "Slots"
+ },
+ "noPoolsFound": "No se encontraron pools",
+ "pool_one": "Pool",
+ "pool_other": "Pools",
+ "searchPlaceholder": "Buscar Pools",
+ "sort": {
+ "asc": "Nombre (A-Z)",
+ "desc": "Nombre (Z-A)",
+ "placeholder": "Ordenar por"
+ }
+ },
+ "providers": {
+ "columns": {
+ "packageName": "Nombre del Paquete",
+ "version": "Versión"
+ }
+ },
+ "variables": {
+ "add": "Agregar Variable",
+ "columns": {
+ "isEncrypted": "Está encriptada"
+ },
+ "delete": {
+ "deleteVariable_one": "Eliminar 1 Variable",
+ "deleteVariable_other": "Eliminar {{count}} Variables",
+ "firstConfirmMessage_one": "Estás a punto de eliminar la siguiente variable:",
+ "firstConfirmMessage_other": "Estás a punto de eliminar las siguientes variables:",
+ "title": "Eliminar Variable",
+ "tooltip": "Eliminar variables seleccionadas"
+ },
+ "edit": "Editar Variable",
+ "export": "Exportar",
+ "exportTooltip": "Exportar variables seleccionadas",
+ "form": {
+ "invalidJson": "JSON inválido",
+ "keyMaxLength": "La clave puede contener un máximo de 250 caracteres",
+ "keyRequired": "La clave es requerida",
+ "valueRequired": "El valor es requerido"
+ },
+ "import": {
+ "button": "Importar",
+ "conflictResolution": "Seleccionar Resolución de Conflicto de Variables",
+ "errorParsingJsonFile": "Error al analizar el archivo JSON: Cargar un archivo JSON que contenga variables (e.g., {\"key\": \"value\", ...}).",
+ "options": {
+ "fail": {
+ "description": "Falla la importación si se detectan variables existentes.",
+ "title": "Fallar"
+ },
+ "overwrite": {
+ "description": "Sobrescribe la variable en caso de conflicto.",
+ "title": "Sobrescribir"
+ },
+ "skip": {
+ "description": "Omite la importación de variables que ya existen.",
+ "title": "Omitir"
+ }
+ },
+ "title": "Importar Variables",
+ "upload": "Cargar un Archivo JSON",
+ "uploadPlaceholder": "Cargar un archivo JSON que contenga variables (e.g., {\"key\": \"value\", ...})"
+ },
+ "noRowsMessage": "No se encontraron variables",
+ "searchPlaceholder": "Buscar Claves",
+ "variable_one": "Variable",
+ "variable_other": "Variables"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/assets.json
new file mode 100644
index 0000000000000..e6042674167a9
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/assets.json
@@ -0,0 +1,29 @@
+{
+ "consumingDags": "Dags Consumidores",
+ "createEvent": {
+ "button": "Crear Evento",
+ "manual": {
+ "description": "Crear un Evento de Asset manualmente",
+ "extra": "Evento de Asset Extra",
+ "label": "Manual"
+ },
+ "materialize": {
+ "description": "Activar el Dag upstream de este asset",
+ "descriptionWithDag": "Activar el Dag upstream de este asset: {{dagName}}",
+ "label": "Materializar",
+ "unpauseDag": "Despausar {{dagName}} al activar"
+ },
+ "success": {
+ "manualDescription": "La creación de eventos de asset manual fue exitosa.",
+ "manualTitle": "Evento de Asset Creado",
+ "materializeDescription": "El Dag upstream {{dagId}} fue activado exitosamente.",
+ "materializeTitle": "Materializando Asset"
+ },
+ "title": "Crear Evento de Asset para {{name}}"
+ },
+ "group": "Grupo",
"lastAssetEvent": "Último Evento de Asset", + "name": "Nombre", + "producingTasks": "Tareas produciendo", + "searchPlaceholder": "Buscar Assets" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/browse.json new file mode 100644 index 0000000000000..6dafc0010006b --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "Colapsar todos los extra json", + "expandAllExtra": "Expandir todos los extra json" + }, + "columns": { + "event": "Evento", + "extra": "Extra", + "user": "Usuario", + "when": "Cuando" + }, + "title": "Auditar Log" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "Clave", + "value": "Valor" + }, + "title": "XCom" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/common.json new file mode 100644 index 0000000000000..b16dfc59b02e8 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/common.json @@ -0,0 +1,280 @@ +{ + "admin": { + "Config": "Configuración", + "Connections": "Conexiones", + "Plugins": "Plugins", + "Pools": "Pools", + "Providers": "Proveedores", + "Variables": "Variables" + }, + "asset_one": "Asset", + "asset_other": "Assets", + "assetEvent_one": "Evento de Asset", + "assetEvent_other": "Eventos de Asset", + "backfill_one": "Backfill", + "backfill_other": "Backfills", + "browse": { + "auditLog": "Auditar Log", + "xcoms": "XComs" + }, + "collapseDetailsPanel": "Colapsar Detalles del Panel", + "createdAssetEvent_one": "Evento de Asset Creado", + "createdAssetEvent_other": "Eventos de Asset Creados", + "dag_one": "Dag", + "dag_other": "Dags", + "dagDetails": { + "catchup": "Catchup", + "concurrency": "Concurrencia", + "dagRunTimeout": "Tiempo de Ejecución del Dag", + "defaultArgs": "Argumentos por Defecto", + "description": "Descripción", + "documentation": "Documentación del Dag", + "fileLocation": "Ubicación del Archivo", + "hasTaskConcurrencyLimits": "Tiene límites de concurrencia de áreas", + "lastExpired": "Último Expirado", + "lastParsed": "Último Parseado", + "latestDagVersion": "Última Versión del Dag", + "latestRun": "Última Ejecución", + "maxActiveRuns": "Máximo de Ejecuciones Activas", + "maxActiveTasks": "Máximo de Tareas Activas", + "maxConsecutiveFailedDagRuns": "Máximo de Ejecuciones Fallidas Consecutivas del Dag", + "nextRun": "Siguiente Ejecución", + "owner": "Propietario", + "params": "Parámetros", + "schedule": "Programación", + "tags": "Etiquetas" + }, + "dagId": "ID del Dag", + "dagRun": { + "conf": "Conf", + "dagVersions": "Versión(es) del Dag", + "dataIntervalEnd": "Intervalo de Datos Final", + "dataIntervalStart": "Intervalo de Datos Inicial", + "lastSchedulingDecision": "Última Decisión de Programación", + "queuedAt": "En Cola en", + "runAfter": "Ejecutar Después", + "runType": "Tipo de Ejecución", + "sourceAssetEvent": "Evento de Asset Fuente", + "triggeredBy": "Activado por" + }, + "dagRun_one": "Ejecución del Dag", + "dagRun_other": "Ejecuciones del Dag", + "dagWarnings": "Advertencias/Errores del Dag", + "defaultToGraphView": "Por defecto a vista gráfica", + "defaultToGridView": "Por defecto a vista en cuadrícula", + "direction": "Dirección", + "docs": { + "documentation": "Documentación", + "githubRepo": "Repositorio de GitHub", + "restApiReference": "Referencia de REST API" + }, + "duration": "Duración", + "endDate": "Fecha Final", + "error": { + 
"back": "Atrás", + "defaultMessage": "Ocurrió un error inesperado", + "home": "Inicio", + "notFound": "Página no encontrada", + "title": "Error" + }, + "expression": { + "all": "Todos", + "and": "Y", + "any": "Cualquiera", + "or": "O" + }, + "logicalDate": "Fecha Lógica", + "logout": "Cerrar Sesión", + "logoutConfirmation": "Estás a punto de cerrar sesión de la aplicación.", + "mapIndex": "Mapa de Índice", + "modal": { + "cancel": "Cancelar", + "confirm": "Confirmar", + "delete": { + "button": "Eliminar", + "confirmation": "¿Confirmas de querer eliminar {{resourceName}}? Esta acción no se puede deshacer." + } + }, + "nav": { + "admin": "Administración", + "assets": "Assets", + "browse": "Navegar", + "dags": "Dags", + "docs": "Docs", + "home": "Inicio", + "plugins": "Plugins", + "security": "Seguridad" + }, + "noItemsFound": "No se encontraron {{modelName}}s", + "note": { + "add": "Agregar una nota", + "dagRun": "Nota de Ejecución del Dag", + "label": "Nota", + "placeholder": "Agregar una nota...", + "taskInstance": "Nota de Instancia de Tarea" + }, + "pools": { + "deferred": "Diferido", + "open": "Abierto", + "pools_one": "pool", + "pools_other": "pools", + "queued": "En Cola", + "running": "En Ejecución", + "scheduled": "Programado" + }, + "runId": "ID de la corrida", + "runTypes": { + "asset_triggered": "Asset Activado", + "backfill": "Backfill", + "manual": "Manual", + "scheduled": "Programado" + }, + "scroll": { + "direction": { + "bottom": "abajo", + "top": "arriba" + }, + "tooltip": "Presiona {{hotkey}} para desplazarte a {{direction}}" + }, + "seconds": "{{count}}s", + "security": { + "actions": "Acciones", + "permissions": "Permisos", + "resources": "Recursos", + "roles": "Roles", + "users": "Usuarios" + }, + "selectLanguage": "Seleccionar Idioma", + "showDetailsPanel": "Mostrar Panel de Detalles", + "sourceAssetEvent_one": "Evento de Asset Fuente", + "sourceAssetEvent_other": "Eventos de Asset Fuente", + "startDate": "Fecha Inicial", + "state": "Estado", + "states": { + "deferred": "Diferido", + "failed": "Fallido", + "no_status": "Sin Estado", + "none": "Sin Estado", + "queued": "En Cola", + "removed": "Removido", + "restarting": "Reiniciando", + "running": "En Ejecución", + "scheduled": "Programado", + "skipped": "Omitido", + "success": "Exitoso", + "up_for_reschedule": "Por Reprogramar", + "up_for_retry": "Por Reintentar", + "upstream_failed": "Fallido en Upstream" + }, + "switchToDarkMode": "Cambiar a Modo Oscuro", + "switchToLightMode": "Cambiar a Modo Claro", + "table": { + "completedAt": "Completado en", + "createdAt": "Creado en", + "filterByTag": "Filtrar Dags por etiqueta", + "filterColumns": "Filtrar columnas de la tabla", + "filterReset_one": "Restablecer filtro", + "filterReset_other": "Restablecer filtros", + "from": "Desde", + "maxActiveRuns": "Máximo de Ejecuciones Activas", + "noTagsFound": "No se encontraron etiquetas", + "tagMode": { + "all": "Todos", + "any": "Cualquiera" + }, + "tagPlaceholder": "Filtrar por etiqueta", + "to": "Hasta" + }, + "task": { + "documentation": "Documentación de la Tarea", + "lastInstance": "Última Instancia", + "operator": "Operador", + "triggerRule": "Regla de Activación" + }, + "task_one": "Tarea", + "task_other": "Tareas", + "taskId": "ID de la Tarea", + "taskInstance": { + "dagVersion": "Versión del Dag", + "executor": "Executor", + "executorConfig": "Configuración del Executor", + "hostname": "Nombre de Host", + "maxTries": "Máximo de Intentos", + "pid": "PID", + "pool": "Pool", + "poolSlots": "Slots del Pool", + 
"priorityWeight": "Peso de Prioridad", + "queue": "Cola", + "queuedWhen": "En Cola en", + "scheduledWhen": "Programado en", + "triggerer": { + "assigned": "Triggerer Asignado", + "class": "Clase del Trigger", + "createdAt": "Tiempo de Creación del Trigger", + "id": "ID del Trigger", + "latestHeartbeat": "Último Heartbeat del Triggerer", + "title": "Información del Triggerer" + }, + "unixname": "Nombre de Unix" + }, + "taskInstance_one": "Instancia de Tarea", + "taskInstance_other": "Instancias de Tarea", + "timeRange": { + "last12Hours": "Últimas 12 Horas", + "last24Hours": "Últimas 24 Horas", + "lastHour": "Última Hora", + "pastWeek": "Semana Pasada" + }, + "timezone": "Zona Horaria", + "timezoneModal": { + "current-timezone": "Hora actual en", + "placeholder": "Seleccionar una zona horaria", + "title": "Seleccionar Zona Horaria", + "utc": "UTC (Tiempo Universal Coordinado)" + }, + "toaster": { + "bulkDelete": { + "error": "Eliminar {{resourceName}} Request Fallido", + "success": { + "description": "{{count}} {{resourceName}} han sido eliminados exitosamente. Claves: {{keys}}", + "title": "Eliminar {{resourceName}} Request Enviado" + } + }, + "create": { + "error": "Crear {{resourceName}} Request Fallido", + "success": { + "description": "{{resourceName}} ha sido creado exitosamente.", + "title": "Crear {{resourceName}} Request Enviado" + } + }, + "delete": { + "error": "Eliminar {{resourceName}} Request Fallido", + "success": { + "description": "{{resourceName}} ha sido eliminado exitosamente.", + "title": "Eliminar {{resourceName}} Request Enviado" + } + }, + "import": { + "error": "Importar {{resourceName}} Request Fallido", + "success": { + "description": "{{count}} {{resourceName}} han sido importados exitosamente.", + "title": "Importar {{resourceName}} Request Enviado" + } + }, + "update": { + "error": "Actualizar {{resourceName}} Request Fallido", + "success": { + "description": "{{resourceName}} ha sido actualizado exitosamente.", + "title": "Actualizar {{resourceName}} Request Enviado" + } + } + }, + "triggered": "Activado", + "tryNumber": "Intento Número", + "user": "Usuario", + "wrap": { + "tooltip": "Presiona {{hotkey}} para alternar el 'envolver'", + "unwrap": "Desenvolver", + "wrap": "Envolver" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json new file mode 100644 index 0000000000000..6ebf32f3271cd --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json @@ -0,0 +1,131 @@ +{ + "backfill": { + "affected_one": "1 ejecución será activada.", + "affected_other": "{{count}} ejecuciones serán activadas.", + "affectedNone": "No hay ejecuciones que coincidan con los criterios seleccionados.", + "backwards": "Ejecutar Hacia Atrás", + "dateRange": "Rango de Fechas", + "dateRangeFrom": "Desde", + "dateRangeTo": "Hasta", + "errorStartDateBeforeEndDate": "La Fecha Inicial debe ser antes de la Fecha Final", + "maxRuns": "Máximo de Ejecuciones Activas", + "reprocessBehavior": "Comportamiento de Reprocesamiento", + "run": "Ejecutar Backfill", + "selectDescription": "Ejecutar este Dag para un rango de fechas", + "selectLabel": "Backfill", + "title": "Ejecutar Backfill", + "toaster": { + "success": { + "description": "Backfill jobs han sido activados exitosamente.", + "title": "Backfill generado" + } + }, + "tooltip": "Backfill requiere una programación", + "unpause": "Reanudar {{dag_display_name}} al activarse", + "validation": { + "datesRequired": "Ambos 
valores de Fecha Inicial y Fecha Final del intervalo de datos deben ser proporcionados.",
+ "startBeforeEnd": "La Fecha Inicial del intervalo de datos debe ser menor o igual a la Fecha Final."
+ }
+ },
+ "banner": {
+ "backfillInProgress": "Backfill en progreso",
+ "cancel": "Cancelar backfill",
+ "pause": "Pausar backfill",
+ "unpause": "Reanudar backfill"
+ },
+ "clipboard": {
+ "copy": "Copiar"
+ },
+ "close": "Cerrar",
+ "configForm": {
+ "advancedOptions": "Opciones Avanzadas",
+ "configJson": "Configuración JSON",
+ "invalidJson": "Formato JSON inválido: {{errorMessage}}"
+ },
+ "dagWarnings": {
+ "error_one": "1 Error",
+ "errorAndWarning": "1 Error y {{warningText}}",
+ "warning_one": "1 Advertencia",
+ "warning_other": "{{count}} Advertencias"
+ },
+ "durationChart": {
+ "duration": "Duración (segundos)",
+ "lastDagRun_one": "Última Ejecución de Dag",
+ "lastDagRun_other": "Últimas {{count}} Ejecuciones de Dag",
+ "lastTaskInstance_one": "Última Instancia de Tarea",
+ "lastTaskInstance_other": "Últimas {{count}} Instancias de Tarea",
+ "queuedDuration": "Duración en Cola",
+ "runAfter": "Ejecutar Después",
+ "runDuration": "Duración de la Ejecución"
+ },
+ "fileUpload": {
+ "files_other": "{{count}} archivos"
+ },
+ "flexibleForm": {
+ "placeholder": "Seleccionar Valor",
+ "placeholderArray": "Ingrese cada cadena en una nueva línea",
+ "placeholderExamples": "Comience a escribir para ver opciones",
+ "placeholderMulti": "Seleccionar uno o múltiples valores",
+ "validationErrorArrayNotArray": "El valor debe ser un array.",
+ "validationErrorArrayNotNumbers": "Todos los elementos en el array deben ser números.",
+ "validationErrorArrayNotObject": "Todos los elementos en el array deben ser objetos.",
+ "validationErrorRequired": "Este campo es requerido"
+ },
+ "graph": {
+ "directionDown": "De arriba a abajo",
+ "directionLeft": "De derecha a izquierda",
+ "directionRight": "De izquierda a derecha",
+ "directionUp": "De abajo a arriba",
+ "downloadImage": "Descargar imagen",
+ "downloadImageError": "Error al descargar la imagen.",
+ "downloadImageErrorTitle": "Descarga Fallida",
+ "otherDagRuns": "+Otras Ejecuciones de Dag",
+ "taskCount_one": "{{count}} Tarea",
+ "taskCount_other": "{{count}} Tareas",
+ "taskGroup": "Grupo de Tareas"
+ },
+ "limitedList": "+{{count}} más",
+ "logs": {
+ "file": "Archivo",
+ "location": "línea {{line}} en {{name}}"
+ },
+ "reparseDag": "Reparsear Dag",
+ "sortedAscending": "ordenado ascendente",
+ "sortedDescending": "ordenado descendente",
+ "sortedUnsorted": "sin ordenar",
+ "taskTries": "Intentos de Tarea",
+ "toggleCardView": "Mostrar vista de tarjeta",
+ "toggleTableView": "Mostrar vista de tabla",
+ "triggerDag": {
+ "button": "Trigger",
+ "loading": "Cargando información del Dag...",
+ "loadingFailed": "Error al cargar la información del Dag. Por favor, inténtelo de nuevo.",
+ "runIdHelp": "Opcional - se generará si no se proporciona",
+ "selectDescription": "Activar una ejecución única de este Dag",
+ "selectLabel": "Ejecución Única",
+ "title": "Activar Dag",
+ "toaster": {
+ "success": {
+ "description": "La ejecución del Dag ha sido activada exitosamente.",
+ "title": "Ejecución del Dag Activada"
+ }
+ },
+ "unpause": "Reanudar {{dagDisplayName}} al activarse"
+ },
+ "trimText": {
+ "details": "Detalles",
+ "empty": "Vacío",
+ "noContent": "No hay contenido disponible."
+ },
+ "versionDetails": {
+ "bundleLink": "Enlace del Bundle",
+ "bundleName": "Nombre del Bundle",
+ "bundleVersion": "Versión del Bundle",
+ "createdAt": "Creado en",
+ "versionId": "ID de la Versión"
+ },
+ "versionSelect": {
+ "dagVersion": "Versión del Dag",
+ "versionCode": "v{{versionCode}}"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/dag.json
new file mode 100644
index 0000000000000..6d16460f67c8c
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/dag.json
@@ -0,0 +1,110 @@
+{
+ "allRuns": "Todas las Ejecuciones",
+ "blockingDeps": {
+ "dependency": "Dependencia",
+ "reason": "Razón",
+ "title": "Dependencias que impiden que la tarea sea programada"
+ },
+ "code": {
+ "bundleUrl": "URL del Bundle",
+ "noCode": "No se encontró código",
+ "parsedAt": "Parseado en:"
+ },
+ "extraLinks": "Enlaces Extra",
+ "grid": {
+ "buttons": {
+ "resetToLatest": "Reiniciar a la última",
+ "toggleGroup": "Alternar grupo"
+ }
+ },
+ "header": {
+ "buttons": {
+ "dagDocs": "Documentación del Dag"
+ }
+ },
+ "logs": {
+ "noTryNumber": "No hay número de intento",
+ "viewInExternal": "Ver logs en {{name}} (intento {{attempt}})"
+ },
+ "overview": {
+ "buttons": {
+ "failedRun_one": "Ejecución Fallida",
+ "failedRun_other": "Ejecuciones Fallidas",
+ "failedTask_one": "Tarea Fallida",
+ "failedTask_other": "Tareas Fallidas",
+ "failedTaskInstance_one": "Instancia de Tarea Fallida",
+ "failedTaskInstance_other": "Instancias de Tarea Fallidas"
+ },
+ "charts": {
+ "assetEvent_one": "Evento de Asset Creado",
+ "assetEvent_other": "Eventos de Asset Creados"
+ },
+ "failedLogs": {
+ "title": "Logs de Tareas Fallidas Recientes",
+ "viewFullLogs": "Ver logs completos"
+ }
+ },
+ "panel": {
+ "buttons": {
+ "options": "Opciones",
+ "showGraph": "Mostrar Gráfico",
+ "showGrid": "Mostrar Grilla"
+ },
+ "dagRuns": {
+ "label": "Número de Ejecuciones de Dag"
+ },
+ "dependencies": {
+ "label": "Dependencias",
+ "options": {
+ "allDagDependencies": "Todas las Dependencias de Dag",
+ "externalConditions": "Condiciones Externas",
+ "onlyTasks": "Solo tareas"
+ },
+ "placeholder": "Dependencias"
+ },
+ "graphDirection": {
+ "label": "Dirección del Gráfico"
+ }
+ },
+ "paramsFailed": "Error al cargar los parámetros",
+ "parse": {
+ "toaster": {
+ "error": {
+ "description": "El Dag no pudo ser reparseado. 
Puede haber solicitudes de reparsing pendientes por procesar.",
+ "title": "El Dag no pudo ser reparseado"
+ },
+ "success": {
+ "description": "El Dag debería reparsearse pronto.",
+ "title": "Solicitud de reparsing enviada exitosamente"
+ }
+ }
+ },
+ "tabs": {
+ "assetEvents": "Eventos de Asset",
+ "auditLog": "Log de Auditoría",
+ "backfills": "Backfills",
+ "code": "Código",
+ "details": "Detalles",
+ "logs": "Logs",
+ "mappedTaskInstances_one": "Instancia de Tarea [{{count}}]",
+ "mappedTaskInstances_other": "Instancias de Tarea [{{count}}]",
+ "overview": "Resumen",
+ "renderedTemplates": "Plantillas Renderizadas",
+ "runs": "Ejecuciones",
+ "taskInstances": "Instancias de Tarea",
+ "tasks": "Tareas",
+ "xcom": "XCom"
+ },
+ "taskGroups": {
+ "collapseAll": "Colapsar todos los grupos de tareas",
+ "expandAll": "Expandir todos los grupos de tareas"
+ },
+ "taskLogs": {
+ "allLogLevels": "Todos los Niveles de Log",
+ "allSources": "Todos los Orígenes",
+ "fullscreen": {
+ "button": "Pantalla completa",
+ "tooltip": "Presiona {{hotkey}} para pantalla completa"
+ }
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/dags.json
new file mode 100644
index 0000000000000..35dc7093e915e
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/dags.json
@@ -0,0 +1,87 @@
+{
+ "assetSchedule": "{{count}} de {{total}} assets actualizados",
+ "dagActions": {
+ "delete": {
+ "button": "Eliminar Dag",
+ "warning": "Esto eliminará toda la metadata relacionada con el Dag, incluyendo Ejecuciones y Tareas."
+ }
+ },
+ "filters": {
+ "allRunTypes": "Todos los Tipos de Ejecución",
+ "allStates": "Todos los Estados",
+ "paused": {
+ "active": "Activo",
+ "all": "Todos",
+ "paused": "Pausado"
+ }
+ },
+ "ownerLink": "Enlace de Propietario para {{owner}}",
+ "runAndTaskActions": {
+ "affectedTasks": {
+ "noItemsFound": "No se encontraron tareas.",
+ "title": "Tareas Afectadas: {{count}}"
+ },
+ "clear": {
+ "button": "Limpiar {{type}}",
+ "buttonTooltip": "Presiona shift+c para limpiar",
+ "error": "Error al limpiar {{type}}",
+ "title": "Limpiar {{type}}"
+ },
+ "delete": {
+ "button": "Eliminar {{type}}",
+ "dialog": {
+ "resourceName": "{{type}} {{id}}",
+ "title": "Eliminar {{type}}",
+ "warning": "Esto eliminará toda la metadata relacionada con el {{type}}."
+ }, + "error": "Error al eliminar {{type}}", + "success": { + "description": "La solicitud de eliminación de {{type}} fue exitosa.", + "title": "{{type}} Eliminado Exitosamente" + } + }, + "markAs": { + "button": "Marcar {{type}} como...", + "buttonTooltip": { + "failed": "Presiona shift+f para marcar como fallido", + "success": "Presiona shift+s para marcar como exitoso" + }, + "title": "Marcar {{type}} como {{state}}" + }, + "options": { + "downstream": "Downstream", + "existingTasks": "Limpiar tareas existentes", + "future": "Futuro", + "onlyFailed": "Limpiar solo tareas fallidas", + "past": "Pasado", + "queueNew": "Poner en cola nuevas tareas", + "upstream": "Upstream" + } + }, + "search": { + "advanced": "Búsqueda Avanzada", + "clear": "Limpiar búsqueda", + "dags": "Buscar Dags", + "hotkey": "+K", + "tasks": "Buscar Tareas" + }, + "sort": { + "displayName": { + "asc": "Ordenar por Nombre (A-Z)", + "desc": "Ordenar por Nombre (Z-A)" + }, + "lastRunStartDate": { + "asc": "Ordenar por Fecha Inicial de Ejecución (Más Antiguo-Más Reciente)", + "desc": "Ordenar por Fecha Inicial de Ejecución (Más Reciente-Más Antiguo)" + }, + "lastRunState": { + "asc": "Ordenar por Estado de Ejecución (A-Z)", + "desc": "Ordenar por Estado de Ejecución (Z-A)" + }, + "nextDagRun": { + "asc": "Ordenar por Ejecución de Dag (Más Antiguo-Más Reciente)", + "desc": "Ordenar por Ejecución de Dag (Más Reciente-Más Antiguo)" + }, + "placeholder": "Ordenar por" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/dashboard.json new file mode 100644 index 0000000000000..0d18361392d21 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/dashboard.json @@ -0,0 +1,38 @@ +{ + "group": "Grupo", + "health": { + "dagProcessor": "Procesador de Dags", + "health": "Salud", + "healthy": "Saludable", + "lastHeartbeat": "Último Heartbeat", + "metaDatabase": "Base de datos de la metadata", + "scheduler": "Programador", + "status": "Estado", + "triggerer": "Triggerer", + "unhealthy": "No Saludable" + }, + "history": "Historial", + "importErrors": { + "dagImportError_one": "Error de Importación de Dag", + "dagImportError_other": "Errores de Importación de Dags", + "searchByFile": "Buscar por archivo", + "timestamp": "Timestamp" + }, + "managePools": "Gestionar Pools", + "noAssetEvents": "No se encontraron Eventos de Asset.", + "poolSlots": "Slots del Pool", + "sortBy": { + "newestFirst": "Más Recientes", + "oldestFirst": "Más Antiguos" + }, + "source": "Origen", + "stats": { + "activeDags": "Dags Activos", + "failedDags": "Dags Fallidos", + "queuedDags": "Dags en Cola", + "runningDags": "Dags en Ejecución", + "stats": "Estadísticas" + }, + "uri": "URI", + "welcome": "Te damos la bienvenida" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/admin.json new file mode 100644 index 0000000000000..9503b5a11c178 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/admin.json @@ -0,0 +1,163 @@ +{ + "columns": { + "description": "Description", + "key": "Clé", + "name": "Nom", + "value": "Valeur" + }, + "config": { + "columns": { + "section": "Section" + }, + "title": "Configuration d'Airflow" + }, + "connections": { + "add": "Ajouter une Connexion", + "columns": { + "connectionId": "ID de la connexion", + "connectionType": "Type de la connexion", + "host": "Hôte", + "port": "Port" + }, + "connection_one": "Connexion", + "connection_other": 
"Connexions", + "delete": { + "deleteConnection_one": "Supprimer 1 connexion", + "deleteConnection_other": "Supprimer {{count}} connexions", + "firstConfirmMessage_one": "Vous êtes sur le point de supprimer la connexion suivante :", + "firstConfirmMessage_other": "Vous êtes sur le point de supprimer les connexions suivantes :", + "title": "Supprimer la Connexion" + }, + "edit": "Modifier la Connexion", + "form": { + "connectionIdRequired": "L'ID de la connexion est requis", + "connectionIdRequirement": "L'ID de la connexion ne peut pas contenir uniquement des espaces", + "connectionTypeRequired": "Le type de la connexion est requis", + "extraFields": "Champs Supplémentaires", + "extraFieldsJson": "Champs Supplémentaires JSON", + "helperText": "Le type de la connexion est manquant ? Assurez-vous d'avoir installé le package Airflow Providers correspondant.", + "selectConnectionType": "Sélectionner le Type de Connexion", + "standardFields": "Champs Standards" + }, + "nothingFound": { + "title": "Aucune connexion trouvée!" + }, + "searchPlaceholder": "Rechercher les connexions", + "test": "Test la connexion", + "testDisabled": "Le test de connexion est désactivé. Veuillez contacter un administrateur pour l'activer.", + "typeMeta": { + "error": "Échec de la récupération des métadonnées du type de connexion", + "standardFields": { + "description": "Description", + "host": "Hôte", + "login": "Identifiant", + "password": "Mot de passe", + "port": "Port", + "url_schema": "Schéma" + } + } + }, + "deleteActions": { + "button": "Supprimer", + "modal": { + "confirmButton": "Oui, Supprimer", + "secondConfirmMessage": "Cette action est irréversible.", + "thirdConfirmMessage": " Êtes-vous sûr de vouloir continuer ?" + }, + "selected": "Sélectionné", + "tooltip": "Supprimer les connexions sélectionnées" + }, + "formActions": { + "reset": "Réinitialiser", + "save": "Sauvegarder" + }, + "plugins": { + "columns": { + "source": "Source" + }, + "importError_one": "Erreur d'importation de plugin", + "importError_other": "Erreurs d'importation de plugins", + "searchPlaceholder": "Rechercher par fichier" + }, + "pools": { + "add": "Ajouter un Pool", + "deferredSlotsIncluded": "Slots Différés Inclus", + "delete": { + "title": "Supprimer le Pool", + "warning": "Cela supprimera toutes les métadonnées liées au pool et peut affecter les tâches utilisant ce pool." 
+ },
+ "edit": "Modifier le Pool",
+ "form": {
+ "checkbox": "Cochez pour inclure les tâches différées lors du calcul des slots libres du pool",
+ "description": "Description",
+ "includeDeferred": "Inclure les Tâches Différées",
+ "nameMaxLength": "Le nom peut contenir un maximum de 250 caractères",
+ "nameRequired": "Le nom est requis",
+ "slots": "Slots"
+ },
+ "noPoolsFound": "Aucun pool trouvé",
+ "pool_one": "Pool",
+ "pool_other": "Pools",
+ "searchPlaceholder": "Rechercher des Pools",
+ "sort": {
+ "asc": "Nom (A-Z)",
+ "desc": "Nom (Z-A)",
+ "placeholder": "Trier par"
+ }
+ },
+ "providers": {
+ "columns": {
+ "packageName": "Nom du paquet",
+ "version": "Version"
+ }
+ },
+ "variables": {
+ "add": "Ajouter une Variable",
+ "columns": {
+ "isEncrypted": "Est chiffrée"
+ },
+ "delete": {
+ "deleteVariable_one": "Supprimer 1 Variable",
+ "deleteVariable_other": "Supprimer {{count}} Variables",
+ "firstConfirmMessage_one": "Vous êtes sur le point de supprimer la variable suivante :",
+ "firstConfirmMessage_other": "Vous êtes sur le point de supprimer les variables suivantes :",
+ "title": "Supprimer la Variable",
+ "tooltip": "Supprimer les variables sélectionnées"
+ },
+ "edit": "Modifier la Variable",
+ "export": "Exporter",
+ "exportTooltip": "Exporter les variables sélectionnées",
+ "form": {
+ "invalidJson": "JSON invalide",
+ "keyMaxLength": "La clé peut contenir un maximum de 250 caractères",
+ "keyRequired": "La clé est requise",
+ "valueRequired": "La valeur est requise"
+ },
+ "import": {
+ "button": "Importer",
+ "conflictResolution": "Sélectionner la Résolution de Conflit de Variable",
+ "errorParsingJsonFile": "Erreur lors de l'analyse du fichier JSON : Soumettez un fichier JSON contenant des variables (par exemple, {\"key\": \"value\", ...}).",
+ "options": {
+ "fail": {
+ "description": "Refuser l'importation si une variable avec la même clé existe déjà.",
+ "title": "Échouer"
+ },
+ "overwrite": {
+ "description": "Écraser la variable en cas de conflit.",
+ "title": "Écraser"
+ },
+ "skip": {
+ "description": "Ignorer l'importation des variables qui existent déjà.",
+ "title": "Ignorer"
+ }
+ },
+ "title": "Importer des Variables",
+ "upload": "Soumettre un Fichier JSON",
+ "uploadPlaceholder": "Soumettre un fichier JSON contenant des variables (par exemple, {\"key\": \"value\", ...})"
+ },
+ "noRowsMessage": "Aucune variable trouvée",
+ "searchPlaceholder": "Rechercher des Variables",
+ "variable_one": "Variable",
+ "variable_other": "Variables"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/assets.json
new file mode 100644
index 0000000000000..765a7faac69f1
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/assets.json
@@ -0,0 +1,29 @@
+{
+ "consumingDags": "Dags consommateurs",
+ "createEvent": {
+ "button": "Créer un événement",
+ "manual": {
+ "description": "Créer directement un événement d'Asset",
+ "extra": "Champs supplémentaires de l'événement",
+ "label": "Manuel"
+ },
+ "materialize": {
+ "description": "Déclencher le Dag en amont de cet asset",
+ "descriptionWithDag": "Déclencher le Dag en amont de cet asset : {{dagName}}",
+ "label": "Matérialiser",
+ "unpauseDag": "Réactiver {{dagName}} lors du déclenchement"
+ },
+ "success": {
+ "manualDescription": "La création manuelle de l'événement a été réussie.",
+ "manualTitle": "Événement d'Asset créé",
+ "materializeDescription": "Le Dag en amont {{dagId}} a été déclenché avec succès.",
+ 
"materializeTitle": "Matérialisation de l'Asset" + }, + "title": "Créer un événement d'Asset pour {{name}}" + }, + "group": "Group", + "lastAssetEvent": "Dernier événement d'Asset", + "name": "Nom", + "producingTasks": "Tasks productrices", + "searchPlaceholder": "Rechercher des Assets" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/browse.json new file mode 100644 index 0000000000000..09b7d3de68768 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "Réduire tous les extra json", + "expandAllExtra": "Ouvrir tous les extra json" + }, + "columns": { + "event": "Événement", + "extra": "Extra", + "user": "Utilisateur", + "when": "Quand" + }, + "title": "Journal d'Audit" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "Clé", + "value": "Valeur" + }, + "title": "XCom" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/common.json new file mode 100644 index 0000000000000..4da39f775cf4d --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/common.json @@ -0,0 +1,271 @@ +{ + "admin": { + "Config": "Configuration", + "Connections": "Connexions", + "Plugins": "Plugins", + "Pools": "Pools", + "Providers": "Providers", + "Variables": "Variables" + }, + "asset_one": "Asset", + "asset_other": "Assets", + "assetEvent_one": "Événement d'Asset", + "assetEvent_other": "Événements d'Asset", + "backfill_one": "Rattrapage", + "backfill_other": "Rattrapages", + "browse": { + "auditLog": "Journal d'audit", + "xcoms": "XComs" + }, + "createdAssetEvent_one": "Événement d'Asset créé", + "createdAssetEvent_other": "Événements d'Asset créés", + "dag_one": "Dag", + "dag_other": "Dags", + "dagDetails": { + "catchup": "Rattrapage", + "concurrency": "Concurrence", + "dagRunTimeout": "Délai d'exécution du Dag", + "defaultArgs": "Arguments par défaut", + "description": "Description", + "documentation": "Documentation du Dag", + "fileLocation": "Emplacement du fichier", + "hasTaskConcurrencyLimits": "Limites de concurrence par tâche", + "lastExpired": "Date d'expiration", + "lastParsed": "Dernière analyse", + "latestDagVersion": "Dernière version du Dag", + "latestRun": "Dernière exécution", + "maxActiveRuns": "Exécutions actives max.", + "maxActiveTasks": "Tâches actives max.", + "maxConsecutiveFailedDagRuns": "Échecs consécutifs max.", + "nextRun": "Prochaine exécution", + "owner": "Propriétaire", + "params": "Paramètres", + "schedule": "Planification", + "tags": "Tags" + }, + "dagId": "ID du Dag", + "dagRun": { + "conf": "Configuration", + "dagVersions": "Version(s) du Dag", + "dataIntervalEnd": "Fin de l'intervalle de données", + "dataIntervalStart": "Début de l'intervalle de données", + "lastSchedulingDecision": "Dernière décision de planification", + "queuedAt": "Mis en file à", + "runAfter": "Exécuté après", + "runType": "Type d'exécution", + "sourceAssetEvent": "Événement d'Asset source", + "triggeredBy": "Déclenché par" + }, + "dagRun_one": "Exécution de Dag", + "dagRun_other": "Exécutions de Dag", + "dagWarnings": "Avertissements/erreurs du Dag", + "defaultToGraphView": "Vue par défaut : graphe", + "defaultToGridView": "Vue par défaut : grille", + "direction": "Direction", + "docs": { + "documentation": "Documentation", + "githubRepo": "Repo GitHub", + "restApiReference": "Référence API REST" + }, + "duration": "Durée", 
+ "endDate": "Date de fin", + "error": { + "back": "Retour", + "defaultMessage": "Une erreur inattendue est survenue", + "home": "Accueil", + "notFound": "Page introuvable", + "title": "Erreur" + }, + "expression": { + "all": "Tous", + "and": "Et", + "any": "N'importe lequel", + "or": "Ou" + }, + "logicalDate": "Date logique", + "logout": "Déconnexion", + "logoutConfirmation": "Vous êtes sur le point de vous déconnecter de l'application.", + "mapIndex": "Map Index", + "modal": { + "cancel": "Annuler", + "confirm": "Confirmer", + "delete": { + "button": "Supprimer", + "confirmation": "Êtes-vous sûr de vouloir supprimer {{resourceName}} ? Cette action est irréversible." + } + }, + "nav": { + "admin": "Admin", + "assets": "Assets", + "browse": "Parcourir", + "dags": "Dags", + "docs": "Docs", + "home": "Accueil", + "plugins": "Plugins", + "security": "Sécurité" + }, + "noItemsFound": "Aucun {{modelName}} trouvé", + "note": { + "add": "Ajouter une note", + "dagRun": "Note d'exécution du Run", + "label": "Note", + "placeholder": "Ajouter une note...", + "taskInstance": "Note de Task Instance" + }, + "pools": { + "deferred": "Différé", + "open": "Libre", + "pools_one": "Pool", + "pools_other": "Pools", + "queued": "En file", + "running": "En cours", + "scheduled": "Planifié" + }, + "runId": "ID d'exécution", + "runTypes": { + "asset_triggered": "Déclenché par Asset", + "backfill": "Rattrapage", + "manual": "Manuel", + "scheduled": "Planifié" + }, + "seconds": "{{count}}s", + "security": { + "actions": "Actions", + "permissions": "Permissions", + "resources": "Ressources", + "roles": "Rôles", + "users": "Utilisateurs" + }, + "selectLanguage": "Choisir la langue", + "sourceAssetEvent_one": "Événement source", + "sourceAssetEvent_other": "Événements sources", + "startDate": "Date de début", + "state": "État", + "states": { + "deferred": "Différé", + "failed": "Échoué", + "no_status": "Aucun statut", + "none": "Aucun statut", + "queued": "En file", + "removed": "Supprimé", + "restarting": "Redémarrage", + "running": "En cours", + "scheduled": "Planifié", + "skipped": "Ignoré", + "success": "Succès", + "up_for_reschedule": "À replanifier", + "up_for_retry": "À réessayer", + "upstream_failed": "Échec en amont" + }, + "switchToDarkMode": "Passer en mode sombre", + "switchToLightMode": "Passer en mode clair", + "table": { + "completedAt": "Terminé à", + "createdAt": "Créé à", + "filterByTag": "Filtrer les Dags par tag", + "filterColumns": "Filtrer les colonnes du tableau", + "filterReset_one": "Réinitialiser le filtre", + "filterReset_other": "Réinitialiser les filtres", + "from": "De", + "maxActiveRuns": "Exécutions actives max.", + "noTagsFound": "Aucun tag trouvé", + "tagMode": { + "all": "Tous", + "any": "N'importe lequel" + }, + "tagPlaceholder": "Filtrer par tag", + "to": "À" + }, + "task": { + "documentation": "Documentation de la tâche", + "lastInstance": "Dernière instance", + "operator": "Opérateur", + "triggerRule": "Règle de déclenchement" + }, + "task_one": "Tâche", + "task_other": "Tâches", + "taskId": "ID de la tâche", + "taskInstance": { + "dagVersion": "Version du Dag", + "executor": "Exécuteur", + "executorConfig": "Configuration de l'exécuteur", + "hostname": "Nom d'hôte", + "maxTries": "Essais max.", + "pid": "PID", + "pool": "Pool", + "poolSlots": "Slots de pool", + "priorityWeight": "Poids de la priorité", + "queue": "File", + "queuedWhen": "Mis en file à", + "scheduledWhen": "Planifié à", + "triggerer": { + "assigned": "Déclencheur assigné", + "class": "Classe de déclencheur", + 
"createdAt": "Heure de création du déclencheur", + "id": "ID du déclencheur", + "latestHeartbeat": "Dernier battement du déclencheur", + "title": "Infos sur le déclencheur" + }, + "unixname": "Nom Unix" + }, + "taskInstance_one": "Instance de tâche", + "taskInstance_other": "Instances de tâche", + "timeRange": { + "last12Hours": "12 dernières heures", + "last24Hours": "24 dernières heures", + "lastHour": "Dernière heure", + "pastWeek": "Dernière semaine" + }, + "timezone": "Fuseau horaire", + "timezoneModal": { + "current-timezone": "Heure actuelle dans", + "placeholder": "Choisir un fuseau horaire", + "title": "Choisir un fuseau horaire", + "utc": "UTC (Temps universel coordonné)" + }, + "toaster": { + "bulkDelete": { + "error": "Échec de la suppression en masse de {{resourceName}}", + "success": { + "description": "{{count}} {{resourceName}} ont été supprimés avec succès. Clés : {{keys}}", + "title": "Requête de suppression en masse de {{resourceName}} soumise" + } + }, + "create": { + "error": "Échec de la création de {{resourceName}}", + "success": { + "description": "{{resourceName}} a été créé avec succès.", + "title": "Requête de création de {{resourceName}} soumise" + } + }, + "delete": { + "error": "Échec de la suppression de {{resourceName}}", + "success": { + "description": "{{resourceName}} a été supprimé avec succès.", + "title": "Requête de suppression de {{resourceName}} soumise" + } + }, + "import": { + "error": "Échec de l'importation de {{resourceName}}", + "success": { + "description": "{{count}} {{resourceName}} ont été importés avec succès.", + "title": "Requête d'importation de {{resourceName}} soumise" + } + }, + "update": { + "error": "Échec de la mise à jour de {{resourceName}}", + "success": { + "description": "{{resourceName}} a été mis à jour avec succès.", + "title": "Requête de mise à jour de {{resourceName}} soumise" + } + } + }, + "triggered": "Déclenché", + "tryNumber": "Numéro de l'essai", + "user": "Utilisateur", + "wrap": { + "tooltip": "Appuyez sur {{hotkey}} pour activer/désactiver le retour à la ligne", + "unwrap": "Désactiver le retour à la ligne", + "wrap": "Activer le retour à la ligne" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/components.json new file mode 100644 index 0000000000000..08f08c3605e4b --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/components.json @@ -0,0 +1,132 @@ +{ + "backfill": { + "affected_one": "1 exécution sera déclenchée.", + "affected_other": "{{count}} exécutions seront déclenchées.", + "affectedNone": "Aucune exécution ne correspond aux critères sélectionnés.", + "backwards": "Exécuter à rebours", + "dateRange": "Plage de dates", + "dateRangeFrom": "De", + "dateRangeTo": "À", + "errorStartDateBeforeEndDate": "La date de début doit être antérieure à la date de fin", + "maxRuns": "Nombre maximum d'exécutions actives", + "reprocessBehavior": "Comportement de réexécution", + "run": "Lancer le rattrapage", + "selectDescription": "Exécuter ce Dag pour une plage de dates", + "selectLabel": "Rattrapage", + "title": "Lancer un rattrapage", + "toaster": { + "success": { + "description": "Les tâches de rattrapage ont été déclenchées avec succès.", + "title": "Rattrapage généré" + } + }, + "tooltip": "Le rattrapage nécessite une planification", + "unpause": "Réactiver {{dag_display_name}} lors du déclenchement", + "validation": { + "datesRequired": "Les dates de début et de fin de l'intervalle de données doivent 
être renseignées.",
+ "startBeforeEnd": "La date de début de l'intervalle de données doit être antérieure ou égale à la date de fin."
+ }
+ },
+ "banner": {
+ "backfillInProgress": "Rattrapage en cours",
+ "cancel": "Annuler le rattrapage",
+ "pause": "Mettre en pause le rattrapage",
+ "unpause": "Réactiver le rattrapage"
+ },
+ "clipboard": {
+ "copy": "Copier"
+ },
+ "close": "Fermer",
+ "configForm": {
+ "advancedOptions": "Options avancées",
+ "configJson": "Configuration JSON",
+ "invalidJson": "Format JSON invalide : {{errorMessage}}"
+ },
+ "dagWarnings": {
+ "error_one": "1 erreur",
+ "errorAndWarning": "1 erreur et {{warningText}}",
+ "warning_one": "1 avertissement",
+ "warning_other": "{{count}} avertissements"
+ },
+ "durationChart": {
+ "duration": "Durée (secondes)",
+ "lastDagRun_one": "Dernière exécution du Dag",
+ "lastDagRun_other": "Dernières {{count}} exécutions du Dag",
+ "lastTaskInstance_one": "Dernière Task Instance",
+ "lastTaskInstance_other": "Dernières {{count}} Task Instances",
+ "queuedDuration": "Durée en file d'attente",
+ "runAfter": "Exécuté après",
+ "runDuration": "Durée d'exécution"
+ },
+ "fileUpload": {
+ "files_other": "{{count}} fichiers"
+ },
+ "flexibleForm": {
+ "placeholder": "Sélectionner une valeur",
+ "placeholderArray": "Entrez chaque chaîne sur une ligne séparée",
+ "placeholderExamples": "Commencez à taper pour voir les options",
+ "placeholderMulti": "Sélectionner une ou plusieurs valeurs",
+ "validationErrorArrayNotArray": "La valeur doit être un tableau.",
+ "validationErrorArrayNotNumbers": "Tous les éléments du tableau doivent être des nombres.",
+ "validationErrorArrayNotObject": "Tous les éléments du tableau doivent être des objets.",
+ "validationErrorRequired": "Ce champ est requis"
+ },
+ "graph": {
+ "directionDown": "De haut en bas",
+ "directionLeft": "De droite à gauche",
+ "directionRight": "De gauche à droite",
+ "directionUp": "De bas en haut",
+ "downloadImage": "Télécharger l'image du graphe",
+ "downloadImageError": "Échec du téléchargement de l'image du graphe.",
+ "downloadImageErrorTitle": "Échec du téléchargement",
+ "otherDagRuns": "+Autres exécutions du Dag",
+ "taskCount_one": "{{count}} tâche",
+ "taskCount_other": "{{count}} tâches",
+ "taskGroup": "Groupe de tâches"
+ },
+ "limitedList": "+{{count}} supplémentaires",
+ "logs": {
+ "file": "Fichier",
+ "in": "dans",
+ "line": "ligne"
+ },
+ "reparseDag": "Réanalyser le Dag",
+ "sortedAscending": "tri croissant",
+ "sortedDescending": "tri décroissant",
+ "sortedUnsorted": "non trié",
+ "taskTries": "Essais de tâche",
+ "toggleCardView": "Afficher en mode cartes",
+ "toggleTableView": "Afficher en mode tableau",
+ "triggerDag": {
+ "button": "Déclencher",
+ "loading": "Chargement des informations du Dag...",
+ "loadingFailed": "Échec du chargement des informations du Dag. Veuillez réessayer.",
+ "runIdHelp": "Optionnel – sera généré s'il n'est pas fourni",
+ "selectDescription": "Déclencher une exécution unique de ce Dag",
+ "selectLabel": "Exécution unique",
+ "title": "Déclencher un Dag",
+ "toaster": {
+ "success": {
+ "description": "L'exécution du Dag a été déclenchée avec succès.",
+ "title": "Dag déclenché"
+ }
+ },
+ "unpause": "Réactiver {{dagDisplayName}} lors du déclenchement"
+ },
+ "trimText": {
+ "details": "Détails",
+ "empty": "Vide",
+ "noContent": "Aucun contenu disponible."
+ }, + "versionDetails": { + "bundleLink": "Lien du bundle", + "bundleName": "Nom du bundle", + "bundleVersion": "Version du bundle", + "createdAt": "Créé le", + "versionId": "ID de version" + }, + "versionSelect": { + "dagVersion": "Version du Dag", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/dag.json new file mode 100644 index 0000000000000..e06075bbb79d3 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/dag.json @@ -0,0 +1,108 @@ +{ + "allRuns": "Tous les Runs", + "blockingDeps": { + "dependency": "Dépendance", + "reason": "Raison", + "title": "Dépendances bloquant la planification de la tâche" + }, + "code": { + "bundleUrl": "URL du bundle", + "noCode": "Aucun code trouvé", + "parsedAt": "Analysé le :" + }, + "extraLinks": "Liens supplémentaires", + "grid": { + "buttons": { + "resetToLatest": "Réinitialiser à la dernière version", + "toggleGroup": "Afficher/Masquer le groupe" + } + }, + "header": { + "buttons": { + "dagDocs": "Documentation du Dag" + } + }, + "logs": { + "allLevels": "Tous les niveaux de journalisation", + "allSources": "Toutes les sources", + "fullscreen": { + "button": "Plein écran", + "tooltip": "Appuyez sur {{hotkey}} pour le plein écran" + }, + "noTryNumber": "Aucun essai", + "viewInExternal": "Voir les journaux dans {{name}} (tentative {{attempt}})" + }, + "overview": { + "buttons": { + "failedRun_one": "Run échoué", + "failedRun_other": "Runs échoués", + "failedTask_one": "Tâche échouée", + "failedTask_other": "Tâches échouées", + "failedTaskInstance_one": "Task Instance échouée", + "failedTaskInstance_other": "Task Instances échouées" + }, + "charts": { + "assetEvent_one": "Événement d'actif créé", + "assetEvent_other": "Événements d'actif créés" + }, + "failedLogs": { + "title": "Journaux des tâches échouées récemment", + "viewFullLogs": "Voir les journaux complets" + } + }, + "panel": { + "buttons": { + "options": "Options", + "showGraph": "Afficher le graphe", + "showGrid": "Afficher la grille" + }, + "dagRuns": { + "label": "Nombre de Runs du Dag" + }, + "dependencies": { + "label": "Dépendances", + "options": { + "allDagDependencies": "Toutes les dépendances du Dag", + "externalConditions": "Conditions externes", + "onlyTasks": "Tâches uniquement" + }, + "placeholder": "Dépendances" + }, + "graphDirection": { + "label": "Orientation du graphe" + } + }, + "paramsFailed": "Échec du chargement des paramètres", + "parse": { + "toaster": { + "error": { + "description": "La demande d'analyse du Dag a échoué. 
Il se peut qu'il y ait encore des demandes en attente.",
+ "title": "Échec de l'analyse du Dag"
+ },
+ "success": {
+ "description": "Le Dag devrait être analysé sous peu.",
+ "title": "Demande d'analyse soumise avec succès"
+ }
+ }
+ },
+ "tabs": {
+ "assetEvents": "Événements d'actifs",
+ "auditLog": "Journal d'audit",
+ "backfills": "Rattrapages",
+ "code": "Code",
+ "details": "Détails",
+ "logs": "Journaux",
+ "mappedTaskInstances_one": "Task Instance [{{count}}]",
+ "mappedTaskInstances_other": "Task Instances [{{count}}]",
+ "overview": "Aperçu",
+ "renderedTemplates": "Modèles rendus",
+ "runs": "Runs",
+ "taskInstances": "Task Instances",
+ "tasks": "Tâches",
+ "xcom": "XCom"
+ },
+ "taskGroups": {
+ "collapseAll": "Réduire tous les groupes de tâches",
+ "expandAll": "Développer tous les groupes de tâches"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/dags.json
new file mode 100644
index 0000000000000..0a474e9acd40e
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/dags.json
@@ -0,0 +1,87 @@
+{
+ "assetSchedule": "{{count}} sur {{total}} assets mis à jour",
+ "dagActions": {
+ "delete": {
+ "button": "Supprimer le Dag",
+ "warning": "Cela supprimera toutes les métadonnées liées au Dag, y compris les exécutions et les tâches."
+ }
+ },
+ "filters": {
+ "allRunTypes": "Tous les types de Run",
+ "allStates": "Tous les états",
+ "paused": {
+ "active": "Actif",
+ "all": "Tous",
+ "paused": "En pause"
+ }
+ },
+ "ownerLink": "Lien du propriétaire pour {{owner}}",
+ "runAndTaskActions": {
+ "affectedTasks": {
+ "noItemsFound": "Aucune tâche trouvée.",
+ "title": "Tâches concernées : {{count}}"
+ },
+ "clear": {
+ "button": "Réinitialiser {{type}}",
+ "buttonTooltip": "Appuyez sur Maj+c pour réinitialiser",
+ "error": "Échec de la réinitialisation de {{type}}",
+ "title": "Réinitialiser {{type}}"
+ },
+ "delete": {
+ "button": "Supprimer {{type}}",
+ "dialog": {
+ "resourceName": "{{type}} {{id}}",
+ "title": "Supprimer {{type}}",
+ "warning": "Cela supprimera toutes les métadonnées liées à {{type}}."
+ }, + "error": "Erreur lors de la suppression de {{type}}", + "success": { + "description": "La demande de suppression de {{type}} a été effectuée avec succès.", + "title": "{{type}} supprimé avec succès" + } + }, + "markAs": { + "button": "Marquer {{type}} comme...", + "buttonTooltip": { + "failed": "Appuyez sur Maj+f pour marquer comme échoué", + "success": "Appuyez sur Maj+s pour marquer comme réussi" + }, + "title": "Marquer {{type}} comme {{state}}" + }, + "options": { + "downstream": "En aval", + "existingTasks": "Réinitialiser les tâches existantes", + "future": "Futur", + "onlyFailed": "Réinitialiser uniquement les tâches échouées", + "past": "Passé", + "queueNew": "Ajouter de nouvelles tâches en file d'attente", + "upstream": "En amont" + } + }, + "search": { + "advanced": "Recherche avancée", + "clear": "Effacer la recherche", + "dags": "Rechercher des Dags", + "hotkey": "+K", + "tasks": "Rechercher des tâches" + }, + "sort": { + "displayName": { + "asc": "Trier par nom affiché (A-Z)", + "desc": "Trier par nom affiché (Z-A)" + }, + "lastRunStartDate": { + "asc": "Trier par date de début du dernier Run (du plus ancien au plus récent)", + "desc": "Trier par date de début du dernier Run (du plus récent au plus ancien)" + }, + "lastRunState": { + "asc": "Trier par état du dernier Run (A-Z)", + "desc": "Trier par état du dernier Run (Z-A)" + }, + "nextDagRun": { + "asc": "Trier par date du prochain Run (du plus ancien au plus récent)", + "desc": "Trier par date du prochain Run (du plus récent au plus ancien)" + }, + "placeholder": "Trier par" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/fr/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/fr/dashboard.json new file mode 100644 index 0000000000000..8a80e4b16f9de --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/fr/dashboard.json @@ -0,0 +1,38 @@ +{ + "group": "Groupe", + "health": { + "dagProcessor": "Analyseur de Dag", + "health": "État de santé", + "healthy": "Sain", + "lastHeartbeat": "Dernier battement", + "metaDatabase": "Base de données Meta", + "scheduler": "Planificateur", + "status": "Statut", + "triggerer": "Déclencheur", + "unhealthy": "Non sain" + }, + "history": "Historique", + "importErrors": { + "dagImportError_one": "Erreur d'importation de Dag", + "dagImportError_other": "Erreurs d'importation de Dag", + "searchByFile": "Rechercher par fichier", + "timestamp": "Horodatage" + }, + "managePools": "Gérer les pools", + "noAssetEvents": "Aucun événement d'actif trouvé.", + "poolSlots": "Emplacements de pool", + "sortBy": { + "newestFirst": "Plus récents d'abord", + "oldestFirst": "Plus anciens d'abord" + }, + "source": "Source", + "stats": { + "activeDags": "Dags actifs", + "failedDags": "Dags échoués", + "queuedDags": "Dags en attente", + "runningDags": "Dags en cours d'exécution", + "stats": "Statistiques" + }, + "uri": "URI", + "welcome": "Bienvenue" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/admin.json new file mode 100644 index 0000000000000..c8e6f61cd1de0 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/admin.json @@ -0,0 +1,167 @@ +{ + "columns": { + "description": "תיאור", + "key": "מפתח", + "name": "שם", + "value": "ערך" + }, + "config": { + "columns": { + "section": "קטגוריה" + }, + "title": "תצורת Airflow" + }, + "connections": { + "add": "הוסף חיבור חדש", + "columns": { + "connectionId": "מזהה חיבור", + "connectionType": "סוג חיבור", + "host": 
"כתובת", + "port": "פורט" + }, + "connection_one": "חיבור", + "connection_other": "חיבורים", + "delete": { + "deleteConnection_one": "מחק חיבור אחד", + "deleteConnection_other": "מחק {{count}} חיבורים", + "firstConfirmMessage_one": "אתה עומד למחוק את החיבור הבא:", + "firstConfirmMessage_other": "אתה עומד למחוק את החיבורים הבאים:", + "title": "מחק חיבור" + }, + "edit": "ערוך חיבור", + "form": { + "connectionIdRequired": "נדרש מזהה חיבור", + "connectionIdRequirement": "מזהה חיבור אינו יכול להכיל רווחים בלבד", + "connectionTypeRequired": "נדרש סוג חיבור", + "extraFields": "שדות נוספים", + "extraFieldsJson": "שדות נוספים JSON", + "helperText": "סוג החיבור המבוקש חסר? וודא שהתקנת את חבילת העזר המתאימה", + "helperTextForRedactedFields": "שדות שהוסתרו ('***') יישארו ללא שינוי אם לא בוצע בהם עדכון", + "selectConnectionType": "בחר סוג חיבור", + "standardFields": "שדות סטנדרטיים" + }, + "nothingFound": { + "description": "חיבורים המוגדרים באמצעות משתני סביבה או מנהלי סודות אינם מוצגים כאן.", + "documentationLink": "למד עוד בתיעוד של Airflow.", + "learnMore": "אלה נפתרים בזמן ריצה ואינם גלויים בממשק המשתמש.", + "title": "לא נמצאו חיבורים" + }, + "searchPlaceholder": "חפש חיבורים", + "test": "בדוק חיבור", + "testDisabled": "אפשרות זו אינה זמינה. פנו למנהל המערכת להפעלתה", + "typeMeta": { + "error": "נכשל באחזור מטא סוג חיבור", + "standardFields": { + "description": "תיאור", + "host": "מארח", + "login": "מזהה התחברות", + "password": "סיסמה", + "port": "פורט", + "url_schema": "סכימה" + } + } + }, + "deleteActions": { + "button": "מחק", + "modal": { + "confirmButton": "אשר מחיקה", + "secondConfirmMessage": "פעולה זו איננה הפיכה", + "thirdConfirmMessage": " האם אתה בטוח שברצונך להמשיך?" + }, + "selected": "נבחר", + "tooltip": "מחק חיבורים נבחרים" + }, + "formActions": { + "reset": "אתחל", + "save": "שמור" + }, + "plugins": { + "columns": { + "source": "מקור" + }, + "importError_one": "שגיאה בייבוא תוסף", + "importError_other": "שגיאות בייבוא תוסף", + "searchPlaceholder": "חפש לפי קובץ" + }, + "pools": { + "add": "הוסף מאגר משאבים", + "deferredSlotsIncluded": "כולל מקומות שמורים למשימות מושהות", + "delete": { + "title": "מחק מאגר משאבים", + "warning": "זה יימחק את כל המטא-נתונים המקושרים עם המאגר ועלולים להשפיע על משימות המקושרות עם המאגר" + }, + "edit": "ערוך מאגר משאבים", + "form": { + "checkbox": "סמן כדי לכלול משימות מושהות", + "description": "תיאור", + "includeDeferred": "כלול משימות מושהות", + "nameMaxLength": "השם יכול לכלול 256 תווים לכל היותר", + "nameRequired": "נדרש שם", + "slots": "מקומות שמורים" + }, + "noPoolsFound": "לא נמצאו מאגרי משאבים", + "pool_one": "מאגר משאבים", + "pool_other": "מאגרי משאבים", + "searchPlaceholder": "חפש מאגרי משאבים", + "sort": { + "asc": "שם (A-Z)", + "desc": "שם (Z-A)", + "placeholder": "סדר לפי" + } + }, + "providers": { + "columns": { + "packageName": "שם חבילה", + "version": "גירסה" + } + }, + "variables": { + "add": "הוסף משתנה", + "columns": { + "isEncrypted": "האם מוצפן?" 
+ },
+ "delete": {
+ "deleteVariable_one": "מחק משתנה אחד",
+ "deleteVariable_other": "מחק {{count}} משתנים",
+ "firstConfirmMessage_one": "אתה עומד למחוק את המשתנה הבא:",
+ "firstConfirmMessage_other": "אתה עומד למחוק את המשתנים הבאים:",
+ "title": "מחק משתנה",
+ "tooltip": "מחק משתנים נבחרים"
+ },
+ "edit": "ערוך משתנה",
+ "export": "ייצא",
+ "exportTooltip": "ייצא משתנים נבחרים",
+ "form": {
+ "invalidJson": "JSON לא תקין",
+ "keyMaxLength": "מפתח יכול להכיל מקסימום 250 תווים",
+ "keyRequired": "נדרש מפתח",
+ "valueRequired": "נדרש ערך"
+ },
+ "import": {
+ "button": "ייבא",
+ "conflictResolution": "בחר מנגנון ליישוב קונפליקט משתנים",
+ "errorParsingJsonFile": "שגיאה בפירסור קובץ JSON: העלה קובץ JSON המכיל משתנים (לדוגמה, {\"key\": \"value\", ...}).",
+ "options": {
+ "fail": {
+ "description": "הכשלת הייבוא אם מזוהים משתנים קיימים.",
+ "title": "הכשל"
+ },
+ "overwrite": {
+ "description": "שכתוב (Overwrite) של המשתנה במקרה של קונפליקט.",
+ "title": "שכתב"
+ },
+ "skip": {
+ "description": "דילוג על ייבוא משתנים שכבר קיימים.",
+ "title": "דלג"
+ }
+ },
+ "title": "ייבא משתנים",
+ "upload": "העלה קובץ JSON",
+ "uploadPlaceholder": "העלה קובץ JSON המכיל משתנים (לדוגמה, {\"key\": \"value\", ...})"
+ },
+ "noRowsMessage": "לא נמצאו משתנים",
+ "searchPlaceholder": "חפש משתנים",
+ "variable_one": "משתנה",
+ "variable_other": "משתנים"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/assets.json
new file mode 100644
index 0000000000000..64c57f2029ce6
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/assets.json
@@ -0,0 +1,30 @@
+{
+ "consumingDags": "Dags צורכים",
+ "createEvent": {
+ "button": "יצירת אירוע",
+ "manual": {
+ "description": "יצירה ידנית של אירוע בנכס",
+ "extra": "תוכן אירוע בנכס",
+ "label": "ידני"
+ },
+ "materialize": {
+ "description": "הפעלת ה-Dag שנמצא במעלה הזרם של נכס זה",
+ "descriptionWithDag": "הפעלת ה-Dag שנמצא במעלה הזרם של נכס זה: {{dagName}}",
+ "label": "מימוש",
+ "unpauseDag": "בטל השהייה של {{dagName}} בהפעלה"
+ },
+ "success": {
+ "manualDescription": "יצירה ידנית של אירוע בנכס הושלמה בהצלחה.",
+ "manualTitle": "אירוע בנכס נוצר",
+ "materializeDescription": "Dag {{dagId}} במעלה הזרם הופעל בהצלחה.",
+ "materializeTitle": "מבצע מימוש לנכס"
+ },
+ "title": "יצירת אירוע בנכס עבור {{name}}"
+ },
+ "group": "קבוצה",
+ "lastAssetEvent": "אירוע אחרון בנכס",
+ "name": "שם",
+ "producingTasks": "משימות מייצרות",
+ "scheduledDags": "Dags מתוזמנים",
+ "searchPlaceholder": "חיפוש נכסים"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/browse.json
new file mode 100644
index 0000000000000..d7075652a7dd5
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/browse.json
@@ -0,0 +1,23 @@
+{
+ "auditLog": {
+ "actions": {
+ "collapseAllExtra": "כווץ את כל ה-JSON הנוסף",
+ "expandAllExtra": "הרחב את כל ה-JSON הנוסף"
+ },
+ "columns": {
+ "event": "אירוע",
+ "extra": "תוכן",
+ "user": "משתמש",
+ "when": "מתי"
+ },
+ "title": "יומן מערכת"
+ },
+ "xcom": {
+ "columns": {
+ "dag": "Dag",
+ "key": "מפתח",
+ "value": "ערך"
+ },
+ "title": "XCom"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/common.json
new file mode 100644
index 0000000000000..f28c263b2ae1d
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/common.json
@@ -0,0 +1,302 @@
+{
+ "admin":
{
+ "Config": "הגדרות",
+ "Connections": "חיבורים",
+ "Plugins": "תוספים",
+ "Pools": "מאגר משאבים",
+ "Providers": "חבילות עזר",
+ "Variables": "משתנים"
+ },
+ "asset_one": "נכס",
+ "asset_other": "נכסים",
+ "assetEvent_one": "אירוע נכס",
+ "assetEvent_other": "אירועי נכסים",
+ "backfill_one": "השלמה למפרע",
+ "backfill_other": "השלמות למפרע",
+ "browse": {
+ "auditLog": "יומן מערכת",
+ "requiredActions": "פעולות נדרשות",
+ "xcoms": "XComs"
+ },
+ "collapseDetailsPanel": "כווץ לוח פרטים",
+ "createdAssetEvent_one": "אירוע נכס שנוצר",
+ "createdAssetEvent_other": "אירועי נכס שנוצרו",
+ "dag_one": "Dag",
+ "dag_other": "Dags",
+ "dagDetails": {
+ "catchup": "השלמה אוטומטית",
+ "concurrency": "מספר הרצות מקבילות",
+ "dagRunTimeout": "זמן תפוגה להרצת Dag",
+ "defaultArgs": "פרמטרי ברירת מחדל",
+ "description": "תיאור",
+ "documentation": "תיעוד Dag",
+ "fileLocation": "מיקום קובץ",
+ "hasTaskConcurrencyLimits": "מגבלות ריצה מקבילה",
+ "lastExpired": "פג תוקף לאחרונה",
+ "lastParsed": "ניתוח אחרון",
+ "latestDagVersion": "גרסת Dag אחרונה",
+ "latestRun": "הרצה אחרונה",
+ "maxActiveRuns": "מספר מרבי של הרצות פעילות",
+ "maxActiveTasks": "מספר מרבי של משימות פעילות",
+ "maxConsecutiveFailedDagRuns": "מספר מרבי של כשלונות רצופים",
+ "nextRun": "ההרצה הבאה",
+ "owner": "בעלים",
+ "params": "פרמטרים",
+ "schedule": "תזמון",
+ "tags": "תגיות"
+ },
+ "dagId": "מזהה Dag",
+ "dagRun": {
+ "conf": "תצורה",
+ "dagVersions": "גרסאות Dag",
+ "dataIntervalEnd": "סיום מקטע נתונים",
+ "dataIntervalStart": "תחילת מקטע נתונים",
+ "lastSchedulingDecision": "החלטת תזמון אחרונה",
+ "queuedAt": "זמן כניסה לתור",
+ "runAfter": "זמן ההרצה מוקדם ביותר",
+ "runType": "סוג הרצה",
+ "sourceAssetEvent": "אירוע נכס מקור",
+ "triggeredBy": "הופעל על-ידי",
+ "triggeringUser": "שם משתמש מפעיל"
+ },
+ "dagRun_one": "הרצת Dag",
+ "dagRun_other": "הרצות Dag",
+ "dagRunId": "מזהה הרצת Dag",
+ "dagWarnings": "אזהרות/שגיאות Dag",
+ "defaultToGraphView": "תצוגת גרף כברירת מחדל",
+ "defaultToGridView": "תצוגת רשת כברירת מחדל",
+ "direction": "כיוון",
+ "docs": {
+ "documentation": "תיעוד",
+ "githubRepo": "מאגר GitHub",
+ "restApiReference": "תיעוד REST API"
+ },
+ "duration": "משך זמן",
+ "endDate": "תאריך סיום",
+ "error": {
+ "back": "חזור",
+ "defaultMessage": "התרחשה שגיאה בלתי צפויה",
+ "home": "דף הבית",
+ "notFound": "הדף לא נמצא",
+ "title": "שגיאה"
+ },
+ "expand": {
+ "collapse": "כווץ",
+ "expand": "הרחב",
+ "hotkey": "e",
+ "tooltip": "לחץ {{hotkey}} לכיווץ או הרחבה"
+ },
+ "expression": {
+ "all": "הכל",
+ "and": "וגם",
+ "any": "כלשהו",
+ "or": "או"
+ },
+ "logicalDate": "תאריך לוגי",
+ "logout": "התנתק",
+ "logoutConfirmation": "האם אתה בטוח שברצונך להתנתק?",
+ "mapIndex": "אינדקס מיפוי",
+ "modal": {
+ "cancel": "ביטול",
+ "confirm": "אישור",
+ "delete": {
+ "button": "מחק",
+ "confirmation": "האם אתה בטוח שברצונך למחוק את {{resourceName}}? 
פעולה זו אינה הפיכה" + } + }, + "nav": { + "admin": "ניהול", + "assets": "נכסים", + "browse": "עיון", + "dags": "Dags", + "docs": "תיעוד", + "home": "דף הבית", + "legacyFabViews": "תצוגות מורשת", + "plugins": "תוספים", + "security": "אבטחה" + }, + "noItemsFound": "לא נמצאו {{modelName}}", + "note": { + "add": "הוסף הערה", + "dagRun": "הערת הרצת Dag", + "label": "הערה", + "placeholder": "הוסף הערה...", + "taskInstance": "הערת משימה" + }, + "pools": { + "deferred": "נדחה לתזמון עתידי", + "open": "פתוח", + "pools_one": "מאגר משאבים", + "pools_other": "מאגרי משאבים", + "queued": "בתור", + "running": "בביצוע", + "scheduled": "מתוזמן" + }, + "runId": "מזהה הרצה", + "runTypes": { + "asset_triggered": "הופעל על-ידי נכס", + "backfill": "השלמה למפרע", + "manual": "הפעלה ידנית", + "scheduled": "מתוזמן" + }, + "scroll": { + "direction": { + "bottom": "תחתית", + "top": "עליון" + }, + "tooltip": "לחץ {{hotkey}} לגלילה ל{{direction}}" + }, + "seconds": "{{count}} שניות", + "security": { + "actions": "פעולות", + "permissions": "הרשאות", + "resources": "משאבים", + "roles": "תפקידים", + "users": "משתמשים" + }, + "selectLanguage": "בחר שפה", + "showDetailsPanel": "הצג לוח פרטים", + "source": { + "hide": "הסתר מקור", + "hotkey": "s", + "show": "הצג מקור" + }, + "sourceAssetEvent_one": "אירוע נכס מקור", + "sourceAssetEvent_other": "אירועי נכס מקור", + "startDate": "תאריך התחלה", + "state": "מצב", + "states": { + "deferred": "נדחה", + "failed": "נכשל", + "no_status": "ללא סטטוס", + "none": "ללא סטטוס", + "queued": "בתור", + "removed": "הוסר", + "restarting": "מופעל מחדש", + "running": "בביצוע", + "scheduled": "מתוזמן", + "skipped": "דולג", + "success": "הצליח", + "up_for_reschedule": "ממתין לתזמון מחדש", + "up_for_retry": "ממתין לניסיון חוזר", + "upstream_failed": "משימות קודמות נכשלו" + }, + "switchToDarkMode": "עבור למצב כהה", + "switchToLightMode": "עבור למצב בהיר", + "table": { + "completedAt": "הושלם בתאריך", + "createdAt": "נוצר בתאריך", + "filterByTag": "סנן לפי תגית", + "filterColumns": "סנן עמודות", + "filterReset_one": "אפס מסנן", + "filterReset_other": "אפס מסננים", + "from": "מ-", + "maxActiveRuns": "מספר הרצות פעילות מרבי", + "noTagsFound": "לא נמצאו תגיות", + "tagMode": { + "all": "הכל", + "any": "כלשהו" + }, + "tagPlaceholder": "סנן לפי תגית", + "to": "עד" + }, + "task": { + "documentation": "תיעוד משימה", + "lastInstance": "משימה אחרונה", + "operator": "אופרטור", + "triggerRule": "כלל הפעלה" + }, + "task_one": "משימה", + "task_other": "משימות", + "taskId": "מזהה משימה", + "taskInstance": { + "dagVersion": "גרסת Dag", + "executor": "מבצע", + "executorConfig": "הגדרות מבצע", + "hostname": "שם מארח", + "maxTries": "מספר נסיונות מרבי", + "pid": "מזהה תהליך", + "pool": "מאגר משאבים", + "poolSlots": "מכסת משאבים", + "priorityWeight": "משקל עדיפות", + "queue": "תור", + "queuedWhen": "זמן כניסה לתור", + "scheduledWhen": "מועד תזמון", + "triggerer": { + "assigned": "מפעיל מוקצה", + "class": "סוג מפעיל", + "createdAt": "זמן יצירת מפעיל", + "id": "מזהה מפעיל", + "latestHeartbeat": "עדכון חיים אחרון", + "title": "פרטי מפעיל" + }, + "unixname": "שם משתמש Unix" + }, + "taskInstance_one": "משימה בודדת", + "taskInstance_other": "משימות", + "timeRange": { + "last12Hours": "12 השעות האחרונות", + "last24Hours": "24 השעות האחרונות", + "lastHour": "השעה האחרונה", + "pastWeek": "השבוע האחרון" + }, + "timestamp": { + "hide": "הסתר חותמות זמן", + "hotkey": "t", + "show": "הצג חותמות זמן" + }, + "timezone": "אזור זמן", + "timezoneModal": { + "current-timezone": "אזור זמן נוכחי", + "placeholder": "חפש אזור זמן", + 
"title": "בחר אזור זמן", + "utc": "UTC" + }, + "toaster": { + "bulkDelete": { + "error": "שגיאה במחיקה מרובה של {{resourceName}}", + "success": { + "description": "נמחקו בהצלחה {{count}} {{resourceName}}. מזהים: {{keys}}", + "title": "בקשת מחיקה מרובה של {{resourceName}} נשלחה" + } + }, + "create": { + "error": "שגיאה ביצירת {{resourceName}}", + "success": { + "description": "{{resourceName}} נוצר בהצלחה", + "title": "בקשת יצירת {{resourceName}} נשלחה" + } + }, + "delete": { + "error": "שגיאה במחיקת {{resourceName}}", + "success": { + "description": "{{resourceName}} נמחק בהצלחה", + "title": "בקשת מחיקת {{resourceName}} נשלחה" + } + }, + "import": { + "error": "שגיאה בייבוא {{resourceName}}", + "success": { + "description": "יובאו בהצלחה {{count}} {{resourceName}}", + "title": "בקשת ייבוא {{resourceName}} נשלחה" + } + }, + "update": { + "error": "שגיאה בעדכון {{resourceName}}", + "success": { + "description": "{{resourceName}} עודכן בהצלחה", + "title": "בקשת עדכון {{resourceName}} נשלחה" + } + } + }, + "total": "סה״כ {{state}}", + "triggered": "הופעל", + "tryNumber": "מספר ניסיון", + "user": "משתמש", + "wrap": { + "hotkey": "w", + "tooltip": "לחץ {{hotkey}} להפעלת/כיבוי גלישת טקסט", + "unwrap": "בטל גלישת טקסט", + "wrap": "הפעל גלישת טקסט" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json new file mode 100644 index 0000000000000..2483fcbaa19ec --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json @@ -0,0 +1,136 @@ +{ + "backfill": { + "affected_one": "ריצה אחת תופעל.", + "affected_other": "{{count}} ריצות תופעלנה.", + "affectedNone": "לא נמצאו ריצות התואמות לקריטריונים שנבחרו.", + "allRuns": "כל ההרצות", + "backwards": "הרץ לאחור", + "dateRange": "טווח תאריכים", + "dateRangeFrom": "מתאריך", + "dateRangeTo": "עד תאריך", + "errorStartDateBeforeEndDate": "תאריך ההתחלה חייב להיות לפני תאריך הסיום", + "maxRuns": "מספר ריצות מקבילות מירבי", + "missingAndErroredRuns": "הרצות חסרות ושגויות", + "missingRuns": "הרצות חסרות", + "reprocessBehavior": "התנהגות עיבוד מחדש", + "run": "הרץ Backfill", + "selectDescription": "הרץ Dag זה עבור טווח תאריכים", + "selectLabel": "Backfill", + "title": "הרץ Backfill", + "toaster": { + "success": { + "description": "משימות ה-Backfill הופעלו בהצלחה.", + "title": "Backfill נוצר" + } + }, + "tooltip": "Backfill דורש לוח זמנים", + "unpause": "הפעל את {{dag_display_name}} בעת ההרצה", + "validation": { + "datesRequired": "יש להזין גם תאריך התחלה וגם תאריך סיום של מרווח הנתונים.", + "startBeforeEnd": "תאריך התחלת מרווח הנתונים חייב להיות קטן או שווה לתאריך סיום מרווח הנתונים." 
+ }
+ },
+ "banner": {
+ "backfillInProgress": "Backfill בתהליך",
+ "cancel": "בטל backfill",
+ "pause": "השהה backfill",
+ "unpause": "הפעל מחדש backfill"
+ },
+ "clipboard": {
+ "copy": "העתק"
+ },
+ "close": "סגור",
+ "configForm": {
+ "advancedOptions": "אפשרויות מתקדמות",
+ "configJson": "הגדרות JSON",
+ "invalidJson": "מבנה ה-JSON אינו תקין: {{errorMessage}}"
+ },
+ "dagWarnings": {
+ "error_one": "שגיאה אחת",
+ "error_other": "{{count}} שגיאות",
+ "errorAndWarning": "שגיאה אחת ו-{{warningText}}",
+ "warning_one": "אזהרה אחת",
+ "warning_other": "{{count}} אזהרות"
+ },
+ "durationChart": {
+ "duration": "משך זמן (שניות)",
+ "lastDagRun_one": "ריצת Dag אחרונה",
+ "lastDagRun_other": "{{count}} ריצות Dag אחרונות",
+ "lastTaskInstance_one": "משימה אחרונה",
+ "lastTaskInstance_other": "{{count}} משימות אחרונות",
+ "queuedDuration": "זמן המתנה בתור",
+ "runAfter": "הרץ אחרי",
+ "runDuration": "משך זמן ריצה"
+ },
+ "fileUpload": {
+ "files_one": "{{count}} קובץ",
+ "files_other": "{{count}} קבצים"
+ },
+ "flexibleForm": {
+ "placeholder": "בחר ערך",
+ "placeholderArray": "הכנס כל מחרוזת בשורה חדשה",
+ "placeholderExamples": "התחל להקליד כדי לראות אפשרויות",
+ "placeholderMulti": "בחר ערך אחד או יותר",
+ "validationErrorArrayNotArray": "הערך חייב להיות מערך.",
+ "validationErrorArrayNotNumbers": "כל האיברים במערך חייבים להיות מספרים.",
+ "validationErrorArrayNotObject": "כל האיברים במערך חייבים להיות אובייקטים.",
+ "validationErrorRequired": "שדה זה הוא חובה"
+ },
+ "graph": {
+ "directionDown": "מלמעלה למטה",
+ "directionLeft": "מימין לשמאל",
+ "directionRight": "משמאל לימין",
+ "directionUp": "מלמטה למעלה",
+ "downloadImage": "הורד תמונת גרף",
+ "downloadImageError": "הורדת תמונת הגרף נכשלה.",
+ "downloadImageErrorTitle": "ההורדה נכשלה",
+ "otherDagRuns": "+ ריצות Dag נוספות",
+ "taskCount_one": "משימה אחת",
+ "taskCount_other": "{{count}} משימות",
+ "taskGroup": "קבוצת משימות"
+ },
+ "limitedList": "+ {{count}} נוספים",
+ "logs": {
+ "file": "קובץ",
+ "location": "שורה {{line}} ב{{name}}"
+ },
+ "reparseDag": "נתח Dag מחדש",
+ "sortedAscending": "ממוין בסדר עולה",
+ "sortedDescending": "ממוין בסדר יורד",
+ "sortedUnsorted": "לא ממוין",
+ "taskTries": "ניסיונות משימה",
+ "toggleCardView": "הצג תצוגת כרטיסים",
+ "toggleTableView": "הצג תצוגת טבלה",
+ "triggerDag": {
+ "button": "הפעל",
+ "loading": "טוען מידע Dag...",
+ "loadingFailed": "טעינת מידע ה-Dag נכשלה. אנא נסו שוב.",
+ "runIdHelp": "אופציונלי - ייווצר אוטומטית אם לא יסופק",
+ "selectDescription": "הפעל ריצה בודדת של Dag זה",
+ "selectLabel": "ריצה בודדת",
+ "title": "הפעל Dag",
+ "toaster": {
+ "success": {
+ "description": "ריצת ה-Dag הופעלה בהצלחה.",
+ "title": "ריצת Dag הופעלה"
+ }
+ },
+ "unpause": "הפעל את {{dagDisplayName}} בעת ההרצה"
+ },
+ "trimText": {
+ "details": "פרטים",
+ "empty": "ריק",
+ "noContent": "אין תוכן זמין."
+ },
+ "versionDetails": {
+ "bundleLink": "קישור לחבילה",
+ "bundleName": "שם החבילה",
+ "bundleVersion": "גרסת החבילה",
+ "createdAt": "נוצר ב",
+ "versionId": "מזהה גרסה"
+ },
+ "versionSelect": {
+ "dagVersion": "גרסת Dag",
+ "versionCode": "גרסה {{versionCode}}"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/dag.json
new file mode 100644
index 0000000000000..d7b58d0d4d29f
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/dag.json
@@ -0,0 +1,121 @@
+{
+ "allRuns": "כל ההרצות",
+ "blockingDeps": {
+ "dependency": "תלות",
+ "reason": "סיבה",
+ "title": "תלויות החוסמות את תזמון המשימה"
+ },
+ "code": {
+ "bundleUrl": "כתובת החבילה",
+ "noCode": "לא נמצא קוד",
+ "parsedAt": "נקרא בשעה:"
+ },
+ "extraLinks": "קישורים נוספים",
+ "grid": {
+ "buttons": {
+ "resetToLatest": "אפס לגרסה האחרונה",
+ "toggleGroup": "החלף מצב קבוצה"
+ }
+ },
+ "header": {
+ "buttons": {
+ "advanced": "מתקדם",
+ "dagDocs": "תיעוד Dag"
+ }
+ },
+ "logs": {
+ "allLevels": "כל רמות הרישום",
+ "allSources": "כל המקורות",
+ "critical": "CRITICAL",
+ "debug": "DEBUG",
+ "error": "ERROR",
+ "fullscreen": {
+ "button": "מסך מלא",
+ "tooltip": "לחץ {{hotkey}} למסך מלא"
+ },
+ "info": "INFO",
+ "noTryNumber": "אין מספר ניסיון",
+ "settings": "הגדרות רישום",
+ "viewInExternal": "צפה ברישומים ב-{{name}} (ניסיון {{attempt}})",
+ "warning": "WARNING"
+ },
+ "navigation": {
+ "jump": "קפיצה: Shift+{{arrow}}",
+ "navigation": "ניווט: {{arrow}}",
+ "toggleGroup": "החלפת קבוצה: רווח"
+ },
+ "overview": {
+ "buttons": {
+ "failedRun_one": "הרצה שנכשלה",
+ "failedRun_other": "הרצות שנכשלו",
+ "failedTask_one": "משימה שנכשלה",
+ "failedTask_other": "משימות שנכשלו",
+ "failedTaskInstance_one": "מופע משימה שנכשל",
+ "failedTaskInstance_other": "מופעי משימות שנכשלו"
+ },
+ "charts": {
+ "assetEvent_one": "אירוע בנכס שנוצר",
+ "assetEvent_other": "אירועים בנכסים שנוצרו"
+ },
+ "failedLogs": {
+ "title": "רישומים אחרונים של משימות שנכשלו",
+ "viewFullLogs": "צפה ברישומים המלאים"
+ }
+ },
+ "panel": {
+ "buttons": {
+ "options": "אפשרויות",
+ "showGraph": "הצג תרשים",
+ "showGrid": "הצג רשת"
+ },
+ "dagRuns": {
+ "label": "מספר הרצות Dag"
+ },
+ "dependencies": {
+ "label": "תלויות",
+ "options": {
+ "allDagDependencies": "כל תלויות ה-Dag",
+ "externalConditions": "תנאים חיצוניים",
+ "onlyTasks": "משימות בלבד"
+ },
+ "placeholder": "תלויות"
+ },
+ "graphDirection": {
+ "label": "כיוון התרשים"
+ }
+ },
+ "paramsFailed": "טעינת הפרמטרים נכשלה",
+ "parse": {
+ "toaster": {
+ "error": {
+ "description": "ניתוח ה-Dag נכשל. ייתכן שישנן בקשות ניתוח ממתינות.",
+ "title": "ניתוח מחדש של ה-Dag נכשל"
+ },
+ "success": {
+ "description": "ה-Dag יתנתח מחדש בקרוב.",
+ "title": "בקשת הניתוח מחדש נשלחה בהצלחה"
+ }
+ }
+ },
+ "tabs": {
+ "assetEvents": "אירועי נכסים",
+ "auditLog": "יומן ביקורת",
+ "backfills": "השלמות רטרואקטיביות",
+ "code": "קוד תכנות",
+ "details": "פרטים",
+ "logs": "רישומים",
+ "mappedTaskInstances_one": "מופע משימה [{{count}}]",
+ "mappedTaskInstances_other": "מופעי משימות [{{count}}]",
+ "overview": "סקירה כללית",
+ "renderedTemplates": "תבניות מעובדות",
+ "requiredActions": "פעולות נדרשות",
+ "runs": "הרצות",
+ "taskInstances": "מופעי משימות",
+ "tasks": "משימות",
+ "xcom": "XCom"
+ },
+ "taskGroups": {
+ "collapseAll": "כווץ את כל קבוצות המשימות",
+ "expandAll": "הרחב את כל קבוצות המשימות"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/dags.json
new file mode 100644
index 0000000000000..15729c5f1e866
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/dags.json
@@ -0,0 +1,97 @@
+{
+ "assetSchedule": "{{count}} מתוך {{total}} נכסים עודכנו",
+ "dagActions": {
+ "delete": {
+ "button": "מחק Dag",
+ "warning": "זה ימחק את כל המטא-נתונים הקשורים ל-Dag, כולל ריצות ומשימות"
+ }
+ },
+ "favoriteDag": "הוסף Dag למועדפים",
+ "filters": {
+ "allRunTypes": "כל סוגי הריצות",
+ "allStates": "כל המצבים",
+ "favorite": {
+ "all": "הכל",
+ "favorite": "מועדף",
+ "unfavorite": "לא מועדף"
+ },
+ "paused": {
+ "active": "פעיל",
+ "all": "הכל",
+ "paused": "מושהה"
+ },
+ "runIdPatternFilter": "חפש הרצת Dag",
+ "triggeringUserNameFilter": "חפש לפי שם המשתמש המפעיל"
+ },
+ "ownerLink": "קישור בעלים ל-{{owner}}",
+ "runAndTaskActions": {
+ "affectedTasks": {
+ "noItemsFound": "לא נמצאו משימות.",
+ "title": "משימות שהושפעו: {{count}}"
+ },
+ "clear": {
+ "button": "נקה {{type}}",
+ "buttonTooltip": "לחץ shift+c כדי לנקות",
+ "error": "שגיאה בניקוי {{type}}",
+ "title": "נקה {{type}}"
+ },
+ "delete": {
+ "button": "מחק {{type}}",
+ "dialog": {
+ "resourceName": "{{type}} {{id}}",
+ "title": "מחק {{type}}",
+ "warning": "זה ימחק את כל המטא-נתונים הקשורים ל-{{type}}."
+ },
+ "error": "שגיאה במחיקת {{type}}",
+ "success": {
+ "description": "בקשת מחיקת ה{{type}} הייתה מוצלחת.",
+ "title": "{{type}} נמחק בהצלחה."
+ } + }, + "markAs": { + "button": "סמן {{type}} כ...", + "buttonTooltip": { + "failed": "לחץ shift+f כדי לסמן ככשלון", + "success": "לחץ shift+s כדי לסמן כהצלחה" + }, + "title": "סמן {{type}} כ{{state}}" + }, + "options": { + "downstream": "במורד הזרם", + "existingTasks": "נקה משימות קיימות", + "future": "בעתיד", + "onlyFailed": "נקה משימות שנכשלו בלבד", + "past": "בעבר", + "queueNew": "תזמן משימות חדשות", + "runOnLatestVersion": "הרץ על גרסת החבילה האחרונה", + "upstream": "במעלה הזרם" + } + }, + "search": { + "advanced": "חיפוש מתקדם", + "clear": "נקה חיפוש", + "dags": "חפש Dags", + "hotkey": "+K", + "tasks": "חפש משימות" + }, + "sort": { + "displayName": { + "asc": "מיין לפי שם תצוגה (A-Z)", + "desc": "מיין לפי שם תצוגה (Z-A)" + }, + "lastRunStartDate": { + "asc": "מיין לפי תאריך התחלת ריצה אחרון (מהמוקדם למאוחר)", + "desc": "מיין לפי תאריך התחלת ריצה אחרון (מהמאוחר למוקדם)" + }, + "lastRunState": { + "asc": "מיין לפי מצב ריצה אחרון (A-Z)", + "desc": "מיין לפי מצב ריצה אחרון (Z-A)" + }, + "nextDagRun": { + "asc": "מיין לפי ריצת ה-Dag הבאה (מהמוקדם למאוחר)", + "desc": "מיין לפי ריצת ה-Dag הבאה (מהמאוחר למוקדם)" + }, + "placeholder": "מיין לפי" + }, + "unfavoriteDag": "הסר Dag ממועדפים" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/dashboard.json new file mode 100644 index 0000000000000..69a12d87f5ddc --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/dashboard.json @@ -0,0 +1,45 @@ +{ + "favorite": { + "favoriteDags_one": "{{count}} Dags מועדפים ראשונים", + "favoriteDags_other": "{{count}} Dags מועדפים ראשונים", + "noDagRuns": "אין עדיין הרצת Dag עבור Dag זה.", + "noFavoriteDags": "אין עדיין מועדפים. לחץ על סמל הכוכב ליד Dag ברשימה כדי להוסיף אותו למועדפים." 
+ },
+ "group": "קבוצה",
+ "health": {
+ "dagProcessor": "מעבד Dag",
+ "health": "תקינות",
+ "healthy": "תקין",
+ "lastHeartbeat": "פעימה אחרונה",
+ "metaDatabase": "מטא מסד-נתונים",
+ "scheduler": "מתזמן",
+ "status": "סטאטוס",
+ "triggerer": "מנהל תהליכים בהמתנה",
+ "unhealthy": "לא תקין"
+ },
+ "history": "היסטוריה",
+ "importErrors": {
+ "dagImportError_one": "שגיאה בטעינת Dag",
+ "dagImportError_other": "שגיאות בטעינת Dags",
+ "searchByFile": "חפש לפי קובץ",
+ "timestamp": "חותמת זמן"
+ },
+ "managePools": "ניהול מאגרים",
+ "noAssetEvents": "אין אירועים בנכסים",
+ "poolSlots": "סטאטוס מאגרים",
+ "sortBy": {
+ "newestFirst": "חדש קודם",
+ "oldestFirst": "ישן קודם"
+ },
+ "source": "מקור",
+ "stats": {
+ "activeDags": "Dags פעילים",
+ "failedDags": "Dags בכשלון",
+ "queuedDags": "Dags בתור",
+ "requiredActions": "פעולות נדרשות",
+ "runningDags": "Dags בריצה",
+ "stats": "סטטיסטיקות"
+ },
+ "uri": "URI",
+ "welcome": "ברוכים הבאים"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/hitl.json
new file mode 100644
index 0000000000000..aa20fd7ca1f41
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/hitl.json
@@ -0,0 +1,23 @@
+{
+ "requiredAction_one": "פעולה נדרשת",
+ "requiredAction_other": "פעולות נדרשות",
+ "requiredActionState": "מצב פעולה נדרשת",
+ "response": {
+ "error": "התגובה נכשלה",
+ "optionsDescription": "בחר את האפשרויות למופע המשימה הזה",
+ "optionsLabel": "אפשרויות",
+ "received": "תגובה התקבלה ב",
+ "respond": "הגב",
+ "success": "התגובה של {{taskId}} הצליחה",
+ "title": "מופע משימה ידנית - {{taskId}}"
+ },
+ "state": {
+ "approvalReceived": "האישור התקבל",
+ "approvalRequired": "נדרש אישור",
+ "choiceReceived": "הבחירה התקבלה",
+ "choiceRequired": "נדרשת בחירה",
+ "rejectionReceived": "הדחייה התקבלה",
+ "responseReceived": "התגובה התקבלה",
+ "responseRequired": "נדרשת תגובה"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/admin.json
new file mode 100644
index 0000000000000..5acf37c678506
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/admin.json
@@ -0,0 +1,167 @@
+{
+ "columns": {
+ "description": "설명",
+ "key": "키",
+ "name": "이름",
+ "value": "값"
+ },
+ "config": {
+ "columns": {
+ "section": "섹션"
+ },
+ "title": "Airflow 구성"
+ },
+ "connections": {
+ "add": "커넥션 추가",
+ "columns": {
+ "connectionId": "커넥션 ID",
+ "connectionType": "커넥션 유형",
+ "host": "호스트",
+ "port": "포트"
+ },
+ "connection_one": "커넥션",
+ "connection_other": "커넥션들",
+ "delete": {
+ "deleteConnection_one": "커넥션 1개 삭제",
+ "deleteConnection_other": "커넥션 {{count}}개 삭제",
+ "firstConfirmMessage_one": "다음 커넥션을 삭제하려고 합니다:",
+ "firstConfirmMessage_other": "다음 커넥션들을 삭제하려고 합니다:",
+ "title": "커넥션 삭제"
+ },
+ "edit": "커넥션 편집",
+ "form": {
+ "connectionIdRequired": "커넥션 ID는 필수 항목입니다",
+ "connectionIdRequirement": "커넥션 ID는 공백만으로 구성될 수 없습니다",
+ "connectionTypeRequired": "커넥션 유형은 필수 항목입니다",
+ "extraFields": "추가 필드",
+ "extraFieldsJson": "추가 필드 JSON",
+ "helperText": "커넥션 유형이 보이지 않나요? 해당 Airflow Provider 패키지가 설치되어 있는지 확인하세요.",
+ "helperTextForRedactedFields": "('***')로 마스킹된 필드는 수정하지 않으면 그대로 유지됩니다.",
+ "selectConnectionType": "커넥션 유형 선택",
+ "standardFields": "기본 필드"
+ },
+ "nothingFound": {
+ "description": "환경 변수나 시크릿 매니저를 통해 정의된 커넥션은 여기에 표시되지 않습니다.",
+ "documentationLink": "자세한 내용은 Airflow 문서를 참고하세요.",
+ "learnMore": "이러한 커넥션은 런타임 시점에 해결되며 UI에는 표시되지 않습니다.",
+ "title": "커넥션을 찾을 수 없습니다."
+ },
+ "searchPlaceholder": "커넥션 검색",
+ "test": "커넥션 테스트",
+ "testDisabled": "커넥션 테스트 기능이 비활성화되어 있습니다. 관리자에게 활성화를 요청하세요.",
+ "typeMeta": {
+ "error": "커넥션 유형 메타 정보를 가져오지 못했습니다",
+ "standardFields": {
+ "description": "설명",
+ "host": "호스트",
+ "login": "로그인",
+ "password": "비밀번호",
+ "port": "포트",
+ "url_schema": "스키마"
+ }
+ }
+ },
+ "deleteActions": {
+ "button": "삭제",
+ "modal": {
+ "confirmButton": "네, 삭제합니다",
+ "secondConfirmMessage": "이 작업은 영구적으로 삭제되며 되돌릴 수 없습니다.",
+ "thirdConfirmMessage": "정말로 계속하시겠습니까?"
+ },
+ "selected": "선택됨",
+ "tooltip": "선택된 커넥션들 삭제"
+ },
+ "formActions": {
+ "reset": "초기화",
+ "save": "저장"
+ },
+ "plugins": {
+ "columns": {
+ "source": "소스"
+ },
+ "importError_one": "플러그인 가져오기 오류",
+ "importError_other": "플러그인 가져오기 오류들",
+ "searchPlaceholder": "파일로 검색"
+ },
+ "pools": {
+ "add": "풀 추가",
+ "deferredSlotsIncluded": "연기된 슬롯 포함됨",
+ "delete": {
+ "title": "풀 삭제",
+ "warning": "이 작업은 풀과 관련된 모든 메타데이터를 제거하며, 해당 풀을 사용하는 태스크에 영향을 줄 수 있습니다."
+ },
+ "edit": "풀 편집",
+ "form": {
+ "checkbox": "열린 풀 슬롯 계산 시 연기된 작업 포함",
+ "description": "설명",
+ "includeDeferred": "연기됨 포함",
+ "nameMaxLength": "이름은 최대 256자까지 입력할 수 있습니다",
+ "nameRequired": "이름은 필수입니다",
+ "slots": "슬롯"
+ },
+ "noPoolsFound": "풀을 찾을 수 없습니다",
+ "pool_one": "풀",
+ "pool_other": "풀",
+ "searchPlaceholder": "풀 검색",
+ "sort": {
+ "asc": "이름 (A-Z)",
+ "desc": "이름 (Z-A)",
+ "placeholder": "정렬 기준"
+ }
+ },
+ "providers": {
+ "columns": {
+ "packageName": "패키지 이름",
+ "version": "버전"
+ }
+ },
+ "variables": {
+ "add": "변수 추가",
+ "columns": {
+ "isEncrypted": "암호화됨 여부"
+ },
+ "delete": {
+ "deleteVariable_one": "변수 1개 삭제",
+ "deleteVariable_other": "변수 {{count}}개 삭제",
+ "firstConfirmMessage_one": "다음 변수를 삭제하려고 합니다:",
+ "firstConfirmMessage_other": "다음 변수들을 삭제하려고 합니다:",
+ "title": "변수 삭제",
+ "tooltip": "선택된 변수들 삭제"
+ },
+ "edit": "변수 편집",
+ "export": "내보내기",
+ "exportTooltip": "선택된 변수들 내보내기",
+ "form": {
+ "invalidJson": "유효하지 않은 JSON입니다",
+ "keyMaxLength": "키는 최대 250자까지 입력할 수 있습니다",
+ "keyRequired": "키는 필수입니다",
+ "valueRequired": "값은 필수입니다"
+ },
+ "import": {
+ "button": "가져오기",
+ "conflictResolution": "변수 충돌 해결 방법 선택",
+ "errorParsingJsonFile": "JSON 파일 구문 분석 오류: 변수들이 포함된 JSON 파일을 업로드하세요 (예: {\"key\": \"value\", ...}).",
+ "options": {
+ "fail": {
+ "description": "기존 변수가 존재하는 경우 가져오기를 실패합니다.",
+ "title": "실패"
+ },
+ "overwrite": {
+ "description": "충돌 시 해당 변수를 덮어씁니다.",
+ "title": "덮어쓰기"
+ },
+ "skip": {
+ "description": "이미 존재하는 변수는 가져오기를 건너뜁니다.",
+ "title": "건너뛰기"
+ }
+ },
+ "title": "변수 가져오기",
+ "upload": "JSON 파일 업로드",
+ "uploadPlaceholder": "변수를 포함한 JSON 파일 업로드 (예: {\"key\": \"value\", ...})"
+ },
+ "noRowsMessage": "변수를 찾을 수 없습니다",
+ "searchPlaceholder": "키 검색",
+ "variable_one": "변수",
+ "variable_other": "변수들"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/assets.json
new file mode 100644
index 0000000000000..3311814842bc4
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/assets.json
@@ -0,0 +1,30 @@
+{
+ "consumingDags": "사용 중인 Dags",
+ "createEvent": {
+ "button": "이벤트 생성",
+ "manual": {
+ "description": "에셋 이벤트를 수동으로 생성",
+ "extra": "에셋 이벤트 추가 정보",
+ "label": "수동 생성"
+ },
+ "materialize": {
+ "description": "이 에셋의 상위 Dag을 트리거합니다",
+ "descriptionWithDag": "이 에셋의 상위 Dag을 트리거합니다: {{dagName}}",
+ "label": "구체화",
+ "unpauseDag": "트리거 시 {{dagName}}의 일시중지를 해제합니다"
+ },
+ "success": {
+ "manualDescription": "에셋 이벤트가 성공적으로 수동 생성되었습니다.",
+ "manualTitle": "에셋 이벤트 생성 완료",
+ "materializeDescription": "상위 Dag {{dagId}}가 성공적으로 트리거되었습니다.",
+ "materializeTitle": "에셋 구체화 중"
+ },
+ "title": "{{name}}에 대한 에셋 이벤트 생성"
+ },
+ "group": "그룹",
+ "lastAssetEvent": "마지막 에셋 이벤트",
+ "name": "이름",
+ "producingTasks": "생성하는 태스크",
+ "scheduledDags": "스케줄된 Dags",
+ "searchPlaceholder": "에셋 검색"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/browse.json
new file mode 100644
index 0000000000000..ee5f1b9d1e405
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/browse.json
@@ -0,0 +1,23 @@
+{
+ "auditLog": {
+ "actions": {
+ "collapseAllExtra": "모든 추가 JSON 접기",
+ "expandAllExtra": "모든 추가 JSON 펼치기"
+ },
+ "columns": {
+ "event": "이벤트",
+ "extra": "추가 정보",
+ "user": "사용자",
+ "when": "시점"
+ },
+ "title": "감사 로그"
+ },
+ "xcom": {
+ "columns": {
+ "dag": "Dag",
+ "key": "키",
+ "value": "값"
+ },
+ "title": "XCom"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/common.json
new file mode 100644
index 0000000000000..a524a18d01883
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/common.json
@@ -0,0 +1,302 @@
+{
+ "admin": {
+ "Config": "설정",
+ "Connections": "커넥션들",
+ "Plugins": "플러그인들",
+ "Pools": "풀들",
+ "Providers": "제공자들",
+ "Variables": "변수들"
+ },
+ "asset_one": "에셋",
+ "asset_other": "에셋들",
+ "assetEvent_one": "에셋 이벤트",
+ "assetEvent_other": "에셋 이벤트들",
+ "backfill_one": "백필",
+ "backfill_other": "백필들",
+ "browse": {
+ "auditLog": "감사 로그",
+ "requiredActions": "필수 작업",
+ "xcoms": "XComs"
+ },
+ "collapseDetailsPanel": "세부 정보 패널 접기",
+ "createdAssetEvent_one": "생성된 에셋 이벤트",
+ "createdAssetEvent_other": "생성된 에셋 이벤트들",
+ "dag_one": "Dag",
+ "dag_other": "Dags",
+ "dagDetails": {
+ "catchup": "캐치업",
+ "concurrency": "동시 실행 수",
+ "dagRunTimeout": "Dag 실행 제한 시간",
+ "defaultArgs": "기본 인자",
+ "description": "설명",
+ "documentation": "Dag 문서",
+ "fileLocation": "파일 위치",
+ "hasTaskConcurrencyLimits": "태스크 동시성 제한 존재 여부",
+ "lastExpired": "마지막 만료 시점",
+ "lastParsed": "마지막 파싱 시점",
+ "latestDagVersion": "최신 Dag 버전",
+ "latestRun": "최근 실행",
+ "maxActiveRuns": "최대 활성 실행 수",
+ "maxActiveTasks": "최대 활성 태스크 수",
+ "maxConsecutiveFailedDagRuns": "연속 실패한 Dag 실행 최대 횟수",
+ "nextRun": "다음 실행",
+ "owner": "소유자",
+ "params": "매개변수",
+ "schedule": "스케줄",
+ "tags": "태그"
+ },
+ "dagId": "Dag ID",
+ "dagRun": {
+ "conf": "구성",
+ "dagVersions": "Dag 버전",
+ "dataIntervalEnd": "데이터 구간 종료",
+ "dataIntervalStart": "데이터 구간 시작",
+ "lastSchedulingDecision": "마지막 스케줄링 결정",
+ "queuedAt": "대기열에 추가된 시간",
+ "runAfter": "다음 실행",
+ "runType": "실행 유형",
+ "sourceAssetEvent": "소스 에셋 이벤트",
+ "triggeredBy": "실행 주체",
+ "triggeringUser": "트리거한 사용자 이름"
+ },
+ "dagRun_one": "Dag 실행",
+ "dagRun_other": "Dag 실행들",
+ "dagRunId": "Dag 실행 ID",
+ "dagWarnings": "Dag 경고/오류",
+ "defaultToGraphView": "그래프 뷰 기본 보기",
+ "defaultToGridView": "그리드 뷰 기본 보기",
+ "direction": "방향",
+ "docs": {
+ "documentation": "문서",
+ "githubRepo": "GitHub 저장소",
+ "restApiReference": "REST API 참조"
+ },
+ "duration": "기간",
+ "endDate": "종료일",
+ "error": {
+ "back": "뒤로",
+ "defaultMessage": "예기치 않은 오류가 발생했습니다.",
+ "home": "홈",
+ "notFound": "페이지를 찾을 수 없습니다.",
+ "title": "오류"
+ },
+ "expand": {
+ "collapse": "접기",
+ "expand": "펼치기",
+ "hotkey": "e",
+ "tooltip": "{{hotkey}}를 눌러 펼치기/접기합니다."
+ }, + "expression": { + "all": "모두", + "and": "그리고", + "any": "모든", + "or": "또는" + }, + "logicalDate": "논리적 날짜", + "logout": "로그아웃", + "logoutConfirmation": "애플리케이션에서 로그아웃하시겠습니까?", + "mapIndex": "맵 인덱스", + "modal": { + "cancel": "취소", + "confirm": "확인", + "delete": { + "button": "삭제", + "confirmation": "{{resourceName}}을(를) 삭제하시겠습니까? 이 작업은 되돌릴 수 없습니다." + } + }, + "nav": { + "admin": "관리자", + "assets": "에셋", + "browse": "탐색", + "dags": "Dags", + "docs": "문서", + "home": "홈", + "legacyFabViews": "레거시 FAB 뷰", + "plugins": "플러그인", + "security": "보안" + }, + "noItemsFound": "{{modelName}} 을(를) 찾을 수 없음", + "note": { + "add": "메모 추가", + "dagRun": "Dag 실행 메모", + "label": "메모", + "placeholder": "메모 추가...", + "taskInstance": "작업 인스턴스 메모" + }, + "pools": { + "deferred": "연기됨", + "open": "열림", + "pools_one": "풀", + "pools_other": "풀", + "queued": "대기 중", + "running": "실행 중", + "scheduled": "예약됨" + }, + "runId": "실행 ID", + "runTypes": { + "asset_triggered": "에셋 트리거", + "backfill": "백필", + "manual": "수동", + "scheduled": "예정됨" + }, + "scroll": { + "direction": { + "bottom": "아래", + "top": "위" + }, + "tooltip": "{{hotkey}}를 눌러 {{direction}}로 스크롤" + }, + "seconds": "{{count}}초", + "security": { + "actions": "작업", + "permissions": "권한", + "resources": "리소스", + "roles": "역할", + "users": "사용자" + }, + "selectLanguage": "언어 선택", + "showDetailsPanel": "세부 정보 패널 펼치기", + "source": { + "hide": "소스 숨기기", + "hotkey": "s", + "show": "소스 보기" + }, + "sourceAssetEvent_one": "소스 에셋 이벤트", + "sourceAssetEvent_other": "소스 에셋 이벤트", + "startDate": "시작일", + "state": "상태", + "states": { + "deferred": "연기됨", + "failed": "실패", + "no_status": "상태 없음", + "none": "상태 없음", + "queued": "대기 중", + "removed": "제거됨", + "restarting": "다시 시작 중", + "running": "실행 중", + "scheduled": "예약됨", + "skipped": "건너뜀", + "success": "성공", + "up_for_reschedule": "재예약 대기 중", + "up_for_retry": "재시도 대기 중", + "upstream_failed": "업스트림 실패" + }, + "switchToDarkMode": "다크 모드로 전환", + "switchToLightMode": "라이트 모드로 전환", + "table": { + "completedAt": "완료 시간", + "createdAt": "생성 시간", + "filterByTag": "태그로 Dag 필터링", + "filterColumns": "테이블 열 필터링", + "filterReset_one": "필터 초기화", + "filterReset_other": "필터 초기화", + "from": "시작", + "maxActiveRuns": "최대 활성 실행 수", + "noTagsFound": "태그를 찾을 수 없습니다.", + "tagMode": { + "all": "모두", + "any": "모든" + }, + "tagPlaceholder": "태그로 필터링", + "to": "종료" + }, + "task": { + "documentation": "작업 문서", + "lastInstance": "마지막 인스턴스", + "operator": "연산자", + "triggerRule": "트리거 규칙" + }, + "task_one": "작업", + "task_other": "작업들", + "taskId": "작업 ID", + "taskInstance": { + "dagVersion": "Dag 버전", + "executor": "실행기", + "executorConfig": "실행기 구성", + "hostname": "호스트 이름", + "maxTries": "최대 시도 횟수", + "pid": "PID", + "pool": "풀", + "poolSlots": "풀 슬롯", + "priorityWeight": "우선순위 가중치", + "queue": "큐", + "queuedWhen": "대기열에 추가된 시간", + "scheduledWhen": "예정된 시간", + "triggerer": { + "assigned": "할당된 트리거", + "class": "트리거 클래스", + "createdAt": "트리거 생성 시간", + "id": "트리거 ID", + "latestHeartbeat": "최신 트리거 상태", + "title": "트리거 정보" + }, + "unixname": "유닉스 이름" + }, + "taskInstance_one": "작업 인스턴스", + "taskInstance_other": "작업 인스턴스들", + "timeRange": { + "last12Hours": "지난 12 시간", + "last24Hours": "지난 24 시간", + "lastHour": "지난 1시간", + "pastWeek": "지난 주" + }, + "timestamp": { + "hide": "타임스탬프 숨기기", + "hotkey": "t", + "show": "타임스탬프 보기" + }, + "timezone": "시간대", + "timezoneModal": { + "current-timezone": "현재 시간:", + "placeholder": "시간대 선택", + "title": "시간대 선택", + "utc": "UTC (협정 세계시)" + }, + "toaster": { + "bulkDelete": { + "error": "대량 삭제 {{resourceName}} 요청 실패", + 
"success": { + "description": "{{count}}개 {{resourceName}}이(가) 성공적으로 삭제되었습니다. 키: {{keys}}", + "title": "대량 삭제 {{resourceName}} 요청 제출됨" + } + }, + "create": { + "error": "생성 {{resourceName}} 요청 실패", + "success": { + "description": "{{resourceName}}이(가) 성공적으로 생성되었습니다.", + "title": "생성 {{resourceName}} 요청 제출됨" + } + }, + "delete": { + "error": "삭제 {{resourceName}} 요청 실패", + "success": { + "description": "{{resourceName}}이(가) 성공적으로 삭제되었습니다.", + "title": "삭제 {{resourceName}} 요청 제출됨" + } + }, + "import": { + "error": "가져오기 {{resourceName}} 요청 실패", + "success": { + "description": "{{count}}개 {{resourceName}}이(가) 성공적으로 가져와졌습니다.", + "title": "가져오기 {{resourceName}} 요청 제출됨" + } + }, + "update": { + "error": "업데이트 {{resourceName}} 요청 실패", + "success": { + "description": "{{resourceName}}이(가) 성공적으로 업데이트되었습니다.", + "title": "업데이트 {{resourceName}} 요청 제출됨" + } + } + }, + "total": "총 {{state}} 수", + "triggered": "트리거됨", + "tryNumber": "시도 횟수", + "user": "사용자", + "wrap": { + "hotkey": "w", + "tooltip": "{{hotkey}}를 눌러 텍스트 줄바꿈 토글", + "unwrap": "줄바꿈 해제", + "wrap": "줄바꿈" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/components.json new file mode 100644 index 0000000000000..3395f29de4452 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/components.json @@ -0,0 +1,134 @@ +{ + "backfill": { + "affected_one": "1개의 실행이 트리거됩니다.", + "affected_other": "{{count}}개의 실행이 트리거됩니다.", + "affectedNone": "선택한 조건과 일치하는 실행이 없습니다.", + "allRuns": "모든 실행", + "backwards": "거꾸로 실행", + "dateRange": "날짜 범위", + "dateRangeFrom": "시작", + "dateRangeTo": "종료", + "errorStartDateBeforeEndDate": "시작일은 종료일보다 빨라야 합니다.", + "maxRuns": "최대 활성 실행 수", + "missingAndErroredRuns": "누락되었거나 오류가 발생한 실행", + "missingRuns": "누락된 실행", + "reprocessBehavior": "재처리 동작", + "run": "백필 실행", + "selectDescription": "이 Dag을(를) 특정 날짜 범위에 대해 실행합니다.", + "selectLabel": "백필", + "title": "백필 실행", + "toaster": { + "success": { + "description": "백필 작업이 성공적으로 트리거되었습니다.", + "title": "백필 생성됨" + } + }, + "tooltip": "백필에는 일정이 필요합니다.", + "unpause": "트리거 시 {{dag_display_name}} 일시 중지 해제", + "validation": { + "datesRequired": "데이터 구간 시작일과 종료일이 모두 제공되어야 합니다.", + "startBeforeEnd": "데이터 구간 시작일은 데이터 구간 종료일보다 작거나 같아야 합니다." + } + }, + "banner": { + "backfillInProgress": "백필 진행 중", + "cancel": "백필 취소", + "pause": "백필 일시 중지", + "unpause": "백필 일시 중지 해제" + }, + "clipboard": { + "copy": "복사" + }, + "close": "닫기", + "configForm": { + "advancedOptions": "고급 옵션", + "configJson": "구성 JSON", + "invalidJson": "잘못된 JSON 형식: {{errorMessage}}" + }, + "dagWarnings": { + "error_one": "오류 1개", + "errorAndWarning": "오류 1개 및 {{warningText}}", + "warning_one": "경고 1개", + "warning_other": "경고 {{count}}개" + }, + "durationChart": { + "duration": "기간 (초)", + "lastDagRun_one": "마지막 Dag 실행", + "lastDagRun_other": "마지막 {{count}}개 Dag 실행", + "lastTaskInstance_one": "마지막 작업 인스턴스", + "lastTaskInstance_other": "마지막 {{count}}개 작업 인스턴스", + "queuedDuration": "대기열 대기 시간", + "runAfter": "실행 후", + "runDuration": "실행 기간" + }, + "fileUpload": { + "files_other": "{{count}}개 파일" + }, + "flexibleForm": { + "placeholder": "값 선택", + "placeholderArray": "각 문자열을 새 줄에 입력하세요.", + "placeholderExamples": "입력하여 옵션 보기", + "placeholderMulti": "하나 또는 여러 값 선택", + "validationErrorArrayNotArray": "값은 배열이어야 합니다.", + "validationErrorArrayNotNumbers": "배열의 모든 요소는 숫자여야 합니다.", + "validationErrorArrayNotObject": "배열의 모든 요소는 객체여야 합니다.", + "validationErrorRequired": "이 필드는 필수입니다." 
+ }, + "graph": { + "directionDown": "위에서 아래로", + "directionLeft": "오른쪽에서 왼쪽으로", + "directionRight": "왼쪽에서 오른쪽으로", + "directionUp": "아래에서 위로", + "downloadImage": "그래프 이미지 다운로드", + "downloadImageError": "그래프 이미지를 다운로드하지 못했습니다.", + "downloadImageErrorTitle": "다운로드 실패", + "otherDagRuns": "+다른 Dag 실행", + "taskCount_one": "{{count}}개 작업", + "taskCount_other": "{{count}}개 작업", + "taskGroup": "작업 그룹" + }, + "limitedList": "+{{count}}개 더 보기", + "logs": { + "file": "파일", + "location": "{{name}}의 {{line}}번째 줄" + }, + "reparseDag": "Dag 재구문 분석", + "sortedAscending": "오름차순 정렬됨", + "sortedDescending": "내림차순 정렬됨", + "sortedUnsorted": "정렬되지 않음", + "taskTries": "작업 시도 횟수", + "toggleCardView": "카드 보기 표시", + "toggleTableView": "테이블 보기 표시", + "triggerDag": { + "button": "트리거", + "loading": "Dag 정보 로드 중...", + "loadingFailed": "Dag 정보를 로드하지 못했습니다. 다시 시도해주세요.", + "runIdHelp": "선택 사항 - 제공되지 않으면 생성됩니다.", + "selectDescription": "이 Dag을(를) 단일 실행 트리거", + "selectLabel": "단일 실행", + "title": "Dag 트리거", + "toaster": { + "success": { + "description": "Dag 실행이 성공적으로 트리거되었습니다.", + "title": "Dag 실행 트리거됨" + } + }, + "unpause": "트리거 시 {{dagDisplayName}} 일시 중지 해제" + }, + "trimText": { + "details": "세부 정보", + "empty": "비어 있음", + "noContent": "내용 없음." + }, + "versionDetails": { + "bundleLink": "번들 링크", + "bundleName": "번들 이름", + "bundleVersion": "번들 버전", + "createdAt": "생성 시간", + "versionId": "버전 ID" + }, + "versionSelect": { + "dagVersion": "Dag 버전", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dag.json new file mode 100644 index 0000000000000..9639d6bcf4ed0 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dag.json @@ -0,0 +1,116 @@ +{ + "allRuns": "모든 실행", + "blockingDeps": { + "dependency": "종속성", + "reason": "이유", + "title": "작업 스케줄링을 막는 종속성" + }, + "code": { + "bundleUrl": "번들 URL", + "noCode": "코드를 찾을 수 없습니다.", + "parsedAt": "구문 분석 시간:" + }, + "extraLinks": "추가 링크", + "grid": { + "buttons": { + "resetToLatest": "최신 버전으로 재설정", + "toggleGroup": "그룹 토글" + } + }, + "header": { + "buttons": { + "advanced": "고급", + "dagDocs": "Dag 문서" + } + }, + "logs": { + "allLevels": "모든 로그 레벨", + "allSources": "모든 소스", + "critical": "크리티컬", + "debug": "디버그", + "error": "오류", + "fullscreen": { + "button": "전체 화면", + "tooltip": "전체 화면을 보려면 {{hotkey}}를 누르세요." + }, + "info": "정보", + "noTryNumber": "시도 횟수 없음", + "settings": "로그 설정", + "viewInExternal": "{{name}}에서 로그 보기 (시도 {{attempt}})", + "warning": "경고" + }, + "overview": { + "buttons": { + "failedRun_one": "실패한 실행", + "failedRun_other": "실패한 실행", + "failedTask_one": "실패한 작업", + "failedTask_other": "실패한 작업", + "failedTaskInstance_one": "실패한 작업 인스턴스", + "failedTaskInstance_other": "실패한 작업 인스턴스" + }, + "charts": { + "assetEvent_one": "생성된 에셋 이벤트", + "assetEvent_other": "생성된 에셋 이벤트" + }, + "failedLogs": { + "title": "최근 실패한 작업 로그", + "viewFullLogs": "전체 로그 보기" + } + }, + "panel": { + "buttons": { + "options": "옵션", + "showGraph": "그래프 보기", + "showGrid": "그리드 보기" + }, + "dagRuns": { + "label": "Dag 실행 수" + }, + "dependencies": { + "label": "종속성", + "options": { + "allDagDependencies": "모든 Dag 종속성", + "externalConditions": "외부 조건", + "onlyTasks": "작업만" + }, + "placeholder": "종속성" + }, + "graphDirection": { + "label": "그래프 방향" + } + }, + "paramsFailed": "매개변수 로드 실패", + "parse": { + "toaster": { + "error": { + "description": "Dag 구문 분석 요청에 실패했습니다. 
처리되지 않은 보류 중인 구문 분석 요청이 있을 수 있습니다.",
+ "title": "Dag 재구문 분석 실패"
+ },
+ "success": {
+ "description": "Dag이(가) 곧 재구문 분석될 예정입니다.",
+ "title": "재구문 분석 요청이 성공적으로 제출되었습니다."
+ }
+ }
+ },
+ "tabs": {
+ "assetEvents": "에셋 이벤트",
+ "auditLog": "감사 로그",
+ "backfills": "백필",
+ "code": "코드",
+ "details": "세부 정보",
+ "logs": "로그",
+ "mappedTaskInstances_one": "작업 인스턴스 [{{count}}]",
+ "mappedTaskInstances_other": "작업 인스턴스 [{{count}}]",
+ "overview": "개요",
+ "renderedTemplates": "렌더링된 템플릿",
+ "requiredActions": "필수 작업",
+ "runs": "실행",
+ "taskInstances": "작업 인스턴스",
+ "tasks": "작업",
+ "xcom": "XCom"
+ },
+ "taskGroups": {
+ "collapseAll": "모든 작업 그룹 접기",
+ "expandAll": "모든 작업 그룹 확장"
+ }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dags.json
new file mode 100644
index 0000000000000..452cfe047e057
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dags.json
@@ -0,0 +1,97 @@
+{
+ "assetSchedule": "{{total}}개 에셋 중 {{count}}개 업데이트됨",
+ "dagActions": {
+ "delete": {
+ "button": "Dag 삭제",
+ "warning": "실행 및 작업을 포함하여 Dag과(와) 관련된 모든 메타데이터가 제거됩니다."
+ }
+ },
+ "favoriteDag": "Dag 즐겨찾기",
+ "filters": {
+ "allRunTypes": "모든 실행 유형",
+ "allStates": "모든 상태",
+ "favorite": {
+ "all": "전체",
+ "favorite": "즐겨찾기",
+ "unfavorite": "즐겨찾기 해제"
+ },
+ "paused": {
+ "active": "활성",
+ "all": "모두",
+ "paused": "일시 중지됨"
+ },
+ "runIdPatternFilter": "Dag 실행 검색",
+ "triggeringUserNameFilter": "트리거한 사용자로 검색"
+ },
+ "ownerLink": "{{owner}}의 소유자 링크",
+ "runAndTaskActions": {
+ "affectedTasks": {
+ "noItemsFound": "작업을 찾을 수 없습니다.",
+ "title": "영향을 받는 작업: {{count}}"
+ },
+ "clear": {
+ "button": "{{type}} 지우기",
+ "buttonTooltip": "Shift+C를 눌러 지우기",
+ "error": "{{type}}을(를) 지우지 못했습니다.",
+ "title": "{{type}} 지우기"
+ },
+ "delete": {
+ "button": "{{type}} 삭제",
+ "dialog": {
+ "resourceName": "{{type}} {{id}}",
+ "title": "{{type}} 삭제",
+ "warning": "{{type}}과(와) 관련된 모든 메타데이터가 제거됩니다."
+ }, + "error": "{{type}} 삭제 오류", + "success": { + "description": "{{type}} 삭제 요청이 성공했습니다.", + "title": "{{type}} 삭제 완료" + } + }, + "markAs": { + "button": "{{type}}을(를) ...으로 표시", + "buttonTooltip": { + "failed": "Shift+F를 눌러 실패로 표시", + "success": "Shift+S를 눌러 성공으로 표시" + }, + "title": "{{type}}을(를) {{state}}으로 표시" + }, + "options": { + "downstream": "다운스트림", + "existingTasks": "기존 작업 지우기", + "future": "미래", + "onlyFailed": "실패한 작업만 지우기", + "past": "과거", + "queueNew": "새 작업 대기열에 추가", + "runOnLatestVersion": "최신 번들 버전으로 실행", + "upstream": "업스트림" + } + }, + "search": { + "advanced": "고급 검색", + "clear": "검색 지우기", + "dags": "Dag 검색", + "hotkey": "+K", + "tasks": "작업 검색" + }, + "sort": { + "displayName": { + "asc": "표시 이름으로 정렬 (A-Z)", + "desc": "표시 이름으로 정렬 (Z-A)" + }, + "lastRunStartDate": { + "asc": "최신 실행 시작 날짜로 정렬 (이전-최신)", + "desc": "최신 실행 시작 날짜로 정렬 (최신-이전)" + }, + "lastRunState": { + "asc": "최신 실행 상태로 정렬 (A-Z)", + "desc": "최신 실행 상태로 정렬 (Z-A)" + }, + "nextDagRun": { + "asc": "다음 Dag 실행으로 정렬 (이전-최신)", + "desc": "다음 Dag 실행으로 정렬 (최신-이전)" + }, + "placeholder": "정렬 기준" + }, + "unfavoriteDag": "Dag 즐겨찾기 해제" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dashboard.json new file mode 100644 index 0000000000000..d5890e2f5dd69 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dashboard.json @@ -0,0 +1,45 @@ +{ + "favorite": { + "favoriteDags_one": "{{count}}개의 즐겨찾기 Dag", + "favoriteDags_other": "{{count}}개의 즐겨찾기 Dags", + "noDagRuns": "이 Dag에는 아직 실행된 DagRun이 없습니다.", + "noFavoriteDags": "아직 즐겨찾기가 없습니다. 목록에서 Dag 옆의 별 아이콘을 클릭하여 즐겨찾기에 추가하세요." + }, + "group": "그룹", + "health": { + "dagProcessor": "Dag 프로세서", + "health": "상태", + "healthy": "정상", + "lastHeartbeat": "마지막 Heartbeat", + "metaDatabase": "메타데이터베이스", + "scheduler": "스케줄러", + "status": "상태", + "triggerer": "트리거러", + "unhealthy": "비정상" + }, + "history": "기록", + "importErrors": { + "dagImportError_one": "Dag 가져오기 오류", + "dagImportError_other": "Dag 가져오기 오류들", + "searchByFile": "파일로 검색", + "timestamp": "타임스탬프" + }, + "managePools": "풀 관리", + "noAssetEvents": "에셋 이벤트를 찾을 수 없습니다.", + "poolSlots": "풀 슬롯", + "sortBy": { + "newestFirst": "최신순", + "oldestFirst": "오래된 순" + }, + "source": "소스", + "stats": { + "activeDags": "활성 Dags", + "failedDags": "실패한 Dags", + "queuedDags": "대기 중인 Dags", + "requiredActions": "필수 작업", + "runningDags": "실행 중인 Dags", + "stats": "통계" + }, + "uri": "Uri", + "welcome": "환영합니다" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/hitl.json new file mode 100644 index 0000000000000..9d0b7ad7c0ce1 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/hitl.json @@ -0,0 +1,23 @@ +{ + "requiredAction_one": "필수 작업", + "requiredAction_other": "필수 작업들", + "requiredActionState": "필수 작업 상태", + "response": { + "error": "응답 실패", + "optionsDescription": "이 작업 인스턴스에 대한 옵션을 선택하세요", + "optionsLabel": "옵션", + "received": "응답 수신 시간: ", + "respond": "응답하기", + "success": "{{taskId}} 응답 성공", + "title": "휴먼 태스크 인스턴스 - {{taskId}}" + }, + "state": { + "approvalReceived": "승인 수신됨", + "approvalRequired": "승인 필요", + "choiceReceived": "선택 수신됨", + "choiceRequired": "선택 필요", + "rejectionReceived": "거절 수신됨", + "responseReceived": "응답 수신됨", + "responseRequired": "응답 필요" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/admin.json new file mode 100644 index 
0000000000000..e584a54c77849
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/admin.json
@@ -0,0 +1,167 @@
+{
+ "columns": {
+ "description": "Omschrijving",
+ "key": "Key",
+ "name": "Naam",
+ "value": "Waarde"
+ },
+ "config": {
+ "columns": {
+ "section": "Sectie"
+ },
+ "title": "Airflow configuratie"
+ },
+ "connections": {
+ "add": "Connectie toevoegen",
+ "columns": {
+ "connectionId": "Connectie ID",
+ "connectionType": "Connectie type",
+ "host": "Host",
+ "port": "Poort"
+ },
+ "connection_one": "Connectie",
+ "connection_other": "Connecties",
+ "delete": {
+ "deleteConnection_one": "Verwijder 1 connectie",
+ "deleteConnection_other": "{{count}} connecties verwijderen",
+ "firstConfirmMessage_one": "Je staat op het punt om de volgende connectie te verwijderen:",
+ "firstConfirmMessage_other": "Je staat op het punt om de volgende connecties te verwijderen:",
+ "title": "Connectie verwijderen"
+ },
+ "edit": "Connectie wijzigen",
+ "form": {
+ "connectionIdRequired": "Connectie ID is verplicht",
+ "connectionIdRequirement": "Connectie ID kan niet bestaan uit enkel spaties",
+ "connectionTypeRequired": "Connectie type is verplicht",
+ "extraFields": "Extra velden",
+ "extraFieldsJson": "Extra velden JSON",
+ "helperText": "Missend connectie type? Controleer of je de bijbehorende Airflow Providers Package hebt geïnstalleerd.",
+ "helperTextForRedactedFields": "Gemaskeerde velden ('***') blijven onveranderd als ze niet worden gewijzigd.",
+ "selectConnectionType": "Selecteer connectie type",
+ "standardFields": "Standaard velden"
+ },
+ "nothingFound": {
+ "description": "Connecties gedefinieerd via omgevingsvariabelen of secrets managers worden hier niet weergegeven.",
+ "documentationLink": "Leer meer in de Airflow documentatie.",
+ "learnMore": "Deze worden geresolved tijdens runtime en zijn niet zichtbaar in de UI.",
+ "title": "Geen connectie gevonden!"
+ },
+ "searchPlaceholder": "Connecties zoeken",
+ "test": "Connectie testen",
+ "testDisabled": "Het testen van connecties is uitgeschakeld. Contacteer een administrator om dit in te schakelen.",
+ "typeMeta": {
+ "error": "Mislukt om connectie type meta op te halen",
+ "standardFields": {
+ "description": "Omschrijving",
+ "host": "Host",
+ "login": "Login",
+ "password": "Wachtwoord",
+ "port": "Poort",
+ "url_schema": "Schema"
+ }
+ }
+ },
+ "deleteActions": {
+ "button": "Verwijder",
+ "modal": {
+ "confirmButton": "Ja, verwijder",
+ "secondConfirmMessage": "Deze actie is permanent en kan niet ongedaan gemaakt worden.",
+ "thirdConfirmMessage": "Weet je zeker dat je door wilt gaan?"
+ },
+ "selected": "Geselecteerd",
+ "tooltip": "Verwijder geselecteerde connecties"
+ },
+ "formActions": {
+ "reset": "Reset",
+ "save": "Opslaan"
+ },
+ "plugins": {
+ "columns": {
+ "source": "Bron"
+ },
+ "importError_one": "Plugin import fout",
+ "importError_other": "Plugin import fouten",
+ "searchPlaceholder": "Zoeken op bestand"
+ },
+ "pools": {
+ "add": "Pool toevoegen",
+ "deferredSlotsIncluded": "Deferred Slots meetellen",
+ "delete": {
+ "title": "Pool verwijderen",
+ "warning": "Dit zal alle metadata gerelateerd aan de pool verwijderen en kan effect hebben op Tasks die deze pool gebruiken."
+ }, + "edit": "Pool wijzigen", + "form": { + "checkbox": "Vink aan om deferred tasks mee te tellen bij het berekenen van vrije pool slots", + "description": "Omschrijving", + "includeDeferred": "Deferred meetellen", + "nameMaxLength": "Naam mag maximaal 256 karakters bevatten", + "nameRequired": "Naam is verplicht", + "slots": "Slots" + }, + "noPoolsFound": "Geen pools gevonden", + "pool_one": "Pool", + "pool_other": "Pools", + "searchPlaceholder": "Pools zoeken", + "sort": { + "asc": "Naam (A-Z)", + "desc": "Naam (Z-A)", + "placeholder": "Sorteren op" + } + }, + "providers": { + "columns": { + "packageName": "Package naam", + "version": "Versie" + } + }, + "variables": { + "add": "Variabele toevoegen", + "columns": { + "isEncrypted": "Is versleuteld" + }, + "delete": { + "deleteVariable_one": "1 variabele verwijderen", + "deleteVariable_other": "Verwijder {{count}} variabelen", + "firstConfirmMessage_one": "Je staat op het punt om de volgende variabele te verwijderen:", + "firstConfirmMessage_other": "Je staat op het punt om de volgende variabelen te verwijderen:", + "title": "Verwijder variabele", + "tooltip": "Verwijder geselecteerde variabelen" + }, + "edit": "Wijzig variabele", + "export": "Exporteer", + "exportTooltip": "Exporteer geselecteerde variabelen", + "form": { + "invalidJson": "Ongeldige JSON", + "keyMaxLength": "Sleutel mag maximaal 250 karakters bevatten", + "keyRequired": "Sleutel is verplicht", + "valueRequired": "Waarde is verplicht" + }, + "import": { + "button": "Importeer", + "conflictResolution": "Selecteer variabele conflict oplossing", + "errorParsingJsonFile": "Fout bij parsen van JSON bestand: Upload een JSON bestand met variabelen (bijv., {\"key\": \"value\", ...}).", + "options": { + "fail": { + "description": "Misluk de import als bestaande variabelen gedetecteerd zijn.", + "title": "Mislukken" + }, + "overwrite": { + "description": "Overschrijf variabelen in het geval van een conflict.", + "title": "Overschrijven" + }, + "skip": { + "description": "Sla al bestaande variabelen over.", + "title": "Overslaan" + } + }, + "title": "Importeer variabelen", + "upload": "Upload een JSON bestand", + "uploadPlaceholder": "Upload een JSON bestand met variabelen (bijv., {\"sleutel\": \"waarde\", ...})" + }, + "noRowsMessage": "Geen variabelen gevonden", + "searchPlaceholder": "Sleutels zoeken", + "variable_one": "Variabele", + "variable_other": "Variabelen" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/assets.json new file mode 100644 index 0000000000000..92702d6288bc3 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/assets.json @@ -0,0 +1,30 @@ +{ + "consumingDags": "Consumerende Dags", + "createEvent": { + "button": "Event aanmaken", + "manual": { + "description": "Handmatig een Asset Event aanmaken", + "extra": "Asset Event extra", + "label": "Handmatig" + }, + "materialize": { + "description": "Trigger de Dag upstream van deze Asset", + "descriptionWithDag": "Trigger de Dag upstream van deze Asset: {{dagName}}", + "label": "Materialiseren", + "unpauseDag": "Hervat {{dagName}} op trigger" + }, + "success": { + "manualDescription": "Handmatig Asset event succesvol aangemaakt.", + "manualTitle": "Asset Event aangemaakt", + "materializeDescription": "Upstream Dag {{dagId}} werd succesvol geactiveerd.", + "materializeTitle": "Asset materialiseren" + }, + "title": "Asset Event voor {{name}} aanmaken" + }, + "group": "Groep", + "lastAssetEvent": "Laatste Asset 
Event", + "name": "Naam", + "producingTasks": "Producerende taken", + "scheduledDags": "Geplande Dags", + "searchPlaceholder": "Assets zoeken" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/browse.json new file mode 100644 index 0000000000000..22e80de0ca84a --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "All extra JSON inklappen", + "expandAllExtra": "Alle extra JSON uitklappen" + }, + "columns": { + "event": "Event", + "extra": "Extra", + "user": "Gebruiker", + "when": "Wanneer" + }, + "title": "Audit Log" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "Sleutel", + "value": "Waarde" + }, + "title": "XCom" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/common.json new file mode 100644 index 0000000000000..fe23db6168ebb --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/common.json @@ -0,0 +1,302 @@ +{ + "admin": { + "Config": "Configuratie", + "Connections": "Connecties", + "Plugins": "Plugins", + "Pools": "Pools", + "Providers": "Providers", + "Variables": "Variabelen" + }, + "asset_one": "Asset", + "asset_other": "Assets", + "assetEvent_one": "Asset Event", + "assetEvent_other": "Asset Events", + "backfill_one": "Backfill", + "backfill_other": "Backfills", + "browse": { + "auditLog": "Audit Log", + "requiredActions": "Vereiste acties", + "xcoms": "XComs" + }, + "collapseDetailsPanel": "Details inklappen", + "createdAssetEvent_one": "Asset Event aangemaakt", + "createdAssetEvent_other": "Asset Events aangemaakt", + "dag_one": "Dag", + "dag_other": "Dags", + "dagDetails": { + "catchup": "Catchup", + "concurrency": "Concurrency", + "dagRunTimeout": "Dag Run timeout", + "defaultArgs": "Standaard argumenten", + "description": "Omschrijving", + "documentation": "Dag documentatie", + "fileLocation": "Bestandslocatie", + "hasTaskConcurrencyLimits": "Heeft Task Concurrency limieten", + "lastExpired": "Laatst verlopen", + "lastParsed": "Laatst geparsed", + "latestDagVersion": "Laatste Dag versie", + "latestRun": "Laatste Run", + "maxActiveRuns": "Maximaal aantal actieve Runs", + "maxActiveTasks": "Maximaal aantal actieve Tasks", + "maxConsecutiveFailedDagRuns": "Maximaal aantal opeenvolgend mislukte Dag Runs", + "nextRun": "Volgende Run", + "owner": "Eigenaar", + "params": "Parameters", + "schedule": "Planning", + "tags": "Labels" + }, + "dagId": "Dag ID", + "dagRun": { + "conf": "Conf", + "dagVersions": "Dag Versie(s)", + "dataIntervalEnd": "Data interval einde", + "dataIntervalStart": "Data interval begin", + "lastSchedulingDecision": "Laatste planningsbeslissing", + "queuedAt": "In de wachtrij gezet om", + "runAfter": "Run na", + "runType": "Run type", + "sourceAssetEvent": "Bron Asset Event", + "triggeredBy": "Geactiveerd door", + "triggeringUser": "Activerende gebruikersnaam" + }, + "dagRun_one": "Dag Run", + "dagRun_other": "Dag Runs", + "dagRunId": "Dag Run ID", + "dagWarnings": "Dag waarschuwingen/fouten", + "defaultToGraphView": "Standaard grafiekweergave", + "defaultToGridView": "Standaard rasterweergave", + "direction": "Richting", + "docs": { + "documentation": "Documentatie", + "githubRepo": "GitHub Repo", + "restApiReference": "REST API referentie" + }, + "duration": "Duur", + "endDate": "Einddatum", + "error": { + "back": "Terug", + "defaultMessage": "Een onverwachte fout is 
opgetreden", + "home": "Home", + "notFound": "Pagina niet gevonden", + "title": "Fout" + }, + "expand": { + "collapse": "Uitklappen", + "expand": "Inklappen", + "hotkey": "e", + "tooltip": "Druk op {{hotkey}} op in- of uit te klappen" + }, + "expression": { + "all": "Alles", + "and": "En", + "any": "Elk", + "or": "Of" + }, + "logicalDate": "Logische datum", + "logout": "Uitloggen", + "logoutConfirmation": "Je staat op het punt om uit te loggen uit de applicatie.", + "mapIndex": "Map Index", + "modal": { + "cancel": "Annuleer", + "confirm": "Bevestig", + "delete": { + "button": "Verwijder", + "confirmation": "Weet je zeker dat je {{resourceName}} wilt verwijderen? Deze actie kan niet ongedaan gemaakt worden." + } + }, + "nav": { + "admin": "Beheer", + "assets": "Assets", + "browse": "Verken", + "dags": "Dags", + "docs": "Documentatie", + "home": "Home", + "legacyFabViews": "Legacy weergaven", + "plugins": "Plugins", + "security": "Beveiliging" + }, + "noItemsFound": "Geen {{modelName}} gevonden", + "note": { + "add": "Notitie toevoegen", + "dagRun": "Dag Run notitie", + "label": "Notitie", + "placeholder": "Voeg een notitie toe...", + "taskInstance": "Task Instance notitie" + }, + "pools": { + "deferred": "Uitgesteld", + "open": "Open", + "pools_one": "pool", + "pools_other": "pools", + "queued": "Wachtend", + "running": "Lopend", + "scheduled": "Gepland" + }, + "runId": "Run ID", + "runTypes": { + "asset_triggered": "Asset Triggered", + "backfill": "Backfill", + "manual": "Handmatig", + "scheduled": "Scheduled" + }, + "scroll": { + "direction": { + "bottom": "het einde", + "top": "het begin" + }, + "tooltip": "Druk op {{hotkey}} om te scrollen naar {{direction}}" + }, + "seconds": "{{count}}s", + "security": { + "actions": "Acties", + "permissions": "Permissies", + "resources": "Resources", + "roles": "Rollen", + "users": "Gebruikers" + }, + "selectLanguage": "Selecteer taal", + "showDetailsPanel": "Details weergeven", + "source": { + "hide": "Verberg bron", + "hotkey": "s", + "show": "Toon bron" + }, + "sourceAssetEvent_one": "Bron Asset Event", + "sourceAssetEvent_other": "Bron Asset Events", + "startDate": "Start datum", + "state": "Status", + "states": { + "deferred": "Uitgesteld", + "failed": "Mislukt", + "no_status": "Geen status", + "none": "Geen status", + "queued": "Wachtend", + "removed": "Verwijderd", + "restarting": "Herstartend", + "running": "Lopend", + "scheduled": "Gepland", + "skipped": "Overgeslagen", + "success": "Succesvol", + "up_for_reschedule": "Wachtend op herplanning", + "up_for_retry": "Wachtend op een nieuwe poging", + "upstream_failed": "Upstream mislukt" + }, + "switchToDarkMode": "Schakel over naar donkere modus", + "switchToLightMode": "Schakel over naar lichte modus", + "table": { + "completedAt": "Completed at", + "createdAt": "Created at", + "filterByTag": "Filter Dags op label", + "filterColumns": "Filter tabel kolommen", + "filterReset_one": "Reset filter", + "filterReset_other": "Reset filters", + "from": "Van", + "maxActiveRuns": "Maximaal aantal active Runs", + "noTagsFound": "Geen labels gevonden", + "tagMode": { + "all": "All", + "any": "Any" + }, + "tagPlaceholder": "Filter op label", + "to": "Naar" + }, + "task": { + "documentation": "Task documentatie", + "lastInstance": "Laatste Task Instance", + "operator": "Operator", + "triggerRule": "Trigger regel" + }, + "task_one": "Task", + "task_other": "Tasks", + "taskId": "Task ID", + "taskInstance": { + "dagVersion": "Dag versie", + "executor": "Executor", + "executorConfig": "Executor Configuratie", 
+ "hostname": "Hostname", + "maxTries": "Maximaal aantal pogingen", + "pid": "PID", + "pool": "Pool", + "poolSlots": "Pool Slots", + "priorityWeight": "Prioriteitsgewicht", + "queue": "Wachtrij", + "queuedWhen": "In de wachtrij gezet om", + "scheduledWhen": "Gepland om", + "triggerer": { + "assigned": "Toegewegen Triggerer", + "class": "Trigger class", + "createdAt": "Trigger aangemaakt op", + "id": "Trigger ID", + "latestHeartbeat": "Laatste Triggerer hartslag", + "title": "Triggerer informatie" + }, + "unixname": "Unix naam" + }, + "taskInstance_one": "Task Instance", + "taskInstance_other": "Task Instances", + "timeRange": { + "last12Hours": "Laatste 12 uur", + "last24Hours": "Laatste 24 uur", + "lastHour": "Laatste uur", + "pastWeek": "Laatste week" + }, + "timestamp": { + "hide": "Verberg tijd", + "hotkey": "t", + "show": "Toon tijd" + }, + "timezone": "Tijdzone", + "timezoneModal": { + "current-timezone": "Huidige tijd in", + "placeholder": "Selecteer een tijdzone", + "title": "Selecteer tijdzone", + "utc": "UTC (Gecoördineerde wereldtijd)" + }, + "toaster": { + "bulkDelete": { + "error": "Massaverwijderingsverzoek voor {{resourceName}} verzoek mislukt", + "success": { + "description": "{{count}} {{resourceName}} zijn succesvol verwijderd. Sleutels: {{keys}}", + "title": "Massaverwijderingsverzoek voor {{resourceName}} verzoek verzonden" + } + }, + "create": { + "error": "Aanmaakverzoek voor {{resourceName}} mislukt", + "success": { + "description": "{{resourceName}} is succesvol aangemaakt.", + "title": "Aanmaakverzoek voor {{resourceName}} verzonden" + } + }, + "delete": { + "error": "Verwijderverzoek voor {{resourceName}} mislukt", + "success": { + "description": "{{resourceName}} is succesvol verwijderd.", + "title": "Verwijderverzoek voor {{resourceName}} verzonden" + } + }, + "import": { + "error": "Importeerverzoek voor {{resourceName}} mislukt", + "success": { + "description": "{{count}} {{resourceName}} zijn succesvol geïmporteerd.", + "title": "Importeerverzoek voor {{resourceName}} verzonden" + } + }, + "update": { + "error": "Updateverzoek voor {{resourceName}} mislukt", + "success": { + "description": "{{resourceName}} is succesvol geupdatet.", + "title": "Updateverzoek voor {{resourceName}} verzonden" + } + } + }, + "total": "Totaal {{state}}", + "triggered": "Triggered", + "tryNumber": "Poging nummer", + "user": "Gebruiker", + "wrap": { + "hotkey": "w", + "tooltip": "Druk op {{hotkey}} om wrap te schakelen", + "unwrap": "Unwrap", + "wrap": "Wrap" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/components.json new file mode 100644 index 0000000000000..b3b6e82d10f6f --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/components.json @@ -0,0 +1,136 @@ +{ + "backfill": { + "affected_one": "1 run zal getriggered worden.", + "affected_other": "{{count}} Runs zullen getriggered worden.", + "affectedNone": "Geen Runs komen overeen met de geselecteerde criteria.", + "allRuns": "Alle Runs", + "backwards": "Run achterstevoren", + "dateRange": "Datumbereik", + "dateRangeFrom": "Van", + "dateRangeTo": "Tot", + "errorStartDateBeforeEndDate": "Startdatum moet voor de einddatum", + "maxRuns": "Maximaal aantal actieve Runs", + "missingAndErroredRuns": "Missende en mislukte Runs", + "missingRuns": "Missende Runs", + "reprocessBehavior": "Reprocess gedrag", + "run": "Run Backfill", + "selectDescription": "Draai deze Dag voor een geselecteerd datumbereik", + "selectLabel": 
"Backfill", + "title": "Run Backfill", + "toaster": { + "success": { + "description": "Backfill jobs zijn succesvol getriggerd.", + "title": "Backfill gegenereerd" + } + }, + "tooltip": "Backfill benodigd een planning", + "unpause": "Hervat {{dag_display_name}} na een trigger", + "validation": { + "datesRequired": "Data interval startdatum en einddatum moeten beide gegeven zijn.", + "startBeforeEnd": "Data interval startdatum moet eerder of gelijk aan de data interval einddatum zijn." + } + }, + "banner": { + "backfillInProgress": "Backfill in uitvoering", + "cancel": "Annuleer backfill", + "pause": "Pauzeer backfill", + "unpause": "Backfill hervatten" + }, + "clipboard": { + "copy": "Kopieer" + }, + "close": "Sluiten", + "configForm": { + "advancedOptions": "Geavanceerde opties", + "configJson": "Configuratie JSON", + "invalidJson": "Ongeldig JSON formaat: {{errorMessage}}" + }, + "dagWarnings": { + "error_one": "1 fout", + "error_other": "{{count}} fouten", + "errorAndWarning": "1 fout en {{warningText}}", + "warning_one": "1 waarschuwing", + "warning_other": "{{count}} waarschuwingen" + }, + "durationChart": { + "duration": "Duur (seconds)", + "lastDagRun_one": "Laatste Dag Run", + "lastDagRun_other": "Laatste {{count}} Dag Runs", + "lastTaskInstance_one": "Laatste Task Instance", + "lastTaskInstance_other": "Laatste {{count}} Task Instances", + "queuedDuration": "Duur in de wachtrij", + "runAfter": "Run na", + "runDuration": "Run duur" + }, + "fileUpload": { + "files_one": "{{count}} bestand", + "files_other": "{{count}} bestanden" + }, + "flexibleForm": { + "placeholder": "Selecteer waarde", + "placeholderArray": "Voer elke string in op een nieuwe regel", + "placeholderExamples": "Start met typen om opties te zien", + "placeholderMulti": "Selecteer een of meerder waarden", + "validationErrorArrayNotArray": "Waarde moet een array zijn.", + "validationErrorArrayNotNumbers": "Alle elementen in de array moeten nummers zijn.", + "validationErrorArrayNotObject": "Alle elementen in de array moeten objecten zijn.", + "validationErrorRequired": "Dit veld is verplicht" + }, + "graph": { + "directionDown": "Boven naar beneden", + "directionLeft": "Rechts naar links", + "directionRight": "Links naar rechts", + "directionUp": "Beneden naar boven", + "downloadImage": "Download afbeelding van de grafiek", + "downloadImageError": "Mislukt om de afbeelding van de grafiek te downloaden.", + "downloadImageErrorTitle": "Download mislukt", + "otherDagRuns": "+Andere Dag Runs", + "taskCount_one": "{{count}} Task", + "taskCount_other": "{{count}} Tasks", + "taskGroup": "Task Group" + }, + "limitedList": "+{{count}} meer", + "logs": { + "file": "Bestand", + "location": "regel {{line}} in {{name}}" + }, + "reparseDag": "Herparse Dag", + "sortedAscending": "oplopend gesorteerd", + "sortedDescending": "aflopend gesorteerd", + "sortedUnsorted": "ongesorteerd", + "taskTries": "Task pogingen", + "toggleCardView": "Toon kaartweergave", + "toggleTableView": "Toon tabelweergave", + "triggerDag": { + "button": "Trigger", + "loading": "DAG informatie aan het laden...", + "loadingFailed": "Mislukt om DAG informatie te laden. 
Probeer het opnieuw.", + "runIdHelp": "Optioneel - wordt gegenereerd indien niet opgegeven", + "selectDescription": "Trigger een enkele run van deze Dag", + "selectLabel": "Enkele Run", + "title": "Trigger Dag", + "toaster": { + "success": { + "description": "Dag run is succesvol getriggerd.", + "title": "Dag Run triggered" + } + }, + "unpause": "Hervat {{dagDisplayName}} op trigger" + }, + "trimText": { + "details": "Details", + "empty": "Leeg", + "noContent": "Geen inhoud beschikbaar." + }, + "versionDetails": { + "bundleLink": "Bundle link", + "bundleName": "Bundle naam", + "bundleVersion": "Bundle versie", + "createdAt": "Gemaakt op", + "versionId": "Versie ID" + }, + "versionSelect": { + "dagVersion": "Dag versie", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/dag.json new file mode 100644 index 0000000000000..6543180a6a744 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/dag.json @@ -0,0 +1,116 @@ +{ + "allRuns": "Alle Runs", + "blockingDeps": { + "dependency": "Afhankelijkheid", + "reason": "Reden", + "title": "Afhankelijkheden die de Task blokkeren om ingepland te worden" + }, + "code": { + "bundleUrl": "Bundle URL", + "noCode": "Geen code gevonden", + "parsedAt": "Parsed at:" + }, + "extraLinks": "Extra Links", + "grid": { + "buttons": { + "resetToLatest": "Reset naar laatste", + "toggleGroup": "Groepen omschakelen" + } + }, + "header": { + "buttons": { + "advanced": "Geavanceerd", + "dagDocs": "Dag Docs" + } + }, + "logs": { + "allLevels": "Alle log niveaus", + "allSources": "Alle bronnen", + "critical": "CRITICAL", + "debug": "DEBUG", + "error": "ERROR", + "fullscreen": { + "button": "Volledig scherm", + "tooltip": "Druk {{hotkey}} voor volledig scherm" + }, + "info": "INFO", + "noTryNumber": "Geen poging nummer", + "settings": "Loginstellingen", + "viewInExternal": "Bekijk logs in {{name}} (attempt {{attempt}})", + "warning": "WARNING" + }, + "overview": { + "buttons": { + "failedRun_one": "Mislukte Run", + "failedRun_other": "Mislukte Runs", + "failedTask_one": "Mislukte Task", + "failedTask_other": "Mislukte Tasks", + "failedTaskInstance_one": "Mislukte Task Instance", + "failedTaskInstance_other": "Mislukte Task Instances" + }, + "charts": { + "assetEvent_one": "Aangemaakte Asset Event", + "assetEvent_other": "Aangemaakte Asset Events" + }, + "failedLogs": { + "title": "Recente mislukte Task logs", + "viewFullLogs": "Bekijk volledige logs" + } + }, + "panel": { + "buttons": { + "options": "Opties", + "showGraph": "Grafiek tonen", + "showGrid": "Raster tonen" + }, + "dagRuns": { + "label": "Aantal Dag Runs" + }, + "dependencies": { + "label": "Afhankelijkheden", + "options": { + "allDagDependencies": "Alle Dag afhankelijkheden", + "externalConditions": "Externe omstandigheden", + "onlyTasks": "Alleen Rasks" + }, + "placeholder": "Afhankelijkheden" + }, + "graphDirection": { + "label": "Richting van de grafiek" + } + }, + "paramsFailed": "Mislukt om parameters te laden", + "parse": { + "toaster": { + "error": { + "description": "Dag parsing verzoek mislukt. 
+        "description": "Dag parsing verzoek mislukt. Er kunnen nog wachtende parse verzoeken afgehandeld worden.",
+        "title": "Mislukt om de Dag te reparsen"
+      },
+      "success": {
+        "description": "Dag zal snel gereparsed worden.",
+        "title": "Reparse verzoek succesvol verzonden"
+      }
+    }
+  },
+  "tabs": {
+    "assetEvents": "Asset Events",
+    "auditLog": "Audit Log",
+    "backfills": "Backfills",
+    "code": "Code",
+    "details": "Details",
+    "logs": "Logs",
+    "mappedTaskInstances_one": "Task Instance [{{count}}]",
+    "mappedTaskInstances_other": "Task Instances [{{count}}]",
+    "overview": "Overzicht",
+    "renderedTemplates": "Gerenderde templates",
+    "requiredActions": "Vereiste acties",
+    "runs": "Runs",
+    "taskInstances": "Task Instances",
+    "tasks": "Tasks",
+    "xcom": "XCom"
+  },
+  "taskGroups": {
+    "collapseAll": "Alle Task Groups inklappen",
+    "expandAll": "Alle Task Groups uitklappen"
+  }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/dags.json
new file mode 100644
index 0000000000000..b57b2290a9232
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/dags.json
@@ -0,0 +1,97 @@
+{
+  "assetSchedule": "{{count}} van {{total}} assets geüpdatet",
+  "dagActions": {
+    "delete": {
+      "button": "Verwijder Dag",
+      "warning": "Dit zal alle metadata gerelateerd aan de Dag verwijderen, inclusief Runs en Tasks."
+    }
+  },
+  "favoriteDag": "Favoriete Dag",
+  "filters": {
+    "allRunTypes": "Alle Run types",
+    "allStates": "Alle statussen",
+    "favorite": {
+      "all": "Alle",
+      "favorite": "Favoriet",
+      "unfavorite": "Verwijder Favoriet"
+    },
+    "paused": {
+      "active": "Actief",
+      "all": "Alles",
+      "paused": "Gepauzeerd"
+    },
+    "runIdPatternFilter": "Zoek Dag Runs",
+    "triggeringUserNameFilter": "Zoek op Triggering User"
+  },
+  "ownerLink": "Eigenaarslink voor {{owner}}",
+  "runAndTaskActions": {
+    "affectedTasks": {
+      "noItemsFound": "Geen Tasks gevonden.",
+      "title": "Betrokken Tasks: {{count}}"
+    },
+    "clear": {
+      "button": "Wis {{type}}",
+      "buttonTooltip": "Druk op Shift+C om te wissen",
+      "error": "Mislukt om {{type}} te wissen",
+      "title": "Wis {{type}}"
+    },
+    "delete": {
+      "button": "Verwijder {{type}}",
+      "dialog": {
+        "resourceName": "{{type}} {{id}}",
+        "title": "Verwijder {{type}}",
+        "warning": "Dit zal alle metadata gerelateerd aan {{type}} verwijderen."
+      },
+      "error": "Fout bij het verwijderen van {{type}}",
+      "success": {
+        "description": "Het verzoek om {{type}} te verwijderen was succesvol.",
+        "title": "{{type}} succesvol verwijderd"
+      }
+    },
+    "markAs": {
+      "button": "Markeer {{type}} als ...",
+      "buttonTooltip": {
+        "failed": "Druk op Shift+F om te markeren als mislukt",
+        "success": "Druk op Shift+S om te markeren als succesvol"
+      },
+      "title": "Markeer {{type}} als {{state}}"
+    },
+    "options": {
+      "downstream": "Downstream",
+      "existingTasks": "Wis bestaande Tasks",
+      "future": "Toekomst",
+      "onlyFailed": "Wis enkel mislukte Tasks",
+      "past": "Verleden",
+      "queueNew": "Zet nieuwe Tasks in de wachtrij",
+      "runOnLatestVersion": "Voer uit met de nieuwste bundelversie",
+      "upstream": "Upstream"
+    }
+  },
+  "search": {
+    "advanced": "Geavanceerd zoeken",
+    "clear": "Zoekopdracht wissen",
+    "dags": "Zoek Dags",
+    "hotkey": "+K",
+    "tasks": "Zoek Tasks"
+  },
+  "sort": {
+    "displayName": {
+      "asc": "Sorteer op weergavenaam (A-Z)",
+      "desc": "Sorteer op weergavenaam (Z-A)"
+    },
+    "lastRunStartDate": {
+      "asc": "Sorteer op laatste Run startdatum (Eerste-Laatste)",
+      "desc": "Sorteer op laatste Run startdatum (Laatste-Eerste)"
+    },
+    "lastRunState": {
+      "asc": "Sorteer op laatste Run status (A-Z)",
+      "desc": "Sorteer op laatste Run status (Z-A)"
+    },
+    "nextDagRun": {
+      "asc": "Sorteer op volgende Dag Run (Eerste-Laatste)",
+      "desc": "Sorteer op volgende Dag Run (Laatste-Eerste)"
+    },
+    "placeholder": "Sorteer op"
+  },
+  "unfavoriteDag": "Verwijder Favoriete Dag"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/dashboard.json
new file mode 100644
index 0000000000000..91f9d84a06ad3
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/dashboard.json
@@ -0,0 +1,45 @@
+{
+  "favorite": {
+    "favoriteDags_one": "Eerste favoriete Dag",
+    "favoriteDags_other": "Eerste {{count}} favoriete Dags",
+    "noDagRuns": "Er is nog geen Dag Run voor deze Dag.",
+    "noFavoriteDags": "Geen favorieten. Klik op het sterpictogram naast een Dag in de lijst om deze aan je favorieten toe te voegen."
+  },
+  "group": "Groep",
+  "health": {
+    "dagProcessor": "Dag Processor",
+    "health": "Status",
+    "healthy": "OK",
+    "lastHeartbeat": "Laatste hartslag",
+    "metaDatabase": "MetaDatabase",
+    "scheduler": "Scheduler",
+    "status": "Status",
+    "triggerer": "Triggerer",
+    "unhealthy": "Fout"
+  },
+  "history": "Geschiedenis",
+  "importErrors": {
+    "dagImportError_one": "Dag leesfout",
+    "dagImportError_other": "Dag leesfouten",
+    "searchByFile": "Zoek op bestand",
+    "timestamp": "Tijd"
+  },
+  "managePools": "Beheer Pools",
+  "noAssetEvents": "Geen Asset Events gevonden.",
+  "poolSlots": "Pool Slots",
+  "sortBy": {
+    "newestFirst": "Nieuwste eerst",
+    "oldestFirst": "Oudste eerst"
+  },
+  "source": "Bron",
+  "stats": {
+    "activeDags": "Actieve Dags",
+    "failedDags": "Mislukte Dags",
+    "queuedDags": "Dags in de wachtrij",
+    "requiredActions": "Vereiste acties",
+    "runningDags": "Lopende Dags",
+    "stats": "Statistieken"
+  },
+  "uri": "Uri",
+  "welcome": "Welkom"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/nl/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/nl/hitl.json
new file mode 100644
index 0000000000000..c71a772cdbd07
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/nl/hitl.json
@@ -0,0 +1,23 @@
+{
+  "requiredAction_one": "Vereiste actie",
+  "requiredAction_other": "Vereiste acties",
+  "requiredActionState": "Vereiste actie status",
+  "response": {
+    "error": "Reactie mislukt",
+    "optionsDescription": "Kies je opties voor deze Task Instance",
+    "optionsLabel": "Opties",
+    "received": "Reactie ontvangen op ",
+    "respond": "Reageer",
+    "success": "{{taskId}} reactie succesvol",
+    "title": "Human Task Instance - {{taskId}}"
+  },
+  "state": {
+    "approvalReceived": "Goedkeuring ontvangen",
+    "approvalRequired": "Goedkeuring vereist",
+    "choiceReceived": "Keuze ontvangen",
+    "choiceRequired": "Keuze vereist",
+    "rejectionReceived": "Afwijzing ontvangen",
+    "responseReceived": "Reactie ontvangen",
+    "responseRequired": "Reactie vereist"
+  }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/admin.json
new file mode 100644
index 0000000000000..3328d86ee87b1
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/admin.json
@@ -0,0 +1,183 @@
+{
+  "columns": {
+    "description": "Opis",
+    "key": "Klucz",
+    "name": "Nazwa",
+    "value": "Wartość"
+  },
+  "config": {
+    "columns": {
+      "section": "Sekcja"
+    },
+    "title": "Konfiguracja Airflowa"
+  },
+  "connections": {
+    "add": "Dodaj połączenie",
+    "columns": {
+      "connectionId": "Identyfikator połączenia",
+      "connectionType": "Typ połączenia",
+      "host": "Host",
+      "port": "Port"
+    },
+    "connection_few": "Połączenia",
+    "connection_many": "Połączeń",
+    "connection_one": "Połączenie",
+    "connection_other": "Połączenia",
+    "delete": {
+      "deleteConnection_few": "Usuń {{count}} połączenia",
+      "deleteConnection_many": "Usuń {{count}} połączeń",
+      "deleteConnection_one": "Usuń 1 połączenie",
+      "deleteConnection_other": "Usuń {{count}} połączenia",
+      "firstConfirmMessage_few": "Zamierzasz usunąć następujące połączenia:",
+      "firstConfirmMessage_many": "Zamierzasz usunąć następujące połączenia:",
+      "firstConfirmMessage_one": "Zamierzasz usunąć następujące połączenie:",
+      "firstConfirmMessage_other": "Zamierzasz usunąć następujące połączenia:",
+      "title": "Usuń połączenie"
+    },
+    "edit": "Edytuj połączenie",
+    "form": {
+      "connectionIdRequired": "Identyfikator połączenia jest wymagany",
+      "connectionIdRequirement": "Identyfikator połączenia nie może zawierać wyłącznie spacji",
+      "connectionTypeRequired": "Typ połączenia jest wymagany",
+      "extraFields": "Dodatkowe pola",
+      "extraFieldsJson": "Dodatkowe pola JSON",
+      "helperText": "Brakuje typu połączenia? Upewnij się, że zainstalowałeś odpowiedniego dostawcę.",
+      "helperTextForRedactedFields": "Ukryte pola ('***') nie będą zapisane, jeśli nie zostaną zmienione.",
+      "selectConnectionType": "Wybierz typ połączenia",
+      "standardFields": "Standardowe pola"
+    },
+    "nothingFound": {
+      "description": "Połączenia zdefiniowane za pomocą zmiennych środowiskowych lub menedżerów sekretów nie są tutaj wyświetlane.",
+      "documentationLink": "Dowiedz się więcej w dokumentacji Airflow.",
+      "learnMore": "Są one rozwiązywane w czasie wykonywania i nie są widoczne w interfejsie użytkownika.",
+      "title": "Nie znaleziono połączeń!"
+    },
+    "searchPlaceholder": "Szukaj połączeń",
+    "test": "Test połączenia",
+    "testDisabled": "Testowanie połączeń wyłączone. Skontaktuj się z administratorem, aby je włączyć.",
+    "typeMeta": {
+      "error": "Nie udało się pobrać metadanych typu połączenia",
+      "standardFields": {
+        "description": "Opis",
+        "host": "Host",
+        "login": "Login",
+        "password": "Hasło",
+        "port": "Port",
+        "url_schema": "Schemat"
+      }
+    }
+  },
+  "deleteActions": {
+    "button": "Usuń",
+    "modal": {
+      "confirmButton": "Tak, usuń",
+      "secondConfirmMessage": "Ta akcja jest nieodwracalna.",
+      "thirdConfirmMessage": "Czy na pewno chcesz kontynuować?"
+    },
+    "selected": "Wybrano",
+    "tooltip": "Usuń wybrane połączenia"
+  },
+  "formActions": {
+    "reset": "Resetuj",
+    "save": "Zapisz"
+  },
+  "plugins": {
+    "columns": {
+      "source": "Źródło"
+    },
+    "importError_few": "Błędy importu wtyczek",
+    "importError_many": "Błędów importu wtyczek",
+    "importError_one": "Błąd importu wtyczki",
+    "importError_other": "Błędy importu wtyczek",
+    "searchPlaceholder": "Szukaj po pliku"
+  },
+  "pools": {
+    "add": "Dodaj pulę",
+    "deferredSlotsIncluded": "Uwzględniono odroczone miejsca",
+    "delete": {
+      "title": "Usuń pulę",
+      "warning": "To usunie wszystkie metadane związane z pulą i może wpłynąć na zadania korzystające z tej puli."
+ }, + "edit": "Edytuj pulę", + "form": { + "checkbox": "Zaznacz, aby uwzględnić zadania odroczone przy obliczaniu wolnych miejsc w puli", + "description": "Opis", + "includeDeferred": "Uwzględnij odroczone", + "nameMaxLength": "Nazwa może zawierać maksymalnie 256 znaków", + "nameRequired": "Nazwa jest wymagana", + "slots": "Miejsca" + }, + "noPoolsFound": "Nie znaleziono pul", + "pool_few": "Pule", + "pool_many": "Puli", + "pool_one": "Pula", + "pool_other": "Pule", + "searchPlaceholder": "Szukaj pul", + "sort": { + "asc": "Nazwa (A-Z)", + "desc": "Nazwa (Z-A)", + "placeholder": "Sortuj według" + } + }, + "providers": { + "columns": { + "packageName": "Nazwa paczki", + "version": "Wersja" + } + }, + "variables": { + "add": "Dodaj zmienną", + "columns": { + "isEncrypted": "Zaszyfrowana" + }, + "delete": { + "deleteVariable_few": "Usuń {{count}} zmienne", + "deleteVariable_many": "Usuń {{count}} zmiennych", + "deleteVariable_one": "Usuń 1 zmienną", + "deleteVariable_other": "Usuń {{count}} zmienne", + "firstConfirmMessage_few": "Zamierzasz usunąć następujące zmienne:", + "firstConfirmMessage_many": "Zamierzasz usunąć następujące zmienne:", + "firstConfirmMessage_one": "Zamierzasz usunąć następującą zmienną:", + "firstConfirmMessage_other": "Zamierzasz usunąć następujące zmienne:", + "title": "Usuń zmienną", + "tooltip": "Usuń wybrane zmienne" + }, + "edit": "Edytuj zmienną", + "export": "Eksportuj", + "exportTooltip": "Eksportuj zmienne", + "form": { + "invalidJson": "Nieprawidłowy JSON", + "keyMaxLength": "Klucz może zawierać maksymalnie 250 znaków", + "keyRequired": "Klucz jest wymagany", + "valueRequired": "Wartość jest wymagana" + }, + "import": { + "button": "Importuj", + "conflictResolution": "Wybierz sposób rozwiązywania konfliktów zmiennych", + "errorParsingJsonFile": "Błąd podczas przetwarzania pliku JSON: Prześlij plik JSON zawierający zmienne (np. {\"key\": \"value\", ...}).", + "options": { + "fail": { + "description": "Import nie powiedzie się, jeśli wykryte zostaną istniejące zmienne.", + "title": "Przerwij" + }, + "overwrite": { + "description": "Nadpisuje zmienną w przypadku konfliktu.", + "title": "Nadpisz" + }, + "skip": { + "description": "Pomija import zmiennych, które już istnieją.", + "title": "Pomiń" + } + }, + "title": "Importuj zmienne", + "upload": "Prześlij plik JSON", + "uploadPlaceholder": "Prześlij plik JSON zawierający zmienne (np. 
{\"key\": \"value\", ...})" + }, + "noRowsMessage": "Nie znaleziono zmiennych", + "searchPlaceholder": "Szukaj kluczy", + "variable_few": "Zmienne", + "variable_many": "Zmiennych", + "variable_one": "Zmienna", + "variable_other": "Zmienne" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/assets.json new file mode 100644 index 0000000000000..8456890416b2b --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/assets.json @@ -0,0 +1,30 @@ +{ + "consumingDags": "Przetwarzanie Dagów", + "createEvent": { + "button": "Utwórz zdarzenie", + "manual": { + "description": "Utwórz ręcznie zdarzenie zasobu", + "extra": "Dodatkowe informacje o zdarzeniu zasobu", + "label": "Ręcznie" + }, + "materialize": { + "description": "Wykonaj Dag zależny od tego zasobu", + "descriptionWithDag": "Wykonaj Dag zależny od tego zasobu: {{dagName}}", + "label": "Materializuj", + "unpauseDag": "Wznów {{dagName}} przy wywołaniu" + }, + "success": { + "manualDescription": "Ręczne utworzenie zdarzenia zasobu zakończone sukcesem.", + "manualTitle": "Zdarzenie zasobu utworzone", + "materializeDescription": "Zależny Dag {{dagId}} został pomyślnie wywołany.", + "materializeTitle": "Materializowanie zasobu" + }, + "title": "Utwórz zdarzenie zasobu dla {{name}}" + }, + "group": "Grupa", + "lastAssetEvent": "Ostatnie zdarzenie zasobu", + "name": "Nazwa", + "producingTasks": "Zadania produkujące", + "scheduledDags": "Zaplanowane Dagi", + "searchPlaceholder": "Szukaj zasobów" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/browse.json new file mode 100644 index 0000000000000..5066d6dc9df7c --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "Zwiń wszystkie dodatkowe dane JSON", + "expandAllExtra": "Rozwiń wszystkie dodatkowe dane JSON" + }, + "columns": { + "event": "Zdarzenie", + "extra": "Dodatkowe", + "user": "Użytkownik", + "when": "Kiedy" + }, + "title": "Dziennik audytu" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "Klucz", + "value": "Wartość" + }, + "title": "XCom" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/common.json new file mode 100644 index 0000000000000..c394d6d79e0bd --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/common.json @@ -0,0 +1,324 @@ +{ + "admin": { + "Config": "Konfiguracja", + "Connections": "Połączenia", + "Plugins": "Wtyczki", + "Pools": "Pule", + "Providers": "Providery", + "Variables": "Zmienne" + }, + "asset_few": "Zasoby", + "asset_many": "Zasobów", + "asset_one": "Zasób", + "asset_other": "Zasoby", + "assetEvent_few": "Zdarzenia zasobów", + "assetEvent_many": "Zdarzeń zasobów", + "assetEvent_one": "Zdarzenie Zasobu", + "assetEvent_other": "Zdarzenia Zasobów", + "backfill_few": "Wypełnienia wsteczne", + "backfill_many": "Wypełnień wstecznych", + "backfill_one": "Wypełnienie wsteczne", + "backfill_other": "Wypełnienia wsteczne", + "browse": { + "auditLog": "Log audytu", + "requiredActions": "Wymagane akcje", + "xcoms": "XComy" + }, + "collapseDetailsPanel": "Zwiń panel szczegółów", + "createdAssetEvent_few": "Utworzone zdarzenia zasobów", + "createdAssetEvent_many": "Utworzonych zdarzeń zasobów", + "createdAssetEvent_one": "Utworzone zdarzenie zasobu", + "createdAssetEvent_other": 
"Utworzone zdarzenia zasobów", + "dag_few": "Dagi", + "dag_many": "Dagów", + "dag_one": "Dag", + "dag_other": "Dagi", + "dagDetails": { + "catchup": "Nadrabianie zaległości", + "concurrency": "Współbieżność", + "dagRunTimeout": "Limit czasu wykonania", + "defaultArgs": "Domyślne argumenty", + "description": "Opis", + "documentation": "Dokumentacja Daga", + "fileLocation": "Lokalizacja pliku", + "hasTaskConcurrencyLimits": "Posiada ograniczenia współbieżności zadań", + "lastExpired": "Ostatnio wygasły", + "lastParsed": "Ostatnia analiza", + "latestDagVersion": "Najnowsza wersja Daga", + "latestRun": "Ostatnie wykonanie", + "maxActiveRuns": "Maksymalna liczba aktywnych wykonań", + "maxActiveTasks": "Maksymalna liczba aktywnych zadań", + "maxConsecutiveFailedDagRuns": "Maksymalna liczba kolejnych nieudanych wykonań Daga", + "nextRun": "Następne Wykonanie", + "owner": "Właściciel", + "params": "Parametry", + "schedule": "Harmonogram", + "tags": "Etykiety" + }, + "dagId": "Identyfikator Daga", + "dagRun": { + "conf": "Konfiguracja", + "dagVersions": "Wersje Daga", + "dataIntervalEnd": "Koniec interwału danych", + "dataIntervalStart": "Początek interwału danych", + "lastSchedulingDecision": "Ostatnia decyzja harmonogramu", + "queuedAt": "Zakolejkowano o", + "runAfter": "Wykonaj po", + "runType": "Typ wykonania", + "sourceAssetEvent": "Zdarzenie źródłowego zasobu", + "triggeredBy": "Uruchomiony jako", + "triggeringUser": "Użytkownik który uruchomił" + }, + "dagRun_few": "Wykonania", + "dagRun_many": "Wykonań", + "dagRun_one": "Wykonanie", + "dagRun_other": "Wykonania", + "dagRunId": "Identyfikator wykonania", + "dagWarnings": "Ostrzeżenia/Błędy Daga", + "defaultToGraphView": "Domyślnie widok grafu", + "defaultToGridView": "Domyślnie widok siatki", + "direction": "Kierunek", + "docs": { + "documentation": "Dokumentacja", + "githubRepo": "Repozytorium GitHub", + "restApiReference": "Dokuentacja REST API" + }, + "duration": "Czas trwania", + "endDate": "Data zakończenia", + "error": { + "back": "Powrót", + "defaultMessage": "Wystąpił nieoczekiwany błąd", + "home": "Strona główna", + "notFound": "Nie znaleziono strony", + "title": "Błąd" + }, + "expand": { + "collapse": "Zwiń", + "expand": "Rozwiń", + "hotkey": "e", + "tooltip": "Wybierz {{hotkey}} aby przełączyć rozwijanie" + }, + "expression": { + "all": "Wszystkie", + "and": "ORAZ", + "any": "Dowolne", + "or": "LUB" + }, + "logicalDate": "Data logiczna", + "logout": "Wyloguj", + "logoutConfirmation": "Zamierzasz wylogować się z aplikacji.", + "mapIndex": "Indeks mapowania", + "modal": { + "cancel": "Anuluj", + "confirm": "Potwierdź", + "delete": { + "button": "Usuń", + "confirmation": "Jesteś pewien, że chcesz usunąć {{resourceName}}? Ta operacja nie może być cofnięta." 
+    }
+  },
+  "nav": {
+    "admin": "Admin",
+    "assets": "Zasoby",
+    "browse": "Przegląd",
+    "dags": "Dagi",
+    "docs": "Pomoc",
+    "home": "Pulpit",
+    "legacyFabViews": "Widoki Fab z Airflow 2",
+    "plugins": "Wtyczki",
+    "security": "Dostęp"
+  },
+  "noItemsFound": "Nie znaleziono modelu {{modelName}}",
+  "note": {
+    "add": "Dodaj notatkę",
+    "dagRun": "Notatki wykonania",
+    "label": "Notatka",
+    "placeholder": "Dodaj notatkę...",
+    "taskInstance": "Notatka instancji zadania"
+  },
+  "pools": {
+    "deferred": "Odłożone",
+    "open": "Otwarte",
+    "pools_few": "Pule",
+    "pools_many": "Puli",
+    "pools_one": "Pula",
+    "pools_other": "Pule",
+    "queued": "W kolejce",
+    "running": "W trakcie",
+    "scheduled": "Zaplanowane"
+  },
+  "runId": "Identyfikator wykonania",
+  "runTypes": {
+    "asset_triggered": "Uruchomiony przez zasób",
+    "backfill": "Wypełnienie wsteczne",
+    "manual": "Ręcznie",
+    "scheduled": "Według harmonogramu"
+  },
+  "scroll": {
+    "direction": {
+      "bottom": "dół",
+      "top": "góra"
+    },
+    "tooltip": "Naciśnij {{hotkey}}, aby przewinąć do {{direction}}"
+  },
+  "seconds": "{{count}}s",
+  "security": {
+    "actions": "Akcje",
+    "permissions": "Uprawnienia",
+    "resources": "Zasoby",
+    "roles": "Role",
+    "users": "Użytkownicy"
+  },
+  "selectLanguage": "Wybierz język",
+  "showDetailsPanel": "Pokaż panel szczegółów",
+  "source": {
+    "hide": "Ukryj źródła",
+    "hotkey": "s",
+    "show": "Pokaż źródła"
+  },
+  "sourceAssetEvent_few": "Zdarzenia źródłowych zasobów",
+  "sourceAssetEvent_many": "Zdarzeń źródłowych zasobów",
+  "sourceAssetEvent_one": "Zdarzenie źródłowego zasobu",
+  "sourceAssetEvent_other": "Zdarzenia źródłowych zasobów",
+  "startDate": "Data rozpoczęcia",
+  "state": "Stan",
+  "states": {
+    "deferred": "Odłożone",
+    "failed": "Niepowodzenie",
+    "no_status": "Brak stanu",
+    "none": "Brak stanu",
+    "queued": "W kolejce",
+    "removed": "Usunięte",
+    "restarting": "Restartowanie",
+    "running": "W trakcie",
+    "scheduled": "Zaplanowane",
+    "skipped": "Pominięte",
+    "success": "Sukces",
+    "up_for_reschedule": "Do ponownego zaplanowania",
+    "up_for_retry": "Do ponownej próby",
+    "upstream_failed": "Niepowodzenie poprzednika"
+  },
+  "switchToDarkMode": "Przełącz na tryb ciemny",
+  "switchToLightMode": "Przełącz na tryb jasny",
+  "table": {
+    "completedAt": "Zakończono o",
+    "createdAt": "Utworzono o",
+    "filterByTag": "Filtruj Dagi według etykiety",
+    "filterColumns": "Filtruj kolumny tabeli",
+    "filterReset_few": "Resetuj filtry",
+    "filterReset_many": "Resetuj filtry",
+    "filterReset_one": "Resetuj filtr",
+    "filterReset_other": "Resetuj filtry",
+    "from": "Od",
+    "maxActiveRuns": "Maksymalna liczba aktywnych wykonań",
+    "noTagsFound": "Nie znaleziono etykiet",
+    "tagMode": {
+      "all": "Wszystkie",
+      "any": "Dowolne"
+    },
+    "tagPlaceholder": "Filtruj według etykiety",
+    "to": "Do"
+  },
+  "task": {
+    "documentation": "Dokumentacja zadania",
+    "lastInstance": "Ostatnia instancja zadania",
+    "operator": "Operator",
+    "triggerRule": "Reguła zależności"
+  },
+  "task_few": "Zadania",
+  "task_many": "Zadań",
+  "task_one": "Zadanie",
+  "task_other": "Zadania",
+  "taskId": "Identyfikator Zadania",
+  "taskInstance": {
+    "dagVersion": "Wersja Daga",
+    "executor": "Wykonawca",
+    "executorConfig": "Konfiguracja wykonawcy",
+    "hostname": "Nazwa hosta",
+    "maxTries": "Maksymalna liczba prób",
+    "pid": "PID",
+    "pool": "Pula",
+    "poolSlots": "Sloty puli",
+    "priorityWeight": "Waga priorytetu",
+    "queue": "Kolejka",
+    "queuedWhen": "Zakolejkowano o",
+    "scheduledWhen": "Zaplanowano o",
+    "triggerer": {
wyzwalacz", + "class": "Klasa wyzwalacza", + "createdAt": "Czas utworzenia wyzwalacza", + "id": "Identyfikator wyzwalacza", + "latestHeartbeat": "Ostatni heartbeat wyzwalacza", + "title": "Informacje o wyzwalaczu" + }, + "unixname": "Nazwa użytkownika systemu Unix" + }, + "taskInstance_few": "Instancje Zadań", + "taskInstance_many": "Instancji Zadań", + "taskInstance_one": "Instancja Zadania", + "taskInstance_other": "Instancje Zadań", + "timeRange": { + "last12Hours": "Ostatnie 12 godzin", + "last24Hours": "Ostatnie 24 godziny", + "lastHour": "Ostatnia godzina", + "pastWeek": "Poprzedni tydzień" + }, + "timestamp": { + "hide": "Ukryj znaczniki czasu", + "hotkey": "t", + "show": "Pokaż znaczniki czasu" + }, + "timezone": "Strefa czasowa", + "timezoneModal": { + "current-timezone": "Obecny czas w", + "placeholder": "Wybierz strefę czasową", + "title": "Wybierz strefę czasową", + "utc": "UTC (Uniwersalny Czas Koordynowany)" + }, + "toaster": { + "bulkDelete": { + "error": "Nie udało się wykonać żądania masowego usunięcia {{resourceName}}", + "success": { + "description": "Pomyślnie usunięto {{count}} {{resourceName}}. Klucze: {{keys}}", + "title": "Wysłano żądanie masowego usunięcia {{resourceName}}" + } + }, + "create": { + "error": "Nie udało się wykonać żądania utworzenia {{resourceName}}", + "success": { + "description": "Pomyślnie utworzono {{resourceName}}.", + "title": "Wysłano żądanie utworzenia {{resourceName}}" + } + }, + "delete": { + "error": "Nie udało się wykonać żądania usunięcia {{resourceName}}", + "success": { + "description": "Pomyślnie usunięto {{resourceName}}.", + "title": "Wysłano żądanie usunięcia {{resourceName}}" + } + }, + "import": { + "error": "Nie udało się wykonać żądania importu {{resourceName}}", + "success": { + "description": "Pomyślnie zaimportowano {{count}} {{resourceName}}.", + "title": "Wysłano żądanie importu {{resourceName}}" + } + }, + "update": { + "error": "Nie udało się wykonać żądania aktualizacji {{resourceName}}", + "success": { + "description": "Pomyślnie zaktualizowano {{resourceName}}.", + "title": "Wysłano żądanie aktualizacji {{resourceName}}" + } + } + }, + "total": "Wszystkie {{state}}", + "triggered": "Uruchomiony", + "tryNumber": "Numer próby", + "user": "Profil", + "wrap": { + "hotkey": "w", + "tooltip": "Wybierz {{hotkey}} aby przełączyć zawijanie", + "unwrap": "Wyłącz zawijanie", + "wrap": "Włącz zawijanie" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json new file mode 100644 index 0000000000000..0dc4b8699757d --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json @@ -0,0 +1,150 @@ +{ + "backfill": { + "affected_few": "{{count}} wykonania zostaną uruchomione.", + "affected_many": "{{count}} wykonań zostanie uruchomionych.", + "affected_one": "Dotyczy 1 wykonania.", + "affected_other": "Dotyczy {{count}} wykonań.", + "affectedNone": "Brak wykonań spełniających wybrane kryteria.", + "allRuns": "Wszystkie wykonania", + "backwards": "Uruchom wstecz", + "dateRange": "Zakres dat", + "dateRangeFrom": "Od", + "dateRangeTo": "Do", + "errorStartDateBeforeEndDate": "Data początkowa musi być wcześniejsza niż data końcowa", + "maxRuns": "Maksymalna liczba aktywnych wykonań", + "missingAndErroredRuns": "Brakujące i błędne wykonania", + "missingRuns": "Brakujące wykonania", + "reprocessBehavior": "Zachowanie ponownego przetwarzania", + "run": "Uruchom ponowne przetwarzanie", + "selectDescription": "Uruchom ten 
+    "selectDescription": "Uruchom ten Dag dla zakresu dat",
+    "selectLabel": "Wypełnienie wsteczne",
+    "title": "Uruchom wypełnienie wsteczne",
+    "toaster": {
+      "success": {
+        "description": "Ponowne przetworzenie zostało uruchomione.",
+        "title": "Ponowne przetwarzanie uruchomione"
+      }
+    },
+    "tooltip": "Ponowne przetworzenie wymaga harmonogramu",
+    "unpause": "Wznów {{dag_display_name}} przy wykonaniu",
+    "validation": {
+      "datesRequired": "Należy podać zarówno datę początkową, jak i końcową interwału danych.",
+      "startBeforeEnd": "Data początkowa interwału danych musi być wcześniejsza lub równa dacie końcowej."
+    }
+  },
+  "banner": {
+    "backfillInProgress": "Wypełnienie wsteczne w toku",
+    "cancel": "Anuluj wypełnienie wsteczne",
+    "pause": "Wstrzymaj wypełnienie wsteczne",
+    "unpause": "Wznów wypełnienie wsteczne"
+  },
+  "clipboard": {
+    "copy": "Kopiuj"
+  },
+  "close": "Zamknij",
+  "configForm": {
+    "advancedOptions": "Zaawansowane opcje",
+    "configJson": "Konfiguracja JSON",
+    "invalidJson": "Nieprawidłowy format JSON: {{errorMessage}}"
+  },
+  "dagWarnings": {
+    "error_few": "{{count}} Błędy",
+    "error_many": "{{count}} Błędów",
+    "error_one": "1 Błąd",
+    "error_other": "{{count}} Błędy",
+    "errorAndWarning": "1 Błąd i {{warningText}}",
+    "warning_few": "{{count}} Ostrzeżenia",
+    "warning_many": "{{count}} Ostrzeżeń",
+    "warning_one": "1 Ostrzeżenie",
+    "warning_other": "{{count}} Ostrzeżenia"
+  },
+  "durationChart": {
+    "duration": "Czas trwania (sekundy)",
+    "lastDagRun_few": "Ostatnie {{count}} wykonania",
+    "lastDagRun_many": "Ostatnich {{count}} wykonań",
+    "lastDagRun_one": "Ostatnie wykonanie",
+    "lastDagRun_other": "Ostatnie {{count}} wykonania",
+    "lastTaskInstance_few": "Ostatnie {{count}} instancje zadania",
+    "lastTaskInstance_many": "Ostatnich {{count}} instancji zadania",
+    "lastTaskInstance_one": "Ostatnia instancja zadania",
+    "lastTaskInstance_other": "Ostatnie {{count}} instancje zadania",
+    "queuedDuration": "Czas oczekiwania",
+    "runAfter": "Uruchom po",
+    "runDuration": "Czas trwania wykonania"
+  },
+  "fileUpload": {
+    "files_few": "{{count}} pliki",
+    "files_many": "{{count}} plików",
+    "files_one": "{{count}} plik",
+    "files_other": "{{count}} plików"
+  },
+  "flexibleForm": {
+    "placeholder": "Wybierz wartość",
+    "placeholderArray": "Wprowadź każdy ciąg w nowej linii",
+    "placeholderExamples": "Zacznij pisać, aby zobaczyć opcje",
+    "placeholderMulti": "Wybierz jedną lub wiele wartości",
+    "validationErrorArrayNotArray": "Wartość musi być tablicą.",
+    "validationErrorArrayNotNumbers": "Wszystkie elementy w tablicy muszą być liczbami.",
+    "validationErrorArrayNotObject": "Wszystkie elementy w tablicy muszą być obiektami.",
+    "validationErrorRequired": "To pole jest wymagane"
+  },
+  "graph": {
+    "directionDown": "Od góry do dołu",
+    "directionLeft": "Od prawej do lewej",
+    "directionRight": "Od lewej do prawej",
+    "directionUp": "Od dołu do góry",
+    "downloadImage": "Pobierz obraz grafu",
+    "downloadImageError": "Nie udało się pobrać obrazu grafu.",
+    "downloadImageErrorTitle": "Pobieranie nieudane",
+    "otherDagRuns": "+Inne wykonania",
+    "taskCount_few": "{{count}} Zadania",
+    "taskCount_many": "{{count}} Zadań",
+    "taskCount_one": "{{count}} Zadanie",
+    "taskCount_other": "{{count}} Zadań",
+    "taskGroup": "Grupa zadań"
+  },
+  "limitedList": "+{{count}} więcej",
+  "logs": {
+    "file": "Plik",
+    "location": "linia {{line}} w {{name}}"
+  },
+  "reparseDag": "Ponowne przetworzenie Daga",
+  "sortedAscending": "posortowane rosnąco",
+  "sortedDescending": "posortowane malejąco",
+  "sortedUnsorted": "nieposortowane",
+  "taskTries": 
"Próby zadania", + "toggleCardView": "Pokaż widok kart", + "toggleTableView": "Pokaż widok tabeli", + "triggerDag": { + "button": "Uruchom Daga", + "loading": "Ładowanie informacji o Dagach...", + "loadingFailed": "Nie udało się załadować informacji o Dagach. Spróbuj ponownie.", + "runIdHelp": "Opcjonalne - zostanie wygenerowane, jeśli nie podano", + "selectDescription": "Wyzwól pojedyncze wykonanie", + "selectLabel": "Pojedyncze wykonanie", + "title": "Uruchom Daga", + "toaster": { + "success": { + "description": "Dag został pomyślnie uruchomiony.", + "title": "Uruchomienie Daga" + } + }, + "unpause": "Wznów {{dagDisplayName}} przy wykonaniu" + }, + "trimText": { + "details": "Szczegóły", + "empty": "Pusty", + "noContent": "Brak treści" + }, + "versionDetails": { + "bundleLink": "Link do pakietu", + "bundleName": "Nazwa pakietu", + "bundleVersion": "Wersja pakietu", + "createdAt": "Utworzono", + "versionId": "Identyfikator wersji" + }, + "versionSelect": { + "dagVersion": "Wersja Daga", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dag.json new file mode 100644 index 0000000000000..be381ec582a3d --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dag.json @@ -0,0 +1,126 @@ +{ + "allRuns": "Wszystkie wykonania", + "blockingDeps": { + "dependency": "Zależność", + "reason": "Powód", + "title": "Zależności blokujące zaplanowanie zadania" + }, + "code": { + "bundleUrl": "Adres URL paczki Dagów", + "noCode": "Nie znaleziono kodu", + "parsedAt": "Przeanalizowano o:" + }, + "extraLinks": "Dodatkowe linki", + "grid": { + "buttons": { + "resetToLatest": "Przywróć do najnowszego", + "toggleGroup": "Przełącz grupę" + } + }, + "header": { + "buttons": { + "advanced": "Zaawansowane", + "dagDocs": "Dokumentacja Daga" + } + }, + "logs": { + "allLevels": "Wszystkie poziomy logowania", + "allSources": "Wszystkie źródła", + "critical": "CRITICAL", + "debug": "DEBUG", + "error": "ERROR", + "fullscreen": { + "button": "Pełny ekran", + "tooltip": "Naciśnij {{hotkey}}, aby przejść do pełnego ekranu" + }, + "info": "INFO", + "noTryNumber": "Brak numeru próby", + "settings": "Ustawienia logowania", + "viewInExternal": "Zobacz logi w {{name}} (próba {{attempt}})", + "warning": "WARNING" + }, + "overview": { + "buttons": { + "failedRun_few": "Nieudane wykonania", + "failedRun_many": "Nieudanych wykonań", + "failedRun_one": "Nieudane wykonanie", + "failedRun_other": "Nieudane wykonania", + "failedTask_few": "Nieudane zadania", + "failedTask_many": "Nieudanych zadań", + "failedTask_one": "Nieudane zadanie", + "failedTask_other": "Nieudane zadania", + "failedTaskInstance_few": "Nieudane instancje zadań", + "failedTaskInstance_many": "Nieudanych instancji zadań", + "failedTaskInstance_one": "Nieudana instancja zadania", + "failedTaskInstance_other": "Nieudane instancje zadań" + }, + "charts": { + "assetEvent_few": "Utworzone zdarzenia zasobów", + "assetEvent_many": "Utworzonych zdarzeń zasobów", + "assetEvent_one": "Utworzone zdarzenie zasobu", + "assetEvent_other": "Utworzone zdarzenia zasobów" + }, + "failedLogs": { + "title": "Ostatnie logi nieudanych zadań", + "viewFullLogs": "Zobacz pełne logi" + } + }, + "panel": { + "buttons": { + "options": "Opcje", + "showGraph": "Pokaż graf", + "showGrid": "Pokaż siatkę" + }, + "dagRuns": { + "label": "Liczba wykonań Daga" + }, + "dependencies": { + "label": "Zależności", + "options": { + "allDagDependencies": "Wszystkie zależności 
Daga", + "externalConditions": "Warunki zewnętrzne", + "onlyTasks": "Tylko zadania" + }, + "placeholder": "Zależności" + }, + "graphDirection": { + "label": "Kierunek grafu" + } + }, + "paramsFailed": "Nie udało się załadować parametrów", + "parse": { + "toaster": { + "error": { + "description": "Nie udało się przetworzyć żądania prztworzenia Daga. Mogą istnieć oczekujące żądania przetworznia Dagów.", + "title": "Nie udało się ponownie przetworzyć Daga" + }, + "success": { + "description": "Dag zostanie wkrótce ponownie przetworzony.", + "title": "Żądanie ponownego przetworzenia Daga zostało pomyślnie wysłane" + } + } + }, + "tabs": { + "assetEvents": "Zdarzenia zasobów", + "auditLog": "Log audytu", + "backfills": "Wypełnienia wsteczne", + "code": "Kod", + "details": "Szczegóły", + "logs": "Logi", + "mappedTaskInstances_few": "Instancje zadań [{{count}}]", + "mappedTaskInstances_many": "Instancji zadań [{{count}}]", + "mappedTaskInstances_one": "Instancja zadania [{{count}}]", + "mappedTaskInstances_other": "Instancje zadań [{{count}}]", + "overview": "Przegląd", + "renderedTemplates": "Wyrenderowane szablony", + "requiredActions": "Wymagane akcje", + "runs": "Wykonania", + "taskInstances": "Instancje zadań", + "tasks": "Zadania", + "xcom": "XCom" + }, + "taskGroups": { + "collapseAll": "Zwiń wszystkie grupy zadań", + "expandAll": "Rozwiń wszystkie grupy zadań" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dags.json new file mode 100644 index 0000000000000..9ca67b21a5be2 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dags.json @@ -0,0 +1,97 @@ +{ + "assetSchedule": "Uaktualniono {{count}} z {{total}} zasobów", + "dagActions": { + "delete": { + "button": "Usuń Daga", + "warning": "Wszystkie metadane, włączając metadane Dagów, wykonań Dagów i zadań zostaną usunięte." + } + }, + "favoriteDag": "Ulubiony Dag", + "filters": { + "allRunTypes": "Wszystkie Typy Wykonań Dagów", + "allStates": "Wszystkie Stany", + "favorite": { + "all": "Wszystkie", + "favorite": "Ulubione", + "unfavorite": "Nieulubione" + }, + "paused": { + "active": "Aktywne", + "all": "Wszystkie", + "paused": "Wstrzymane" + }, + "runIdPatternFilter": "Szukaj Wykonań Dagów", + "triggeringUserNameFilter": "Szukaj według użytkownika wywołującego" + }, + "ownerLink": "Link do właściciela {{owner}}", + "runAndTaskActions": { + "affectedTasks": { + "noItemsFound": "Nie znaleziono zadań.", + "title": "Liczba wybranych zadań: {{count}}" + }, + "clear": { + "button": "Wyczyść {{type}}", + "buttonTooltip": "Użyj shift+c żeby wyczyścić", + "error": "Nie udało się wyczyścić {{type}}", + "title": "Wyczyść {{type}}" + }, + "delete": { + "button": "Usuń {{type}}", + "dialog": { + "resourceName": "{{type}} {{id}}", + "title": "Usuń {{type}}", + "warning": "Wzystkie metadane związane z tym {{type}} zostaną usunięte" + }, + "error": "Błąd usuwania {{type}}", + "success": { + "description": "{{type}} został usunięty pomyślnie.", + "title": "{{type}} usunięto." 
+      }
+    },
+    "markAs": {
+      "button": "Ustaw {{type}} jako...",
+      "buttonTooltip": {
+        "failed": "Użyj shift+f, aby oznaczyć jako nieudane",
+        "success": "Użyj shift+s, aby oznaczyć jako udane"
+      },
+      "title": "Ustaw {{type}} jako {{state}}"
+    },
+    "options": {
+      "downstream": "Zadania podrzędne",
+      "existingTasks": "Wyczyść istniejące zadania",
+      "future": "Przyszłe zadania",
+      "onlyFailed": "Wyczyść tylko nieudane zadania",
+      "past": "Przeszłe zadania",
+      "queueNew": "Kolejkuj nowe zadania",
+      "runOnLatestVersion": "Uruchom w najnowszej wersji paczki Dagów",
+      "upstream": "Zadania nadrzędne"
+    }
+  },
+  "search": {
+    "advanced": "Wyszukiwanie zaawansowane",
+    "clear": "Wyczyść wyszukiwanie",
+    "dags": "Szukaj Dagów",
+    "hotkey": "+K",
+    "tasks": "Szukaj instancji zadań"
+  },
+  "sort": {
+    "displayName": {
+      "asc": "Sortuj według Nazwy Wyświetlanej (A-Z)",
+      "desc": "Sortuj według Nazwy Wyświetlanej (Z-A)"
+    },
+    "lastRunStartDate": {
+      "asc": "Sortuj według Daty Rozpoczęcia Ostatniego Wykonania (Najwcześniejsze-Najnowsze)",
+      "desc": "Sortuj według Daty Rozpoczęcia Ostatniego Wykonania (Najnowsze-Najwcześniejsze)"
+    },
+    "lastRunState": {
+      "asc": "Sortuj według Stanu Ostatniego Wykonania (A-Z)",
+      "desc": "Sortuj według Stanu Ostatniego Wykonania (Z-A)"
+    },
+    "nextDagRun": {
+      "asc": "Sortuj według Następnego Wykonania (Najwcześniejsze-Najnowsze)",
+      "desc": "Sortuj według Następnego Wykonania (Najnowsze-Najwcześniejsze)"
+    },
+    "placeholder": "Sortuj według"
+  },
+  "unfavoriteDag": "Usuń Daga z ulubionych"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dashboard.json
new file mode 100644
index 0000000000000..7b457bd5a6a39
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dashboard.json
@@ -0,0 +1,49 @@
+{
+  "favorite": {
+    "favoriteDags_few": "Pierwsze {{count}} ulubione Dagi",
+    "favoriteDags_many": "Pierwsze {{count}} ulubionych Dagów",
+    "favoriteDags_one": "Pierwszy ulubiony Dag",
+    "favoriteDags_other": "Pierwsze {{count}} ulubione Dagi",
+    "noDagRuns": "Brak uruchomień dla tego Daga.",
+    "noFavoriteDags": "Brak ulubionych. Kliknij ikonę gwiazdki obok Daga na liście, aby dodać go do ulubionych."
+  },
+  "group": "Grupa",
+  "health": {
+    "dagProcessor": "Procesor Dagów",
+    "health": "Stan Systemu",
+    "healthy": "Prawidłowy",
+    "lastHeartbeat": "Ostatni czas sprawdzenia",
+    "metaDatabase": "Baza Danych Meta",
+    "scheduler": "Planer zadań",
+    "status": "Stan",
+    "triggerer": "Wyzwalacz",
+    "unhealthy": "Nieprawidłowy"
+  },
+  "history": "Historia",
+  "importErrors": {
+    "dagImportError_few": "Błędy importu Dagów",
+    "dagImportError_many": "Błędów importu Dagów",
+    "dagImportError_one": "Błąd importu Daga",
+    "dagImportError_other": "Błędy importu Dagów",
+    "searchByFile": "Szukaj według plików",
+    "timestamp": "Znacznik czasu"
+  },
+  "managePools": "Zarządzaj pulami",
+  "noAssetEvents": "Nie znaleziono zdarzeń zasobów.",
+  "poolSlots": "Sloty puli",
+  "sortBy": {
+    "newestFirst": "Najnowsze Pierwsze",
+    "oldestFirst": "Najstarsze Pierwsze"
+  },
+  "source": "Źródło",
+  "stats": {
+    "activeDags": "Aktywne Dagi",
+    "failedDags": "Nieudane Dagi",
+    "queuedDags": "Zakolejkowane Dagi",
+    "requiredActions": "Wymagane akcje",
+    "runningDags": "Wykonywane Dagi",
+    "stats": "Statystyki"
+  },
+  "uri": "Uri",
+  "welcome": "Witaj"
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/hitl.json
new file mode 100644
index 0000000000000..aeebd54f35c10
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/hitl.json
@@ -0,0 +1,25 @@
+{
+  "requiredAction_few": "Wymagane akcje",
+  "requiredAction_many": "Wymaganych akcji",
+  "requiredAction_one": "Wymagana akcja",
+  "requiredAction_other": "Wymagane akcje",
+  "requiredActionState": "Stan wymaganej akcji",
+  "response": {
+    "error": "Nie udało się uzyskać odpowiedzi",
+    "optionsDescription": "Wybierz opcje dla tej instancji zadania",
+    "optionsLabel": "Opcje",
+    "received": "Odpowiedź otrzymana o ",
+    "respond": "Odpowiedz",
+    "success": "Odpowiedź dla {{taskId}} została pomyślnie wysłana",
+    "title": "Instancja zadania wymagająca interwencji człowieka - {{taskId}}"
+  },
+  "state": {
+    "approvalReceived": "Zatwierdzenie otrzymane",
+    "approvalRequired": "Wymagane zatwierdzenie",
+    "choiceReceived": "Wybór otrzymany",
+    "choiceRequired": "Wymagany wybór",
+    "rejectionReceived": "Odrzucenie otrzymane",
+    "responseReceived": "Odpowiedź otrzymana",
+    "responseRequired": "Wymagana odpowiedź"
+  }
+}
diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/admin.json
new file mode 100644
index 0000000000000..fa2e576915ad1
--- /dev/null
+++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/admin.json
@@ -0,0 +1,167 @@
+{
+  "columns": {
+    "description": "描述",
+    "key": "鍵",
+    "name": "名稱",
+    "value": "值"
+  },
+  "config": {
+    "columns": {
+      "section": "區段"
+    },
+    "title": "Airflow 設定"
+  },
+  "connections": {
+    "add": "新增連線",
+    "columns": {
+      "connectionId": "連線 ID",
+      "connectionType": "連線類型",
+      "host": "主機",
+      "port": "埠"
+    },
+    "connection_one": "連線",
+    "connection_other": "連線",
+    "delete": {
+      "deleteConnection_one": "刪除 1 個連線",
+      "deleteConnection_other": "刪除 {{count}} 個連線",
+      "firstConfirmMessage_one": "您即將刪除以下連線:",
+      "firstConfirmMessage_other": "您即將刪除以下連線:",
+      "title": "刪除連線"
+    },
+    "edit": "編輯連線",
+    "form": {
+      "connectionIdRequired": "連線 ID 是必填的",
+      "connectionIdRequirement": "連線 ID 不能只包含空格",
+      "connectionTypeRequired": "連線類型是必填的",
+      "extraFields": "額外欄位",
+      "extraFieldsJson": "額外欄位 JSON",
+      "helperText": "找不到連線類型?請確保您已安裝對應的 Airflow Providers 套件。",
+      "helperTextForRedactedFields": "已遮蔽的欄位 
('***') 若未修改,將保持不變。", + "selectConnectionType": "選擇連線類型", + "standardFields": "標準欄位" + }, + "nothingFound": { + "description": "透過環境變數或密鑰管理器定義的連線不會列在此處。", + "documentationLink": "在 Airflow 文件中了解更多。", + "learnMore": "這些連線會在執行時間解析,不會在 UI 顯示。", + "title": "找不到連線" + }, + "searchPlaceholder": "搜尋連線", + "test": "測試連線", + "testDisabled": "測試連線功能已停用。請聯繫管理員以啟用。", + "typeMeta": { + "error": "取得連線類型中繼資料失敗", + "standardFields": { + "description": "描述", + "host": "主機", + "login": "登入", + "password": "密碼", + "port": "埠", + "url_schema": "Schema" + } + } + }, + "deleteActions": { + "button": "刪除", + "modal": { + "confirmButton": "確定刪除", + "secondConfirmMessage": "此動作無法復原。", + "thirdConfirmMessage": "您確定要繼續嗎?" + }, + "selected": "已選取", + "tooltip": "刪除所選連線" + }, + "formActions": { + "reset": "重置", + "save": "儲存" + }, + "plugins": { + "columns": { + "source": "來源" + }, + "importError_one": "外掛匯入錯誤", + "importError_other": "外掛匯入錯誤", + "searchPlaceholder": "搜尋檔案" + }, + "pools": { + "add": "新增資源池", + "deferredSlotsIncluded": "包含延後任務", + "delete": { + "title": "刪除資源池", + "warning": "這將刪除所有與此資源池相關的系統資料,可能會影響使用此資源池的任務。" + }, + "edit": "編輯資源池", + "form": { + "checkbox": "計算可用資源池配額時,將包含延後的任務", + "description": "描述", + "includeDeferred": "包含延後任務", + "nameMaxLength": "名稱最多只能包含 256 個字元", + "nameRequired": "名稱是必填的", + "slots": "配額" + }, + "noPoolsFound": "找不到資源池", + "pool_one": "資源池", + "pool_other": "資源池", + "searchPlaceholder": "搜尋資源池", + "sort": { + "asc": "名稱 (A-Z)", + "desc": "名稱 (Z-A)", + "placeholder": "排序方式" + } + }, + "providers": { + "columns": { + "packageName": "套件名稱", + "version": "版本" + } + }, + "variables": { + "add": "新增變數", + "columns": { + "isEncrypted": "是否加密" + }, + "delete": { + "deleteVariable_one": "刪除 1 個變數", + "deleteVariable_other": "刪除 {{count}} 個變數", + "firstConfirmMessage_one": "您即將刪除以下變數:", + "firstConfirmMessage_other": "您即將刪除以下變數:", + "title": "刪除變數", + "tooltip": "刪除所選變數" + }, + "edit": "編輯變數", + "export": "匯出", + "exportTooltip": "匯出所選變數", + "form": { + "invalidJson": "無效的 JSON", + "keyMaxLength": "鍵最多只能包含 250 個字元", + "keyRequired": "鍵是必填的", + "valueRequired": "值是必填的" + }, + "import": { + "button": "匯入", + "conflictResolution": "選擇變數衝突解決方式", + "errorParsingJsonFile": "解析 JSON 檔案時發生錯誤:請上傳包含變數的 JSON 檔案 (例如:{\"key\": \"value\", ...})。", + "options": { + "fail": { + "description": "如果偵測到任何已存在的變數,則匯入失敗。", + "title": "失敗" + }, + "overwrite": { + "description": "發生衝突時覆蓋變數。", + "title": "覆蓋" + }, + "skip": { + "description": "略過匯入已存在的變數。", + "title": "跳過" + } + }, + "title": "匯入變數", + "upload": "上傳 JSON 檔案", + "uploadPlaceholder": "上傳包含變數的 JSON 檔案 (例如:{\"key\": \"value\", ...})" + }, + "noRowsMessage": "找不到變數", + "searchPlaceholder": "搜尋鍵", + "variable_one": "變數", + "variable_other": "變數" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/assets.json new file mode 100644 index 0000000000000..bf5daf90e0629 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/assets.json @@ -0,0 +1,30 @@ +{ + "consumingDags": "消費者 Dags", + "createEvent": { + "button": "建立事件", + "manual": { + "description": "手動建立資源事件", + "extra": "資源事件額外資訊", + "label": "手動" + }, + "materialize": { + "description": "觸發資源上游的 Dag", + "descriptionWithDag": "觸發此資源上游的 Dag: {{dagName}}", + "label": "實體化", + "unpauseDag": "觸發時取消暫停 {{dagName}}" + }, + "success": { + "manualDescription": "已成功手動建立資源事件。", + "manualTitle": "已建立資源事件", + "materializeDescription": "已成功觸發上游 Dag {{dagId}}。", + "materializeTitle": "正在實體化資源" + }, + 
"title": "為 {{name}} 建立資源事件" + }, + "group": "群組", + "lastAssetEvent": "最後資源事件", + "name": "名稱", + "producingTasks": "生產任務", + "scheduledDags": "已排程的 Dags", + "searchPlaceholder": "搜尋資源" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/browse.json new file mode 100644 index 0000000000000..8cd520c09cb48 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog": { + "actions": { + "collapseAllExtra": "收合所有額外 JSON", + "expandAllExtra": "展開所有額外 JSON" + }, + "columns": { + "event": "事件", + "extra": "額外資訊", + "user": "使用者", + "when": "時間" + }, + "title": "審計日誌事件" + }, + "xcom": { + "columns": { + "dag": "Dag", + "key": "鍵", + "value": "值" + }, + "title": "XCom" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/common.json new file mode 100644 index 0000000000000..1423588781385 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/common.json @@ -0,0 +1,302 @@ +{ + "admin": { + "Config": "設定", + "Connections": "連線", + "Plugins": "外掛", + "Pools": "資源池", + "Providers": "Providers", + "Variables": "變數" + }, + "asset_one": "資源", + "asset_other": "資源", + "assetEvent_one": "資源事件", + "assetEvent_other": "資源事件", + "backfill_one": "回填", + "backfill_other": "回填", + "browse": { + "auditLog": "審計日誌", + "requiredActions": "待回應的任務實例", + "xcoms": "XComs" + }, + "collapseDetailsPanel": "收起詳細資訊", + "createdAssetEvent_one": "已建立資源事件", + "createdAssetEvent_other": "已建立資源事件", + "dag_one": "Dag", + "dag_other": "Dags", + "dagDetails": { + "catchup": "自動回填", + "concurrency": "並行數", + "dagRunTimeout": "Dag 執行超時", + "defaultArgs": "預設參數", + "description": "描述", + "documentation": "Dag 文件", + "fileLocation": "檔案位置", + "hasTaskConcurrencyLimits": "有任務並行數限制", + "lastExpired": "最後過期時間", + "lastParsed": "最後解析時間", + "latestDagVersion": "最新 Dag 版本", + "latestRun": "上次 Dag 執行", + "maxActiveRuns": "活躍執行數上限", + "maxActiveTasks": "活躍任務數上限", + "maxConsecutiveFailedDagRuns": "連續失敗執行數上限", + "nextRun": "下次 Dag 執行", + "owner": "擁有者", + "params": "參數", + "schedule": "排程", + "tags": "標籤" + }, + "dagId": "Dag ID", + "dagRun": { + "conf": "設定", + "dagVersions": "Dag 版本", + "dataIntervalEnd": "資料區間結束", + "dataIntervalStart": "資料區間起始", + "lastSchedulingDecision": "最後排程決策", + "queuedAt": "開始排隊時間", + "runAfter": "最早可執行時間", + "runType": "執行類型", + "sourceAssetEvent": "來源資源事件", + "triggeredBy": "觸發者", + "triggeringUser": "觸發使用者名稱" + }, + "dagRun_one": "Dag 執行", + "dagRun_other": "Dag 執行", + "dagRunId": "Dag 執行 ID", + "dagWarnings": "Dag 警告 / 錯誤", + "defaultToGraphView": "預設使用圖形視圖", + "defaultToGridView": "預設使用網格視圖", + "direction": "書寫方向", + "docs": { + "documentation": "文件", + "githubRepo": "GitHub 倉庫", + "restApiReference": "REST API 參考" + }, + "duration": "執行時間", + "endDate": "結束日期", + "error": { + "back": "返回", + "defaultMessage": "發生未預期的錯誤", + "home": "首頁", + "notFound": "找不到頁面", + "title": "錯誤" + }, + "expand": { + "collapse": "收合", + "expand": "展開", + "hotkey": "e", + "tooltip": "按下 {{hotkey}} 切換展開" + }, + "expression": { + "all": "全部", + "and": "且", + "any": "任何", + "or": "或" + }, + "logicalDate": "邏輯日期", + "logout": "登出", + "logoutConfirmation": "確定要登出嗎?", + "mapIndex": "映射索引", + "modal": { + "cancel": "取消", + "confirm": "確認", + "delete": { + "button": "刪除", + "confirmation": "確定要刪除 {{resourceName}} 嗎?此操作無法還原。" + } + }, + "nav": { + "admin": "管理者", + "assets": "資源", + "browse": 
"瀏覽", + "dags": "Dags", + "docs": "文件", + "home": "首頁", + "legacyFabViews": "舊版檢視", + "plugins": "插件", + "security": "安全" + }, + "noItemsFound": "找不到 {{modelName}}", + "note": { + "add": "新增筆記", + "dagRun": "Dag 執行筆記", + "label": "筆記", + "placeholder": "新增筆記...", + "taskInstance": "任務實例筆記" + }, + "pools": { + "deferred": "已延後", + "open": "開放", + "pools_one": "資源池", + "pools_other": "資源池", + "queued": "排隊中", + "running": "執行中", + "scheduled": "已排程" + }, + "runId": "執行 ID", + "runTypes": { + "asset_triggered": "資源觸發", + "backfill": "回填", + "manual": "手動觸發", + "scheduled": "已排程" + }, + "scroll": { + "direction": { + "bottom": "最下方", + "top": "最上方" + }, + "tooltip": "按 {{hotkey}} 捲動到{{direction}}" + }, + "seconds": "{{count}} 秒", + "security": { + "actions": "操作", + "permissions": "權限", + "resources": "資源", + "roles": "角色", + "users": "使用者" + }, + "selectLanguage": "選擇語言", + "showDetailsPanel": "顯示詳細資訊", + "source": { + "hide": "隱藏來源", + "hotkey": "s", + "show": "顯示來源" + }, + "sourceAssetEvent_one": "來源資源事件", + "sourceAssetEvent_other": "來源資源事件", + "startDate": "開始日期", + "state": "狀態", + "states": { + "deferred": "已延後", + "failed": "失敗", + "no_status": "無狀態", + "none": "無狀態", + "queued": "排隊中", + "removed": "已移除", + "restarting": "重啟中", + "running": "執行中", + "scheduled": "已排程", + "skipped": "已跳過", + "success": "成功", + "up_for_reschedule": "等待重新排程", + "up_for_retry": "等待重試", + "upstream_failed": "上游任務失敗" + }, + "switchToDarkMode": "切換到深色模式", + "switchToLightMode": "切換到淺色模式", + "table": { + "completedAt": "完成時間", + "createdAt": "建立時間", + "filterByTag": "依標籤篩選 Dags", + "filterColumns": "篩選表格欄位", + "filterReset_one": "重置篩選", + "filterReset_other": "重置篩選", + "from": "從", + "maxActiveRuns": "最大活躍執行數", + "noTagsFound": "找不到標籤", + "tagMode": { + "all": "全部", + "any": "任何" + }, + "tagPlaceholder": "依標籤篩選", + "to": "到" + }, + "task": { + "documentation": "任務文件", + "lastInstance": "最後實例", + "operator": "任務操作器", + "triggerRule": "觸發規則" + }, + "task_one": "任務", + "task_other": "任務", + "taskId": "任務 ID", + "taskInstance": { + "dagVersion": "Dag 版本", + "executor": "執行器", + "executorConfig": "執行器設定", + "hostname": "主機名稱", + "maxTries": "最大嘗試次數", + "pid": "PID", + "pool": "資源池", + "poolSlots": "資源池配額", + "priorityWeight": "優先權權重", + "queue": "排隊", + "queuedWhen": "開始排隊時間", + "scheduledWhen": "開始排程時間", + "triggerer": { + "assigned": "指派的觸發器", + "class": "觸發器類別", + "createdAt": "觸發器建立時間", + "id": "觸發器 ID", + "latestHeartbeat": "最新觸發器心跳時間", + "title": "觸發器資訊" + }, + "unixname": "Unix 名稱" + }, + "taskInstance_one": "任務實例", + "taskInstance_other": "任務實例", + "timeRange": { + "last12Hours": "最近 12 小時", + "last24Hours": "最近 24 小時", + "lastHour": "最近 1 小時", + "pastWeek": "過去一週" + }, + "timestamp": { + "hide": "隱藏時間戳記", + "hotkey": "t", + "show": "顯示時間戳記" + }, + "timezone": "時區", + "timezoneModal": { + "current-timezone": "目前時區", + "placeholder": "搜尋時區", + "title": "選擇時區", + "utc": "UTC" + }, + "toaster": { + "bulkDelete": { + "error": "批次刪除 {{resourceName}} 請求失敗", + "success": { + "description": "已成功刪除 {{count}} 個 {{resourceName}}。鍵:{{keys}}", + "title": "已提交批次刪除 {{resourceName}} 請求" + } + }, + "create": { + "error": "建立 {{resourceName}} 請求失敗", + "success": { + "description": "{{resourceName}} 已成功建立。", + "title": "已提交建立 {{resourceName}} 請求" + } + }, + "delete": { + "error": "刪除 {{resourceName}} 請求失敗", + "success": { + "description": "{{resourceName}} 已成功刪除。", + "title": "已提交刪除 {{resourceName}} 請求" + } + }, + "import": { + "error": "匯入 {{resourceName}} 請求失敗", + "success": { + "description": "已成功匯入 {{count}} 個 
{{resourceName}}。", + "title": "已提交匯入 {{resourceName}} 請求" + } + }, + "update": { + "error": "更新 {{resourceName}} 請求失敗", + "success": { + "description": "{{resourceName}} 已成功更新。", + "title": "已提交更新 {{resourceName}} 請求" + } + } + }, + "total": "總計 {{state}}", + "triggered": "已觸發", + "tryNumber": "嘗試次數", + "user": "使用者", + "wrap": { + "hotkey": "w", + "tooltip": "按 {{hotkey}} 切換換行", + "unwrap": "不換行", + "wrap": "換行" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json new file mode 100644 index 0000000000000..77075ae59494d --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json @@ -0,0 +1,134 @@ +{ + "backfill": { + "affected_one": "將會觸發 1 次執行。", + "affected_other": "將會觸發 {{count}} 次執行。", + "affectedNone": "沒有符合條件的執行。", + "allRuns": "所有執行", + "backwards": "反向執行", + "dateRange": "日期範圍", + "dateRangeFrom": "從", + "dateRangeTo": "到", + "errorStartDateBeforeEndDate": "開始日期必須早於結束日期。", + "maxRuns": "活躍執行數上限", + "missingAndErroredRuns": "遺漏和錯誤的執行", + "missingRuns": "遺漏的執行", + "reprocessBehavior": "重新處理行為", + "run": "執行回填", + "selectDescription": "為指定的日期範圍補上 Dag 執行", + "selectLabel": "回填", + "title": "執行回填", + "toaster": { + "success": { + "description": "已成功觸發回填作業。", + "title": "已觸發 Dag 執行" + } + }, + "tooltip": "回填功能需要 Dag 具有排程", + "unpause": "觸發時取消暫停 {{dag_display_name}}", + "validation": { + "datesRequired": "必須提供資料區間的開始與結束日期。", + "startBeforeEnd": "資料區間起始日期必須早於或等於結束日期。" + } + }, + "banner": { + "backfillInProgress": "回填正在進行中", + "cancel": "取消回填", + "pause": "暫停回填", + "unpause": "取消暫停回填" + }, + "clipboard": { + "copy": "複製" + }, + "close": "關閉", + "configForm": { + "advancedOptions": "進階選項", + "configJson": "設定 JSON", + "invalidJson": "無效的 JSON 格式: {{errorMessage}}" + }, + "dagWarnings": { + "error_one": "1 個錯誤", + "errorAndWarning": "1 個錯誤與 {{warningText}}", + "warning_one": "1 個警告", + "warning_other": "{{count}} 個警告" + }, + "durationChart": { + "duration": "持續時間 (秒)", + "lastDagRun_one": "最近 1 次 Dag 執行", + "lastDagRun_other": "最近 {{count}} 次 Dag 執行", + "lastTaskInstance_one": "最近 1 次任務實例", + "lastTaskInstance_other": "最近 {{count}} 次任務實例", + "queuedDuration": "排隊等候時間", + "runAfter": "最早可執行時間", + "runDuration": "執行持續時間" + }, + "fileUpload": { + "files_other": "{{count}} 個檔案" + }, + "flexibleForm": { + "placeholder": "請選擇一個值", + "placeholderArray": "請逐行輸入,每行輸入一個字串", + "placeholderExamples": "開始輸入以查看選項", + "placeholderMulti": "可選擇單一或多個值", + "validationErrorArrayNotArray": "值必須是陣列格式。", + "validationErrorArrayNotNumbers": "陣列中的所有元素都必須是數字。", + "validationErrorArrayNotObject": "陣列中的所有元素都必須是物件。", + "validationErrorRequired": "此為必填欄位" + }, + "graph": { + "directionDown": "由上到下", + "directionLeft": "由右到左", + "directionRight": "由左到右", + "directionUp": "由下到上", + "downloadImage": "下載圖表圖片", + "downloadImageError": "下載圖表圖片失敗。", + "downloadImageErrorTitle": "下載失敗", + "otherDagRuns": "+ 其他 Dag 執行", + "taskCount_one": "1 個任務", + "taskCount_other": "{{count}} 個任務", + "taskGroup": "任務群組" + }, + "limitedList": "+ 其他 {{count}} 項", + "logs": { + "file": "檔案", + "location": "第 {{line}} 行,位於 {{name}}" + }, + "reparseDag": "重新解析 Dag", + "sortedAscending": "遞增排序", + "sortedDescending": "遞減排序", + "sortedUnsorted": "未排序", + "taskTries": "任務嘗試次數", + "toggleCardView": "顯示卡片視圖", + "toggleTableView": "顯示表格視圖", + "triggerDag": { + "button": "觸發", + "loading": "正在載入 Dag 資訊...", + "loadingFailed": "載入 Dag 資訊失敗,請重試。", + "runIdHelp": "選填 - 若未提供將會自動產生", + "selectDescription": "觸發此 Dag 單次執行", 
+ "selectLabel": "單次執行", + "title": "觸發 Dag", + "toaster": { + "success": { + "description": "已成功觸發 Dag 執行。", + "title": "已觸發 Dag 執行" + } + }, + "unpause": "觸發時取消暫停 {{dagDisplayName}}" + }, + "trimText": { + "details": "詳細資訊", + "empty": "空的", + "noContent": "無可用內容。" + }, + "versionDetails": { + "bundleLink": "套件包連結", + "bundleName": "套件包名稱", + "bundleVersion": "套件包版本", + "createdAt": "建立時間", + "versionId": "版本 ID" + }, + "versionSelect": { + "dagVersion": "Dag 版本", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dag.json new file mode 100644 index 0000000000000..622b088679bb0 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dag.json @@ -0,0 +1,121 @@ +{ + "allRuns": "所有執行", + "blockingDeps": { + "dependency": "依賴 (Dependencies)", + "reason": "原因", + "title": "依賴 (Dependencies) 阻礙任務排程" + }, + "code": { + "bundleUrl": "套件包網址", + "noCode": "找不到程式碼", + "parsedAt": "解析時間:" + }, + "extraLinks": "額外連結", + "grid": { + "buttons": { + "resetToLatest": "重設為最新", + "toggleGroup": "切換群組狀態" + } + }, + "header": { + "buttons": { + "advanced": "進階功能", + "dagDocs": "Dag 文件" + } + }, + "logs": { + "allLevels": "所有日誌等級", + "allSources": "所有來源", + "critical": "CRITICAL", + "debug": "DEBUG", + "error": "ERROR", + "fullscreen": { + "button": "全螢幕", + "tooltip": "按下 {{hotkey}} 進入全螢幕" + }, + "info": "INFO", + "noTryNumber": "沒有嘗試次數", + "settings": "日誌設定", + "viewInExternal": "在 {{name}} 中檢視日誌(嘗試 {{attempt}})", + "warning": "WARNING" + }, + "navigation": { + "jump": "跳躍: Shift+{{arrow}}", + "navigation": "導航: {{arrow}}", + "toggleGroup": "展開/收合群組: 空白鍵" + }, + "overview": { + "buttons": { + "failedRun_one": "失敗的執行", + "failedRun_other": "失敗的執行", + "failedTask_one": "失敗的任務", + "failedTask_other": "失敗的任務", + "failedTaskInstance_one": "失敗的任務實例", + "failedTaskInstance_other": "失敗的任務實例" + }, + "charts": { + "assetEvent_one": "已建立資源事件", + "assetEvent_other": "已建立資源事件" + }, + "failedLogs": { + "title": "最近失敗任務的日誌", + "viewFullLogs": "檢視完整日誌" + } + }, + "panel": { + "buttons": { + "options": "選項", + "showGraph": "顯示圖表", + "showGrid": "顯示網格" + }, + "dagRuns": { + "label": "Dag 執行次數" + }, + "dependencies": { + "label": "依賴 (Dependencies)", + "options": { + "allDagDependencies": "所有 Dag 相依性", + "externalConditions": "外部條件", + "onlyTasks": "僅限任務" + }, + "placeholder": "依賴 (Dependencies)" + }, + "graphDirection": { + "label": "圖表方向" + } + }, + "paramsFailed": "載入參數失敗", + "parse": { + "toaster": { + "error": { + "description": "Dag 解析請求失敗。可能還有待處理的解析請求。", + "title": "Dag 重新解析失敗" + }, + "success": { + "description": "Dag 即將重新解析。", + "title": "已成功提交重新解析請求" + } + } + }, + "tabs": { + "assetEvents": "資源事件", + "auditLog": "審計日誌", + "backfills": "回填", + "code": "程式碼", + "details": "詳細資訊", + "logs": "日誌", + "mappedTaskInstances_one": "任務實例 [{{count}}]", + "mappedTaskInstances_other": "任務實例 [{{count}}]", + "overview": "總覽", + "renderedTemplates": "渲染後的範本", + "requiredActions": "待回應的任務實例", + "runs": "執行紀錄", + "taskInstances": "任務實例", + "tasks": "任務", + "xcom": "XCom" + }, + "taskGroups": { + "collapseAll": "收合所有任務群組", + "expandAll": "展開所有任務群組" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dags.json new file mode 100644 index 0000000000000..52d84ca40e565 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dags.json @@ -0,0 +1,97 @@ +{ + "assetSchedule": "{{count}} / {{total}} 
個資源事件已更新", + "dagActions": { + "delete": { + "button": "刪除 Dag", + "warning": "這將會刪除所有與此 Dag 相關的系統資料,包括 Dag 執行與任務。" + } + }, + "favoriteDag": "將 Dag 加入最愛", + "filters": { + "allRunTypes": "全部執行類型", + "allStates": "全部狀態", + "favorite": { + "all": "全部", + "favorite": "已加為最愛", + "unfavorite": "未加為最愛" + }, + "paused": { + "active": "啟用中", + "all": "全部", + "paused": "暫停" + }, + "runIdPatternFilter": "搜尋 Dag 執行", + "triggeringUserNameFilter": "搜尋觸發使用者名稱" + }, + "ownerLink": "擁有者 {{owner}} 的連結", + "runAndTaskActions": { + "affectedTasks": { + "noItemsFound": "找不到任務。", + "title": "受影響的任務: {{count}}" + }, + "clear": { + "button": "清除 {{type}}", + "buttonTooltip": "按下 shift+c 清除", + "error": "清除 {{type}} 時發生錯誤", + "title": "清除 {{type}}" + }, + "delete": { + "button": "刪除 {{type}}", + "dialog": { + "resourceName": "{{type}} {{id}}", + "title": "刪除 {{type}}", + "warning": "這將會刪除所有與此 {{type}} 相關的系統資料。" + }, + "error": "刪除 {{type}} 時發生錯誤", + "success": { + "description": "{{type}} 刪除請求成功。", + "title": "{{type}} 刪除成功" + } + }, + "markAs": { + "button": "標記 {{type}} 為...", + "buttonTooltip": { + "failed": "按下 shift+f 標記為失敗", + "success": "按下 shift+s 標記為成功" + }, + "title": "標記為 {{type}} 為 {{state}}" + }, + "options": { + "downstream": "下游", + "existingTasks": "清除現有任務", + "future": "未來", + "onlyFailed": "只清除失敗任務", + "past": "過去", + "queueNew": "排隊新任務", + "runOnLatestVersion": "執行最新套件包版本", + "upstream": "上游" + } + }, + "search": { + "advanced": "進階搜尋", + "clear": "清除搜尋", + "dags": "搜尋 Dags", + "hotkey": "+K", + "tasks": "搜尋任務" + }, + "sort": { + "displayName": { + "asc": "依顯示名稱排序 (A-Z)", + "desc": "依顯示名稱排序 (Z-A)" + }, + "lastRunStartDate": { + "asc": "依上次開始執行日期排序 (從新到舊)", + "desc": "依上次開始執行日期排序 (從新到舊)" + }, + "lastRunState": { + "asc": "依上次執行狀態排序 (A-Z)", + "desc": "依上次執行狀態排序 (Z-A)" + }, + "nextDagRun": { + "asc": "依下次執行時間排序 (由近而遠)", + "desc": "依下次執行時間排序 (由遠而近)" + }, + "placeholder": "排序方式" + }, + "unfavoriteDag": "將 Dag 移出最愛" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dashboard.json new file mode 100644 index 0000000000000..10e6e5553ae54 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dashboard.json @@ -0,0 +1,45 @@ +{ + "favorite": { + "favoriteDags_one": "第 {{ count }} 個最愛的 Dag", + "favoriteDags_other": "前 {{ count }} 個最愛的 Dag", + "noDagRuns": "此 Dag 尚未被觸發過。", + "noFavoriteDags": "尚無最愛的 Dag 。請點擊 Dag 列表旁的星號圖示,將 Dag 加入最愛。" + }, + "group": "群組", + "health": { + "dagProcessor": "Dag 處理器", + "health": "健康狀態", + "healthy": "健康", + "lastHeartbeat": "最後心跳", + "metaDatabase": "系統資料庫", + "scheduler": "排程器", + "status": "狀態", + "triggerer": "觸發器", + "unhealthy": "健康狀態異常" + }, + "history": "歷史記錄", + "importErrors": { + "dagImportError_one": "Dag 匯入錯誤", + "dagImportError_other": "Dag 匯入錯誤", + "searchByFile": "依檔案搜尋", + "timestamp": "時間戳記" + }, + "managePools": "管理資源池", + "noAssetEvents": "未找到資源事件", + "poolSlots": "資源池配額", + "sortBy": { + "newestFirst": "由新到舊", + "oldestFirst": "由舊到新" + }, + "source": "來源", + "stats": { + "activeDags": "啟用中的 Dags", + "failedDags": "失敗的 Dags", + "queuedDags": "排隊中的 Dags", + "requiredActions": "待回應的任務實例", + "runningDags": "執行中的 Dags", + "stats": "統計" + }, + "uri": "Uri", + "welcome": "歡迎" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/hitl.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/hitl.json new file mode 100644 index 0000000000000..59833a2250753 --- /dev/null +++ 
b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/hitl.json @@ -0,0 +1,23 @@ +{ + "requiredAction_one": "待回應的任務實例", + "requiredAction_other": "待回應的任務實例", + "requiredActionState": "待回應的任務實例狀態", + "response": { + "error": "回應失敗", + "optionsDescription": "請為此任務實例選擇一個選項", + "optionsLabel": "選項", + "received": "收到回應的時間:", + "respond": "送出回應", + "success": "任務 {{taskId}} 回應成功", + "title": "人類參與流程任務實例 - {{taskId}}" + }, + "state": { + "approvalReceived": "已核准", + "approvalRequired": "需要核准", + "choiceReceived": "已選擇", + "choiceRequired": "需要選擇", + "rejectionReceived": "已拒絕", + "responseReceived": "已回應", + "responseRequired": "需要回應" + } +} diff --git a/airflow-core/src/airflow/ui/rules/i18n.js b/airflow-core/src/airflow/ui/rules/i18n.js new file mode 100644 index 0000000000000..309e7e65eaada --- /dev/null +++ b/airflow-core/src/airflow/ui/rules/i18n.js @@ -0,0 +1,171 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* eslint-disable @typescript-eslint/no-unsafe-argument */ + +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ +import jsoncParser from "jsonc-eslint-parser"; +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +export const i18nNamespace = "i18n"; +/** + * Extract all nested keys from translation object + * @param {Record} obj + * @param {string} [prefix] + * @returns {string[]} + */ +const getKeys = (obj, prefix = "") => { + if (Array.isArray(obj)) { + return []; + } + + return Object.keys(obj).flatMap((key) => { + const newPrefix = prefix ? 
`${prefix}.${key}` : key; + const value = obj[key]; + + if (typeof value === "object" && value !== null && !Array.isArray(value)) { + return [newPrefix, ...getKeys(value, newPrefix)]; + } + + return [newPrefix]; + }); +}; + +// Path to locales directory +const localesDir = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../public/i18n/locales"); + +// Default language (English) as reference +const defaultLanguage = "en"; +const defaultLanguageKeys = /** @type {Record} */ ({}); +const defaultLanguageDir = path.join(localesDir, defaultLanguage); + +// Load translation keys from default language files +fs.readdirSync(defaultLanguageDir) + .filter((file) => file.endsWith(".json")) + .forEach((jsonFile) => { + const ns = path.basename(jsonFile, ".json"); + const filePath = path.join(defaultLanguageDir, jsonFile); + const fileContent = fs.readFileSync(filePath, "utf8"); + const parsedJson = JSON.parse(fileContent); + + if (typeof parsedJson === "object" && parsedJson !== null && !Array.isArray(parsedJson)) { + defaultLanguageKeys[ns] = getKeys(parsedJson); + } + }); + +export const i18nPlugin = { + files: ["public/i18n/locales/**/*.json"], + rules: { + "check-translation-completeness": { + /** @param {import('@typescript-eslint/utils').TSESLint.RuleContext<'missingKeys' | 'fileError', []>} context */ + create(context) { + return { + /** @param {import('@typescript-eslint/utils').TSESTree.Program} node */ + Program(node) { + // Get language code and namespace from file path + const currentFilePath = context.filename; + const langCode = path.dirname(path.relative(localesDir, currentFilePath)); + const namespace = path.basename(currentFilePath, ".json"); + + if (langCode === defaultLanguage) { + return; + } + + // Get keys from current file + const referenceKeys = defaultLanguageKeys[namespace]; + let langKeys; + + try { + const parsedLangJson = JSON.parse(context.sourceCode.text); + + if ( + typeof parsedLangJson === "object" && + parsedLangJson !== null && + !Array.isArray(parsedLangJson) + ) { + langKeys = getKeys(parsedLangJson); + } else { + context.report({ + data: { error: "Invalid JSON object.", filePath: currentFilePath }, + messageId: "fileError", + node, + }); + + return; + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + + context.report({ + data: { error: message, filePath: currentFilePath }, + messageId: "fileError", + node, + }); + + return; + } + + // Check for missing translations + const langKeysSet = new Set(langKeys); + const missingKeys = referenceKeys.filter((key) => !langKeysSet.has(key)); + + if (missingKeys.length > 0) { + context.report({ + data: { keys: missingKeys.join(", "), lang: langCode, namespace }, + messageId: "missingKeys", + node, + }); + } + }, + }; + }, + meta: { + docs: { + category: "Best Practices", + description: "Ensures non-default lang files have all keys from default.", + recommended: "warn", + }, + messages: { + fileError: "Failed to read/parse {{filePath}}. 
Error: {{error}}", + missingKeys: "Lang '{{lang}}' (namespace: {{namespace}}) missing keys: {{keys}}", + }, + type: "problem", + }, + }, + }, +}; + +/** @type {import("@typescript-eslint/utils/ts-eslint").FlatConfig.Config} */ +export const i18nRules = { + files: ["public/i18n/locales/**/*.json"], + languageOptions: { + parser: jsoncParser, + parserOptions: { + extraFileExtensions: [".json"], + }, + }, + plugins: { + [i18nNamespace]: i18nPlugin, + }, + rules: { + [`${i18nNamespace}/check-translation-completeness`]: "warn", + }, +}; diff --git a/airflow-core/src/airflow/ui/rules/i18next.js b/airflow-core/src/airflow/ui/rules/i18next.js new file mode 100644 index 0000000000000..1ba9fc3ded75a --- /dev/null +++ b/airflow-core/src/airflow/ui/rules/i18next.js @@ -0,0 +1,69 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * @import { FlatConfig } from "@typescript-eslint/utils/ts-eslint"; + */ +import i18nextPlugin from "eslint-plugin-i18next"; + +import { ERROR } from "./levels.js"; + +const allExtensions = "*.{j,t}s{x,}"; + +/** + * ESLint rules for i18next to enforce internationalization best practices. + * This is a customized configuration. + * + * @see [eslint-plugin-i18next](https://github.com/edvardchen/eslint-plugin-i18next) + */ +export const i18nextRules = /** @type {const} @satisfies {FlatConfig.Config} */ ({ + files: [ + // Check files in the ui/src directory + `src/**/${allExtensions}`, + ], + ignores: [ + // Ignore test files + "src/**/*.test.tsx", + ], + plugins: { + i18next: i18nextPlugin, + }, + rules: { + /** + * Enforce no literal strings in JSX/TSX markup. + * This rule helps ensure all user-facing strings are properly internationalized. + * + * @example + * ```typescript + * // ❌ Incorrect + *
<div>Hello, world!</div>
+ *
+ * // ✅ Correct
+ * <div>{translate('greeting')}</div>
+ * ``` + * @see [i18next/no-literal-string](https://github.com/edvardchen/eslint-plugin-i18next#no-literal-string) + */ + "i18next/no-literal-string": [ + ERROR, + { + markupOnly: true, + }, + ], + }, +}); diff --git a/airflow-core/src/airflow/ui/rules/jsonc.js b/airflow-core/src/airflow/ui/rules/jsonc.js new file mode 100644 index 0000000000000..a39418eb31ce6 --- /dev/null +++ b/airflow-core/src/airflow/ui/rules/jsonc.js @@ -0,0 +1,45 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import jsonc from "eslint-plugin-jsonc"; + +import { ERROR } from "./levels.js"; + +export const jsoncNamespace = "jsonc"; + +/** @type {import("@typescript-eslint/utils/ts-eslint").FlatConfig.Config} */ +export const jsoncRules = { + files: ["public/i18n/locales/**/*.json"], + plugins: { + [jsoncNamespace]: jsonc, + }, + rules: { + /** + * Enforce sorted keys. + * + * @see [jsonc/sort-keys](https://ota-meshi.github.io/eslint-plugin-jsonc/rules/sort-keys.html) + */ + [`${jsoncNamespace}/sort-keys`]: [ + ERROR, + "asc", + { + natural: true, + }, + ], + }, +}; diff --git a/airflow-core/src/airflow/ui/rules/typescript.js b/airflow-core/src/airflow/ui/rules/typescript.js index 2583b602e7264..ed05e15089799 100644 --- a/airflow-core/src/airflow/ui/rules/typescript.js +++ b/airflow-core/src/airflow/ui/rules/typescript.js @@ -38,6 +38,7 @@ export const typescriptNamespace = "@typescript-eslint"; * @see [@typescript-eslint/eslint-plugin](https://typescript-eslint.io/rules/) */ export const typescriptRules = /** @type {const} @satisfies {FlatConfig.Config} */ ({ + files: ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx"], languageOptions: { parser: typescriptParser, parserOptions: { diff --git a/airflow-core/src/airflow/ui/src/components/ActionAccordion/ActionAccordion.tsx b/airflow-core/src/airflow/ui/src/components/ActionAccordion/ActionAccordion.tsx index 3995b7893080b..f9c6c42e7063e 100644 --- a/airflow-core/src/airflow/ui/src/components/ActionAccordion/ActionAccordion.tsx +++ b/airflow-core/src/airflow/ui/src/components/ActionAccordion/ActionAccordion.tsx @@ -18,13 +18,14 @@ */ import { Box, Editable, Text, VStack } from "@chakra-ui/react"; import type { ChangeEvent } from "react"; +import { useTranslation } from "react-i18next"; import type { DAGRunResponse, TaskInstanceCollectionResponse } from "openapi/requests/types.gen"; import ReactMarkdown from "src/components/ReactMarkdown"; import { Accordion } from "src/components/ui"; import { DataTable } from "../DataTable"; -import { columns } from "./columns"; +import { getColumns } from "./columns"; type Props = { readonly affectedTasks?: TaskInstanceCollectionResponse; @@ -36,6 +37,7 @@ type Props = { // TODO: Make a front-end only unconnected table component with client side ordering and pagination const 
ActionAccordion = ({ affectedTasks, note, setNote }: Props) => { const showTaskSection = affectedTasks !== undefined; + const { t: translate } = useTranslation(); return ( { {showTaskSection ? ( - Affected Tasks: {affectedTasks.total_entries} + + {translate("dags:runAndTaskActions.affectedTasks.title", { + count: affectedTasks.total_entries, + })} + @@ -64,7 +71,7 @@ const ActionAccordion = ({ affectedTasks, note, setNote }: Props) => { ) : undefined} - Note + {translate("note.label")} { {Boolean(note) ? ( {note} ) : ( - Add a note... + {translate("note.placeholder")} )} diff --git a/airflow-core/src/airflow/ui/src/components/ActionAccordion/columns.tsx b/airflow-core/src/airflow/ui/src/components/ActionAccordion/columns.tsx index dd008cbafb584..a787d1d2fc3a2 100644 --- a/airflow-core/src/airflow/ui/src/components/ActionAccordion/columns.tsx +++ b/airflow-core/src/airflow/ui/src/components/ActionAccordion/columns.tsx @@ -16,30 +16,17 @@ * specific language governing permissions and limitations * under the License. */ -import { Link } from "@chakra-ui/react"; -import type { ColumnDef } from "@tanstack/react-table"; -import { Link as RouterLink } from "react-router-dom"; +import type { TFunction } from "i18next"; import type { TaskInstanceResponse } from "openapi/requests/types.gen"; +import type { MetaColumn } from "src/components/DataTable/types"; import { StateBadge } from "src/components/StateBadge"; -import { Tooltip } from "src/components/ui"; -import { getTaskInstanceLink } from "src/utils/links"; -import { trimText } from "src/utils/trimTextFn"; -export const columns: Array> = [ +export const getColumns = (translate: TFunction): Array> => [ { - accessorKey: "task_display_name", - cell: ({ row: { original } }) => ( - - - - {trimText(original.task_display_name, 25).trimmedText} - - - - ), - enableSorting: false, - header: "Task ID", + accessorKey: "task_id", + header: translate("taskId"), + size: 200, }, { accessorKey: "state", @@ -47,19 +34,19 @@ export const columns: Array> = [ row: { original: { state }, }, - }) => {state}, - enableSorting: false, - header: () => "State", + }) => ( + + {state ? 
translate(`common:states.${state}`) : translate("common:states.no_status")} + + ), + header: translate("state"), }, { - accessorKey: "rendered_map_index", - enableSorting: false, - header: "Map Index", + accessorKey: "map_index", + header: translate("mapIndex"), }, - { - accessorKey: "dag_run_id", - enableSorting: false, - header: "Run Id", + accessorKey: "run_id", + header: translate("runId"), }, ]; diff --git a/airflow-core/src/airflow/ui/src/components/AssetExpression/AndGateNode.tsx b/airflow-core/src/airflow/ui/src/components/AssetExpression/AndGateNode.tsx index 639e411b16e41..b39c96f312191 100644 --- a/airflow-core/src/airflow/ui/src/components/AssetExpression/AndGateNode.tsx +++ b/airflow-core/src/airflow/ui/src/components/AssetExpression/AndGateNode.tsx @@ -18,36 +18,41 @@ */ import { Box, VStack, Badge } from "@chakra-ui/react"; import type { PropsWithChildren } from "react"; +import { useTranslation } from "react-i18next"; import { TbLogicAnd } from "react-icons/tb"; -export const AndGateNode = ({ children }: PropsWithChildren) => ( - - { + const { t: translate } = useTranslation("common"); + + return ( + - - AND - - - {children} - - -); + + + {translate("expression.and")} + + + {children} + + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/components/AssetExpression/AssetExpression.tsx b/airflow-core/src/airflow/ui/src/components/AssetExpression/AssetExpression.tsx index 2c27e32d72784..9b49fd8b94726 100644 --- a/airflow-core/src/airflow/ui/src/components/AssetExpression/AssetExpression.tsx +++ b/airflow-core/src/airflow/ui/src/components/AssetExpression/AssetExpression.tsx @@ -18,6 +18,7 @@ */ import { Box, Badge } from "@chakra-ui/react"; import { Fragment } from "react"; +import { useTranslation } from "react-i18next"; import { TbLogicOr } from "react-icons/tb"; import { AndGateNode } from "./AndGateNode"; @@ -32,6 +33,8 @@ export const AssetExpression = ({ readonly events?: Array; readonly expression: ExpressionType | null; }) => { + const { t: translate } = useTranslation("common"); + if (expression === null) { return undefined; } @@ -54,7 +57,7 @@ export const AssetExpression = ({ {expression.any && index === expression.any.length - 1 ? undefined : ( - OR + {translate("expression.or")} )} diff --git a/airflow-core/src/airflow/ui/src/components/Assets/AssetEvent.tsx b/airflow-core/src/airflow/ui/src/components/Assets/AssetEvent.tsx index 6c97c648de5a7..af7a07a76b476 100644 --- a/airflow-core/src/airflow/ui/src/components/Assets/AssetEvent.tsx +++ b/airflow-core/src/airflow/ui/src/components/Assets/AssetEvent.tsx @@ -16,7 +16,8 @@ * specific language governing permissions and limitations * under the License. 
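The diff above swaps the module-level `columns` array for a `getColumns(translate)` factory so column headers are resolved through i18next at render time. A minimal sketch of the pattern, with an invented row shape and translation keys standing in for the real `TaskInstanceResponse` columns:

```typescript
import type { TFunction } from "i18next";

// Invented row shape for illustration; the real table uses TaskInstanceResponse.
type Row = { state: string | null; task_id: string };

// Headers are computed when the factory runs, so a language switch that
// re-renders the caller also re-resolves every header string.
const getColumns = (translate: TFunction): Array<{ accessorKey: keyof Row; header: string }> => [
  { accessorKey: "task_id", header: translate("taskId") },
  { accessorKey: "state", header: translate("state") },
];
```

Call sites build the columns inside the component body via `getColumns(translate)`, which is what makes the old static `columns` export safe to delete.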
*/ -import { Box, Text, HStack, Code } from "@chakra-ui/react"; +import { Box, Text, HStack } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; import { FiDatabase } from "react-icons/fi"; import { Link } from "react-router-dom"; @@ -24,60 +25,67 @@ import type { AssetEventResponse } from "openapi/requests/types.gen"; import Time from "src/components/Time"; import { Tooltip } from "src/components/ui"; +import RenderedJsonField from "../RenderedJsonField"; import { TriggeredRuns } from "./TriggeredRuns"; export const AssetEvent = ({ assetId, event, - showExtra, }: { readonly assetId?: number; readonly event: AssetEventResponse; - readonly showExtra?: boolean; }) => { + const { t: translate } = useTranslation("dashboard"); let source = ""; - // eslint-disable-next-line @typescript-eslint/naming-convention - const { from_rest_api, from_trigger, ...extra } = event.extra ?? {}; + const { from_rest_api: fromRestAPI, from_trigger: fromTrigger, ...extra } = event.extra ?? {}; - if (from_rest_api === true) { + if (fromRestAPI === true) { source = "API"; - } else if (from_trigger === true) { + } else if (fromTrigger === true) { source = "Trigger"; } - const extraString = JSON.stringify(extra); - return ( - + {Boolean(assetId) ? undefined : ( - + + + - group: {event.group ?? ""} - uri: {event.uri ?? ""} + + {translate("group")}: {event.group ?? ""} + + + {translate("uri")}: {event.uri ?? ""} + } showArrow > - {event.name ?? ""} + + {event.name ?? ""} + )} - Source: + {translate("source")}: {source === "" ? ( -1 ? `/mapped/${event.source_map_index}` : ""}`} > - {event.source_dag_id} + + {event.source_dag_id} + ) : ( source @@ -86,7 +94,9 @@ export const AssetEvent = ({ - {showExtra && extraString !== "{}" ? {extraString} : undefined} + {Object.keys(extra).length >= 1 ? ( + + ) : undefined} ); }; diff --git a/airflow-core/src/airflow/ui/src/components/Assets/AssetEvents.tsx b/airflow-core/src/airflow/ui/src/components/Assets/AssetEvents.tsx index f06f55185c917..8ba4b58133639 100644 --- a/airflow-core/src/airflow/ui/src/components/Assets/AssetEvents.tsx +++ b/airflow-core/src/airflow/ui/src/components/Assets/AssetEvents.tsx @@ -16,21 +16,22 @@ * specific language governing permissions and limitations * under the License. 
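The `AssetEvent` rewrite above replaces a lint-suppressed snake_case destructure with aliased bindings. A small sketch of the same move, with an invented payload:

```typescript
// Sample payload invented for illustration; the real value is event.extra.
const eventExtra: Record<string, unknown> = { from_rest_api: true, partition: "2024-01-01" };

// Aliasing at destructure time yields camelCase locals without the
// @typescript-eslint/naming-convention suppression the old code carried.
const { from_rest_api: fromRestAPI, from_trigger: fromTrigger, ...extra } = eventExtra;

// Same branching as the component; explicit `=== true` guards against
// non-boolean values in the payload.
const source = fromRestAPI === true ? "API" : fromTrigger === true ? "Trigger" : "";
// source === "API"; the remaining keys in `extra` are rendered as JSON only
// when Object.keys(extra).length >= 1.
```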
*/ -import { Box, Heading, Flex, HStack, Skeleton } from "@chakra-ui/react"; +import { Box, Heading, Flex, HStack, Skeleton, Separator } from "@chakra-ui/react"; +import type { BoxProps } from "@chakra-ui/react"; import { createListCollection } from "@chakra-ui/react/collection"; +import { useTranslation } from "react-i18next"; import { FiDatabase } from "react-icons/fi"; import type { AssetEventCollectionResponse, AssetEventResponse } from "openapi/requests/types.gen"; import { StateBadge } from "src/components/StateBadge"; import { Select } from "src/components/ui"; -import { pluralize } from "src/utils"; import { DataTable } from "../DataTable"; import type { CardDef, TableState } from "../DataTable/types"; import { AssetEvent } from "./AssetEvent"; -const cardDef = (assetId?: number, showExtra?: boolean): CardDef => ({ - card: ({ row }) => , +const cardDef = (assetId?: number): CardDef => ({ + card: ({ row }) => , meta: { customSkeleton: , }, @@ -42,9 +43,8 @@ type AssetEventProps = { readonly isLoading?: boolean; readonly setOrderBy?: (order: string) => void; readonly setTableUrlState?: (state: TableState) => void; - readonly showExtra?: boolean; readonly tableUrlState?: TableState; - readonly title?: string; + readonly titleKey?: string; }; export const AssetEvents = ({ @@ -53,27 +53,28 @@ export const AssetEvents = ({ isLoading, setOrderBy, setTableUrlState, - showExtra, tableUrlState, - title, -}: AssetEventProps) => { + titleKey, + ...rest +}: AssetEventProps & BoxProps) => { + const { t: translate } = useTranslation(["dashboard", "common", "dag"]); const assetSortOptions = createListCollection({ items: [ - { label: "Newest first", value: "-timestamp" }, - { label: "Oldest first", value: "timestamp" }, + { label: translate("sortBy.newestFirst"), value: "-timestamp" }, + { label: translate("sortBy.oldestFirst"), value: "timestamp" }, ], }); return ( - - + + {data?.total_entries ?? " "} - {pluralize(title ?? "Asset Event", data?.total_entries ?? 0, undefined, true)} + {translate(titleKey ?? "common:assetEvent", { count: data?.total_entries ?? 0 })} {setOrderBy === undefined ? undefined : ( @@ -83,6 +84,7 @@ export const AssetEvents = ({ data-testid="asset-sort-duration" defaultValue={["-timestamp"]} onValueChange={(option) => setOrderBy(option.value[0] as string)} + size="sm" width={130} > @@ -99,14 +101,16 @@ export const AssetEvents = ({ )} + { + const { t: translate } = useTranslation("common"); + if (dagRuns === undefined || dagRuns.length === 0) { return undefined; } return dagRuns.length === 1 ? ( - Triggered Dag Run: + {`${translate("triggered")} ${translate("dagRun_one")}`}: @@ -49,7 +51,7 @@ export const TriggeredRuns = ({ dagRuns }: Props) => { diff --git a/airflow-core/src/airflow/ui/src/components/Banner/BackfillBanner.tsx b/airflow-core/src/airflow/ui/src/components/Banner/BackfillBanner.tsx index 97714ebe75624..3b05ac63a0ddd 100644 --- a/airflow-core/src/airflow/ui/src/components/Banner/BackfillBanner.tsx +++ b/airflow-core/src/airflow/ui/src/components/Banner/BackfillBanner.tsx @@ -17,17 +17,18 @@ * under the License. 
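The `translate(titleKey ?? "common:assetEvent", { count })` call above relies on i18next's built-in plural resolution, which is why the locale files earlier in this diff define paired `_one`/`_other` keys and why the old `pluralize` helper import could be dropped. A self-contained sketch with invented inline resources:

```typescript
import i18next from "i18next";

// Inline resources invented for the example; the real keys live under
// public/i18n/locales/<lang>/*.json.
await i18next.init({
  lng: "en",
  resources: {
    en: {
      translation: {
        assetEvent_one: "{{count}} Asset Event",
        assetEvent_other: "{{count}} Asset Events",
      },
    },
  },
});

i18next.t("assetEvent", { count: 1 }); // "1 Asset Event"
i18next.t("assetEvent", { count: 5 }); // "5 Asset Events"
```

Since the completeness lint compares every locale against the English key set, translations such as zh-TW carry both suffixes even where the language itself does not distinguish singular from plural.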
*/ import { Box, HStack, Spacer, Text, type ButtonProps } from "@chakra-ui/react"; +import { useQueryClient } from "@tanstack/react-query"; +import { useTranslation } from "react-i18next"; import { MdPause, MdPlayArrow, MdStop } from "react-icons/md"; import { RiArrowGoBackFill } from "react-icons/ri"; import { useBackfillServiceCancelBackfill, - useBackfillServiceListBackfills, - useBackfillServiceListBackfillsKey, + useBackfillServiceListBackfillsUi, + useBackfillServiceListBackfillsUiKey, useBackfillServicePauseBackfill, useBackfillServiceUnpauseBackfill, } from "openapi/queries"; -import { queryClient } from "src/queryClient"; import Time from "../Time"; import { Button, ProgressBar } from "../ui"; @@ -45,18 +46,20 @@ const buttonProps = { variant: "outline", } satisfies ButtonProps; -const onSuccess = async () => { - await queryClient.invalidateQueries({ - queryKey: [useBackfillServiceListBackfillsKey], - }); -}; - const BackfillBanner = ({ dagId }: Props) => { - const { data, isLoading } = useBackfillServiceListBackfills({ + const { t: translate } = useTranslation("components"); + const { data, isLoading } = useBackfillServiceListBackfillsUi({ dagId, }); const [backfill] = data?.backfills.filter((bf) => bf.completed_at === null) ?? []; + const queryClient = useQueryClient(); + const onSuccess = async () => { + await queryClient.invalidateQueries({ + queryKey: [useBackfillServiceListBackfillsUiKey], + }); + }; + const { isPending: isPausePending, mutate: pauseMutate } = useBackfillServicePauseBackfill({ onSuccess }); const { isPending: isUnPausePending, mutate: unpauseMutate } = useBackfillServiceUnpauseBackfill({ onSuccess, @@ -65,15 +68,21 @@ const BackfillBanner = ({ dagId }: Props) => { const { isPending: isStopPending, mutate: stopPending } = useBackfillServiceCancelBackfill({ onSuccess }); const togglePause = () => { - if (backfill?.is_paused) { + if (backfill === undefined) { + return; + } + if (backfill.is_paused) { unpauseMutate({ backfillId: backfill.id }); } else { - pauseMutate({ backfillId: backfill?.id }); + pauseMutate({ backfillId: backfill.id }); } }; const cancel = () => { - stopPending({ backfillId: backfill?.id }); + if (backfill === undefined) { + return; + } + stopPending({ backfillId: backfill.id }); }; if (isLoading || backfill === undefined) { @@ -84,7 +93,7 @@ const BackfillBanner = ({ dagId }: Props) => { - Backfill in progress: + {translate("banner.backfillInProgress")}: {" "} diff --git a/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceButton.tsx b/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceButton.tsx index 68750c2c949ad..5ce1edf0f3d3a 100644 --- a/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceButton.tsx +++ b/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceButton.tsx @@ -17,35 +17,57 @@ * under the License. 
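`BackfillBanner` now obtains the query client from React context with `useQueryClient()` rather than importing a module singleton, and invalidates the backfill list after every mutation. A hedged sketch of the same pattern, with a stand-in endpoint and query key in place of the generated hooks:

```typescript
import { useMutation, useQueryClient } from "@tanstack/react-query";

// Stand-in for the generated useBackfillServicePauseBackfill hook.
const usePauseBackfill = () => {
  const queryClient = useQueryClient(); // context-scoped, so tests can inject their own client

  return useMutation({
    mutationFn: async (backfillId: string) => {
      await fetch(`/api/backfills/${backfillId}/pause`, { method: "POST" });
    },
    onSuccess: async () => {
      // Mark the cached backfill list stale so the banner refetches it.
      await queryClient.invalidateQueries({ queryKey: ["listBackfillsUi"] });
    },
  });
};
```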
*/ import { Box, useDisclosure } from "@chakra-ui/react"; +import { useHotkeys } from "react-hotkeys-hook"; +import { useTranslation } from "react-i18next"; import { CgRedo } from "react-icons/cg"; import type { TaskInstanceResponse } from "openapi/requests/types.gen"; +import { Tooltip } from "src/components/ui"; import ActionButton from "src/components/ui/ActionButton"; import ClearTaskInstanceDialog from "./ClearTaskInstanceDialog"; type Props = { + readonly isHotkeyEnabled?: boolean; readonly taskInstance: TaskInstanceResponse; readonly withText?: boolean; }; -const ClearTaskInstanceButton = ({ taskInstance, withText = true }: Props) => { +const ClearTaskInstanceButton = ({ isHotkeyEnabled = false, taskInstance, withText = true }: Props) => { const { onClose, onOpen, open } = useDisclosure(); + const { t: translate } = useTranslation(); + + useHotkeys( + "shift+c", + () => { + onOpen(); + }, + { enabled: isHotkeyEnabled }, + ); return ( - - } - onClick={onOpen} - text="Clear Task Instance" - withText={withText} - /> - - {open ? ( - - ) : undefined} - + + + } + onClick={onOpen} + text={translate("dags:runAndTaskActions.clear.button", { type: translate("taskInstance_one") })} + withText={withText} + /> + + {open ? ( + + ) : undefined} + + ); }; diff --git a/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceDialog.tsx b/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceDialog.tsx index 53126f9e88e2a..62c0426790cb6 100644 --- a/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceDialog.tsx +++ b/airflow-core/src/airflow/ui/src/components/Clear/TaskInstance/ClearTaskInstanceDialog.tsx @@ -18,12 +18,14 @@ */ import { Flex, Heading, VStack } from "@chakra-ui/react"; import { useState } from "react"; +import { useTranslation } from "react-i18next"; import { CgRedo } from "react-icons/cg"; +import { useDagServiceGetDagDetails } from "openapi/queries"; import type { TaskInstanceResponse } from "openapi/requests/types.gen"; import { ActionAccordion } from "src/components/ActionAccordion"; import Time from "src/components/Time"; -import { Button, Dialog } from "src/components/ui"; +import { Button, Dialog, Checkbox } from "src/components/ui"; import SegmentedControl from "src/components/ui/SegmentedControl"; import { useClearTaskInstances } from "src/queries/useClearTaskInstances"; import { useClearTaskInstancesDryRun } from "src/queries/useClearTaskInstancesDryRun"; @@ -38,6 +40,7 @@ type Props = { const ClearTaskInstanceDialog = ({ onClose, open, taskInstance }: Props) => { const taskId = taskInstance.task_id; const mapIndex = taskInstance.map_index; + const { t: translate } = useTranslation(); const dagId = taskInstance.dag_id; const dagRunId = taskInstance.dag_run_id; @@ -55,6 +58,7 @@ const ClearTaskInstanceDialog = ({ onClose, open, taskInstance }: Props) => { const future = selectedOptions.includes("future"); const upstream = selectedOptions.includes("upstream"); const downstream = selectedOptions.includes("downstream"); + const [runOnLatestVersion, setRunOnLatestVersion] = useState(false); const [note, setNote] = useState(taskInstance.note); const { isPending: isPendingPatchDagRun, mutate: mutatePatchTaskInstance } = usePatchTaskInstance({ @@ -64,6 +68,11 @@ const ClearTaskInstanceDialog = ({ onClose, open, taskInstance }: Props) => { taskId, }); + // Get current DAG's bundle version to compare with task instance's DAG version bundle version + const { data: dagDetails } = useDagServiceGetDagDetails({ + 
dagId, + }); + const { data } = useClearTaskInstancesDryRun({ dagId, options: { @@ -77,6 +86,7 @@ const ClearTaskInstanceDialog = ({ onClose, open, taskInstance }: Props) => { include_past: past, include_upstream: upstream, only_failed: onlyFailed, + run_on_latest_version: runOnLatestVersion, task_ids: [[taskId, mapIndex]], }, }); @@ -86,14 +96,28 @@ const ClearTaskInstanceDialog = ({ onClose, open, taskInstance }: Props) => { total_entries: 0, }; + // Check if bundle versions are different + const currentDagBundleVersion = dagDetails?.bundle_version; + const taskInstanceDagVersionBundleVersion = taskInstance.dag_version?.bundle_version; + const bundleVersionsDiffer = currentDagBundleVersion !== taskInstanceDagVersionBundleVersion; + const shouldShowBundleVersionOption = + bundleVersionsDiffer && + taskInstanceDagVersionBundleVersion !== null && + taskInstanceDagVersionBundleVersion !== ""; + return ( - Clear Task Instance: {taskInstance.task_display_name}{" "} - @@ -103,19 +127,49 @@ const ClearTaskInstanceDialog = ({ onClose, open, taskInstance }: Props) => { - + + {shouldShowBundleVersionOption ? ( + setRunOnLatestVersion(Boolean(event.checked))} + > + {translate("dags:runAndTaskActions.options.runOnLatestVersion")} + + ) : undefined} diff --git a/airflow-core/src/airflow/ui/src/components/Clear/columns.tsx b/airflow-core/src/airflow/ui/src/components/Clear/columns.tsx deleted file mode 100644 index b09a18ff4ab03..0000000000000 --- a/airflow-core/src/airflow/ui/src/components/Clear/columns.tsx +++ /dev/null @@ -1,66 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
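Restating the gate above with invented values: the "run on latest version" checkbox is only offered when the task instance came from a different, non-empty bundle version than the DAG currently reports.

```typescript
// Invented values; in the dialog these come from useDagServiceGetDagDetails
// and taskInstance.dag_version respectively.
const currentDagBundleVersion: string | null = "v2";
const taskInstanceBundleVersion: string | null = "v1";

const bundleVersionsDiffer = currentDagBundleVersion !== taskInstanceBundleVersion;

// An empty or missing bundle version means there is nothing to switch away
// from, so the option stays hidden in that case.
const shouldShowBundleVersionOption =
  bundleVersionsDiffer && taskInstanceBundleVersion !== null && taskInstanceBundleVersion !== "";
// => true here, so the checkbox that sets run_on_latest_version is rendered
```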
- */ -import { Link } from "@chakra-ui/react"; -import type { ColumnDef } from "@tanstack/react-table"; -import { Link as RouterLink } from "react-router-dom"; - -import type { TaskInstanceResponse } from "openapi/requests/types.gen"; -import { Tooltip } from "src/components/ui"; -import { getTaskInstanceLink } from "src/utils/links"; -import { trimText } from "src/utils/trimTextFn"; - -import { StateBadge } from "../StateBadge"; - -export const columns: Array> = [ - { - accessorKey: "task_display_name", - cell: ({ row: { original } }) => ( - - - - {trimText(original.task_display_name, 25).trimmedText} - - - - ), - enableSorting: false, - header: "Task ID", - }, - { - accessorKey: "state", - cell: ({ - row: { - original: { state }, - }, - }) => {state}, - enableSorting: false, - header: () => "State", - }, - { - accessorKey: "rendered_map_index", - enableSorting: false, - header: "Map Index", - }, - - { - accessorKey: "dag_run_id", - enableSorting: false, - header: "Run Id", - }, -]; diff --git a/airflow-core/src/airflow/ui/src/components/ConfigForm.tsx b/airflow-core/src/airflow/ui/src/components/ConfigForm.tsx new file mode 100644 index 0000000000000..791ffd30ac95d --- /dev/null +++ b/airflow-core/src/airflow/ui/src/components/ConfigForm.tsx @@ -0,0 +1,126 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Accordion, Box, Field } from "@chakra-ui/react"; +import { type Control, type FieldValues, type Path, Controller } from "react-hook-form"; +import { useTranslation } from "react-i18next"; + +import type { ParamsSpec } from "src/queries/useDagParams"; +import { useParamStore } from "src/queries/useParamStore"; + +import { FlexibleForm, flexibleFormDefaultSection } from "./FlexibleForm"; +import { JsonEditor } from "./JsonEditor"; + +type ConfigFormProps = { + readonly children?: React.ReactNode; + readonly control: Control; + readonly errors: { + conf?: string; + date?: unknown; + }; + readonly initialParamsDict: { paramsDict: ParamsSpec }; + readonly setErrors: React.Dispatch< + React.SetStateAction<{ + conf?: string; + date?: unknown; + }> + >; + readonly setFormError: (error: boolean) => void; +}; + +const ConfigForm = ({ + children, + control, + errors, + initialParamsDict, + setErrors, + setFormError, +}: ConfigFormProps) => { + const { t: translate } = useTranslation(["components", "common"]); + const { conf, setConf } = useParamStore(); + + const validateAndPrettifyJson = (value: string) => { + try { + const parsedJson = JSON.parse(value) as JSON; + + setErrors((prev) => ({ ...prev, conf: undefined })); + + const formattedJson = JSON.stringify(parsedJson, undefined, 2); + + if (formattedJson !== conf) { + setConf(formattedJson); // Update only if the value is different + } + + return formattedJson; + } catch (error) { + const errorMessage = error instanceof Error ? error.message : translate("common:error.unknown"); + + setErrors((prev) => ({ + ...prev, + conf: translate("configForm.invalidJson", { errorMessage }), + })); + + return value; + } + }; + + return ( + + + + + {translate("configForm.advancedOptions")} + + + + {children} + } + render={({ field }) => ( + + {translate("configForm.configJson")} + { + field.onChange(validateAndPrettifyJson(field.value as string)); + }} + /> + {Boolean(errors.conf) ? {errors.conf} : undefined} + + )} + /> + + + + + ); +}; + +export default ConfigForm; diff --git a/airflow-core/src/airflow/ui/src/components/ConfirmationModal.tsx b/airflow-core/src/airflow/ui/src/components/ConfirmationModal.tsx index bac6e3d1f0979..43c926e70c0a1 100644 --- a/airflow-core/src/airflow/ui/src/components/ConfirmationModal.tsx +++ b/airflow-core/src/airflow/ui/src/components/ConfirmationModal.tsx @@ -17,6 +17,7 @@ * under the License. */ import { Button, type DialogBodyProps, Heading } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; import { Dialog } from "src/components/ui"; @@ -28,32 +29,36 @@ type Props = { readonly open: boolean; }; -export const ConfirmationModal = ({ children, header, onConfirm, onOpenChange, open }: Props) => ( - - - - {header} - +export const ConfirmationModal = ({ children, header, onConfirm, onOpenChange, open }: Props) => { + const { t: translate } = useTranslation("common"); - + return ( + + + + {header} + - {children} - - - + + - - - - - -); + + + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/DeleteDagButton.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/DeleteDagButton.tsx new file mode 100644 index 0000000000000..61b4a4af25734 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/components/DagActions/DeleteDagButton.tsx @@ -0,0 +1,71 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Box, type ButtonProps, useDisclosure } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; +import { FiTrash2 } from "react-icons/fi"; +import { useNavigate } from "react-router-dom"; + +import DeleteDialog from "src/components/DeleteDialog"; +import ActionButton from "src/components/ui/ActionButton"; +import { useDeleteDag } from "src/queries/useDeleteDag"; + +type DeleteDagButtonProps = { + readonly dagDisplayName: string; + readonly dagId: string; + readonly withText?: boolean; +} & ButtonProps; + +const DeleteDagButton = ({ dagDisplayName, dagId, width, withText = true }: DeleteDagButtonProps) => { + const { onClose, onOpen, open } = useDisclosure(); + const navigate = useNavigate(); + const { t: translate } = useTranslation("dags"); + const { isPending, mutate: deleteDag } = useDeleteDag({ + dagId, + onSuccessConfirm: () => { + onClose(); + navigate("/dags"); + }, + }); + + return ( + + } + onClick={onOpen} + text={translate("dagActions.delete.button")} + width={width} + withText={withText} + /> + + deleteDag({ dagId })} + open={open} + resourceName={dagDisplayName} + title={translate("dagActions.delete.button")} + warningText={translate("dagActions.delete.warning")} + /> + + ); +}; + +export default DeleteDagButton; diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/FavoriteDagButton.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/FavoriteDagButton.tsx new file mode 100644 index 0000000000000..8f0960fdc2a14 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/components/DagActions/FavoriteDagButton.tsx @@ -0,0 +1,64 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
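A hypothetical call site for the new `DeleteDagButton`; the prop names are the component's actual API above, while the values are invented:

```tsx
import DeleteDagButton from "src/components/DagActions/DeleteDagButton";

// Icon-only variant for a crowded toolbar; withText defaults to true.
export const DagToolbarDelete = () => (
  <DeleteDagButton dagDisplayName="Example DAG" dagId="example_dag" withText={false} />
);
```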
+ */ +import { Box } from "@chakra-ui/react"; +import { useCallback, useMemo } from "react"; +import { useTranslation } from "react-i18next"; +import { FiStar } from "react-icons/fi"; + +import { useDagServiceGetDagsUi } from "openapi/queries"; +import { useFavoriteDag } from "src/queries/useFavoriteDag"; +import { useUnfavoriteDag } from "src/queries/useUnfavoriteDag"; + +import ActionButton from "../ui/ActionButton"; + +type FavoriteDagButtonProps = { + readonly dagId: string; + readonly withText?: boolean; +}; + +export const FavoriteDagButton = ({ dagId, withText = true }: FavoriteDagButtonProps) => { + const { t: translate } = useTranslation("dags"); + const { data: favorites } = useDagServiceGetDagsUi({ isFavorite: true }); + + const isFavorite = useMemo( + () => favorites?.dags.some((fav) => fav.dag_id === dagId) ?? false, + [favorites, dagId], + ); + + const { mutate: favoriteDag } = useFavoriteDag(); + const { mutate: unfavoriteDag } = useUnfavoriteDag(); + + const onToggle = useCallback(() => { + const mutationFn = isFavorite ? unfavoriteDag : favoriteDag; + + mutationFn({ dagId }); + }, [dagId, isFavorite, favoriteDag, unfavoriteDag]); + + return ( + + } + onClick={onToggle} + text={isFavorite ? translate("unfavoriteDag") : translate("favoriteDag")} + withText={withText} + /> + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/ParseDag.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/ParseDag.tsx index be540980699b3..c2fdbf6440c79 100644 --- a/airflow-core/src/airflow/ui/src/components/DagActions/ParseDag.tsx +++ b/airflow-core/src/airflow/ui/src/components/DagActions/ParseDag.tsx @@ -16,6 +16,8 @@ * specific language governing permissions and limitations * under the License. */ +import type { ButtonProps } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; import { AiOutlineFileSync } from "react-icons/ai"; import { Button } from "src/components/ui"; @@ -24,20 +26,22 @@ import { useDagParsing } from "src/queries/useDagParsing.ts"; type Props = { readonly dagId: string; readonly fileToken: string; -}; +} & ButtonProps; -const ParseDag = ({ dagId, fileToken }: Props) => { +const ParseDag = ({ dagId, fileToken, ...rest }: Props) => { + const { t: translate } = useTranslation("components"); const { isPending, mutate } = useDagParsing({ dagId }); return ( ); }; diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillButton.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillButton.tsx deleted file mode 100644 index 90b43d106eca9..0000000000000 --- a/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillButton.tsx +++ /dev/null @@ -1,45 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
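`FavoriteDagButton` derives its starred state from the favorites query instead of keeping local state, so the icon stays consistent with the server after either mutation. A minimal sketch of that derivation, with an abbreviated response shape:

```typescript
import { useMemo } from "react";

// Abbreviated stand-in for the DAG list response used above.
type FavoritesResponse = { dags: Array<{ dag_id: string }> } | undefined;

// Recomputes only when the query data or the dagId changes.
const useIsFavorite = (dagId: string, favorites: FavoritesResponse): boolean =>
  useMemo(() => favorites?.dags.some((fav) => fav.dag_id === dagId) ?? false, [favorites, dagId]);
```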
- */ -import { useDisclosure } from "@chakra-ui/react"; -import { RiArrowGoBackFill } from "react-icons/ri"; - -import type { DAGResponse, DAGWithLatestDagRunsResponse } from "openapi/requests/types.gen"; - -import { Button } from "../ui"; -import RunBackfillModal from "./RunBackfillModal"; - -type Props = { - readonly dag: DAGResponse | DAGWithLatestDagRunsResponse; -}; - -const RunBackfillButton: React.FC = ({ dag }) => { - const { onClose, onOpen, open } = useDisclosure(); - - return ( - <> - - - - ); -}; - -export default RunBackfillButton; diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillForm.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillForm.tsx index 1bbacf6dcede2..cb50113ec82a0 100644 --- a/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillForm.tsx +++ b/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillForm.tsx @@ -17,19 +17,27 @@ * under the License. */ import { Input, Box, Spacer, HStack, Field, VStack, Flex, Text } from "@chakra-ui/react"; +import dayjs from "dayjs"; import { useEffect, useState } from "react"; import { useForm, Controller, useWatch } from "react-hook-form"; +import { useTranslation } from "react-i18next"; import type { DAGResponse, DAGWithLatestDagRunsResponse, BackfillPostBody } from "openapi/requests/types.gen"; -import { Alert, Button } from "src/components/ui"; +import { Button } from "src/components/ui"; import { reprocessBehaviors } from "src/constants/reprocessBehaviourParams"; import { useCreateBackfill } from "src/queries/useCreateBackfill"; import { useCreateBackfillDryRun } from "src/queries/useCreateBackfillDryRun"; +import { useDagParams } from "src/queries/useDagParams"; +import { useParamStore } from "src/queries/useParamStore"; import { useTogglePause } from "src/queries/useTogglePause"; +import ConfigForm from "../ConfigForm"; +import { DateTimeInput } from "../DateTimeInput"; import { ErrorAlert } from "../ErrorAlert"; +import type { DagRunTriggerParams } from "../TriggerDag/TriggerDAGForm"; import { Checkbox } from "../ui/Checkbox"; import { RadioCardItem, RadioCardLabel, RadioCardRoot } from "../ui/RadioCard"; +import { getInlineMessage } from "./inlineMessage"; type RunBackfillFormProps = { readonly dag: DAGResponse | DAGWithLatestDagRunsResponse; @@ -37,26 +45,30 @@ type RunBackfillFormProps = { }; const today = new Date().toISOString().slice(0, 16); +type BackfillFormProps = DagRunTriggerParams & Omit; + const RunBackfillForm = ({ dag, onClose }: RunBackfillFormProps) => { + const { t: translate } = useTranslation("components"); const [errors, setErrors] = useState<{ conf?: string; date?: unknown }>({}); const [unpause, setUnpause] = useState(true); - - const { control, handleSubmit, reset, watch } = useForm({ + const [formError, setFormError] = useState(false); + const initialParamsDict = useDagParams(dag.dag_id, true); + const { conf } = useParamStore(); + const { control, handleSubmit, reset, watch } = useForm({ defaultValues: { + conf, dag_id: dag.dag_id, - dag_run_conf: {}, from_date: "", max_active_runs: 1, - reprocess_behavior: "failed", + reprocess_behavior: "none", run_backwards: false, to_date: "", }, mode: "onBlur", }); - const values = useWatch({ + const values = useWatch({ control, }); - const { data, isPending: isPendingDryRun } = useCreateBackfillDryRun({ requestBody: { requestBody: { @@ -70,9 +82,7 @@ const RunBackfillForm = ({ dag, onClose }: RunBackfillFormProps) => { }, }, }); - const { mutate: togglePause } = useTogglePause({ dagId: 
dag.dag_id }); - const { createBackfill, dateValidationError, error, isPending } = useCreateBackfill({ onSuccessConfirm: onClose, }); @@ -83,10 +93,21 @@ const RunBackfillForm = ({ dag, onClose }: RunBackfillFormProps) => { } }, [dateValidationError]); + useEffect(() => { + if (conf) { + reset((prevValues) => ({ + ...prevValues, + conf, + })); + } + }, [conf, reset]); + const dataIntervalStart = watch("from_date"); const dataIntervalEnd = watch("to_date"); + const noDataInterval = !Boolean(dataIntervalStart) || !Boolean(dataIntervalEnd); + const dataIntervalInvalid = dayjs(dataIntervalStart).isAfter(dayjs(dataIntervalEnd)); - const onSubmit = (fdata: BackfillPostBody) => { + const onSubmit = (fdata: BackfillFormProps) => { if (unpause && dag.is_paused) { togglePause({ dagId: dag.dag_id, @@ -96,11 +117,14 @@ const RunBackfillForm = ({ dag, onClose }: RunBackfillFormProps) => { }); } createBackfill({ - requestBody: fdata, + requestBody: { + ...fdata, + dag_run_conf: JSON.parse(fdata.conf) as Record, + }, }); }; - const onCancel = (fdata: BackfillPostBody) => { + const onCancel = (fdata: BackfillFormProps) => { reset(fdata); onClose(); }; @@ -114,26 +138,25 @@ const RunBackfillForm = ({ dag, onClose }: RunBackfillFormProps) => { total_entries: 0, }; + const inlineMessage = getInlineMessage(isPendingDryRun, affectedTasks.total_entries, translate); + return ( <> - + + - - Date Range + + {translate("backfill.dateRange")} - + ( - - + + {translate("backfill.dateRangeFrom")} + + {translate("backfill.errorStartDateBeforeEndDate")} )} /> @@ -141,20 +164,15 @@ const RunBackfillForm = ({ dag, onClose }: RunBackfillFormProps) => { control={control} name="to_date" render={({ field }) => ( - - + + {translate("backfill.dateRangeTo")} + )} /> + {noDataInterval || dataIntervalInvalid ? undefined : {inlineMessage}} { field.onChange(event); }} > - Reprocess Behaviour + + {translate("backfill.reprocessBehavior")} + {reprocessBehaviors.map((item) => ( ))} @@ -182,16 +202,6 @@ const RunBackfillForm = ({ dag, onClose }: RunBackfillFormProps) => { )} /> - ( - - Run Backwards - - )} - /> - { type="number" width={24} /> - Max Active Runs + {translate("backfill.maxRuns")} )} /> - {affectedTasks.total_entries > 0 ? ( - {affectedTasks.total_entries} runs will be triggered - ) : ( - No runs matching selected criteria. - )} + ( + + {translate("backfill.backwards")} + + )} + /> + + {dag.is_paused ? ( + <> + setUnpause(!unpause)} + wordBreak="break-all" + > + {translate("backfill.unpause", { dag_display_name: dag.dag_display_name })} + + + + ) : undefined} + + - {dag.is_paused ? ( - setUnpause(!unpause)}> - Unpause {dag.dag_display_name} on trigger - - ) : undefined} - - + diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillModal.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillModal.tsx deleted file mode 100644 index 8e48fd60268cd..0000000000000 --- a/airflow-core/src/airflow/ui/src/components/DagActions/RunBackfillModal.tsx +++ /dev/null @@ -1,47 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import { Heading } from "@chakra-ui/react"; -import React from "react"; - -import type { DAGResponse, DAGWithLatestDagRunsResponse } from "openapi/requests/types.gen"; -import { Dialog } from "src/components/ui"; - -import RunBackfillForm from "./RunBackfillForm"; - -type RunBackfillModalProps = { - readonly dag: DAGResponse | DAGWithLatestDagRunsResponse; - readonly onClose: () => void; - readonly open: boolean; -}; - -const RunBackfillModal: React.FC = ({ dag, onClose, open }) => ( - - - - Run Backfill - - - - - - - -); - -export default RunBackfillModal; diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/inlineMessage.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/inlineMessage.tsx new file mode 100644 index 0000000000000..13d80c19acd53 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/components/DagActions/inlineMessage.tsx @@ -0,0 +1,33 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Text, Skeleton } from "@chakra-ui/react"; +import type { TFunction } from "i18next"; + +export const getInlineMessage = (isPendingDryRun: boolean, totalEntries: number, translate: TFunction) => + isPendingDryRun ? ( + + ) : totalEntries === 0 ? ( + + {translate("backfill.affectedNone")} + + ) : ( + + {translate("backfill.affected", { count: totalEntries })} + + ); diff --git a/airflow-core/src/airflow/ui/src/components/DagRunInfo.tsx b/airflow-core/src/airflow/ui/src/components/DagRunInfo.tsx index 80742a3868747..83d17de18eed7 100644 --- a/airflow-core/src/airflow/ui/src/components/DagRunInfo.tsx +++ b/airflow-core/src/airflow/ui/src/components/DagRunInfo.tsx @@ -17,12 +17,13 @@ * under the License. */ import { VStack, Text, Box } from "@chakra-ui/react"; -import dayjs from "dayjs"; +import { useTranslation } from "react-i18next"; import type { DAGRunResponse } from "openapi/requests/types.gen"; import { StateBadge } from "src/components/StateBadge"; import Time from "src/components/Time"; import { Tooltip } from "src/components/ui"; +import { getDuration } from "src/utils"; type Props = { readonly endDate?: string | null; @@ -32,37 +33,47 @@ type Props = { readonly state?: DAGRunResponse["state"]; }; -const DagRunInfo = ({ endDate, logicalDate, runAfter, startDate, state }: Props) => ( - - {state === undefined ? undefined : State: {state}} - {Boolean(logicalDate) ? 
( - - Logical Date: - ) : undefined} - {Boolean(startDate) ? ( - - Start Date: - ) : undefined} - {Boolean(endDate) ? ( - - End Date: - ) : undefined} - {Boolean(startDate) ? ( - Duration: {dayjs.duration(dayjs(endDate).diff(startDate)).asSeconds()}s - ) : undefined} - - } - > - - - -); +const DagRunInfo = ({ endDate, logicalDate, runAfter, startDate, state }: Props) => { + const { t: translate } = useTranslation("common"); + + return ( + + {state === undefined ? undefined : ( + + {translate("state")}: {state} + + )} + {Boolean(logicalDate) ? ( + + {translate("logicalDate")}: + ) : undefined} + {Boolean(startDate) ? ( + + {translate("startDate")}: + ) : undefined} + {Boolean(endDate) ? ( + + {translate("endDate")}: + ) : undefined} + {Boolean(startDate) ? ( + + {translate("duration")}: {getDuration(startDate, endDate)} + + ) : undefined} + + } + > + + + + ); +}; export default DagRunInfo; diff --git a/airflow-core/src/airflow/ui/src/components/DagVersion.tsx b/airflow-core/src/airflow/ui/src/components/DagVersion.tsx index 68174bfceae01..2c65c3c09ebd3 100644 --- a/airflow-core/src/airflow/ui/src/components/DagVersion.tsx +++ b/airflow-core/src/airflow/ui/src/components/DagVersion.tsx @@ -17,6 +17,7 @@ * under the License. */ import { Text } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; import type { DagVersionResponse } from "openapi/requests/types.gen"; @@ -24,13 +25,15 @@ import Time from "./Time"; import { Tooltip } from "./ui"; export const DagVersion = ({ version }: { readonly version: DagVersionResponse | null | undefined }) => { + const { t: translate } = useTranslation("components"); + if (version === null || version === undefined) { return undefined; } return ( }> - v{version.version_number} + {translate("versionSelect.versionCode", { versionCode: version.version_number })} ); }; diff --git a/airflow-core/src/airflow/ui/src/components/DagVersionDetails.tsx b/airflow-core/src/airflow/ui/src/components/DagVersionDetails.tsx index 9d3254a44579b..00e8de73de2c9 100644 --- a/airflow-core/src/airflow/ui/src/components/DagVersionDetails.tsx +++ b/airflow-core/src/airflow/ui/src/components/DagVersionDetails.tsx @@ -17,11 +17,14 @@ * under the License. */ import { Link, Table } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; import type { DagVersionResponse } from "openapi/requests/types.gen"; import Time from "src/components/Time"; export const DagVersionDetails = ({ dagVersion }: { readonly dagVersion?: DagVersionResponse | null }) => { + const { t: translate } = useTranslation("components"); + if (dagVersion === null || dagVersion === undefined) { return undefined; } @@ -30,29 +33,29 @@ export const DagVersionDetails = ({ dagVersion }: { readonly dagVersion?: DagVer - Version ID + {translate("versionDetails.versionId")} {dagVersion.id} - Bundle Name + {translate("versionDetails.bundleName")} {dagVersion.bundle_name} {dagVersion.bundle_version === null ? undefined : ( - Bundle Version + {translate("versionDetails.bundleVersion")} {dagVersion.bundle_version} )} {dagVersion.bundle_url === null ? 
undefined : ( - Bundle Link + {translate("versionDetails.bundleLink")} {dagVersion.bundle_url} )} - Created At + {translate("versionDetails.createdAt")} diff --git a/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx b/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx index 0517b54d042c0..5e386a7c484b5 100644 --- a/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx +++ b/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx @@ -18,6 +18,7 @@ */ import { createListCollection, Flex, Select, type SelectValueChangeDetails, Text } from "@chakra-ui/react"; import { useCallback, useMemo } from "react"; +import { useTranslation } from "react-i18next"; import { useParams, useSearchParams } from "react-router-dom"; import { useDagVersionServiceGetDagVersions } from "openapi/queries"; @@ -33,16 +34,12 @@ type VersionSelected = { }; export const DagVersionSelect = ({ showLabel = true }: { readonly showLabel?: boolean }) => { + const { t: translate } = useTranslation("components"); const { dagId = "" } = useParams(); - - const { data, isLoading } = useDagVersionServiceGetDagVersions({ dagId, orderBy: "-version_number" }); - + const { data, isLoading } = useDagVersionServiceGetDagVersions({ dagId, orderBy: ["-version_number"] }); const [searchParams, setSearchParams] = useSearchParams(); - const selectedVersionNumber = useSelectedVersion(); - const selectedVersion = data?.dag_versions.find((dv) => dv.version_number === selectedVersionNumber); - const versionOptions = useMemo( () => createListCollection({ @@ -50,7 +47,6 @@ export const DagVersionSelect = ({ showLabel = true }: { readonly showLabel?: bo }), [data], ); - const handleStateChange = useCallback( ({ items }: SelectValueChangeDetails) => { if (items[0]) { @@ -71,13 +67,17 @@ export const DagVersionSelect = ({ showLabel = true }: { readonly showLabel?: bo value={selectedVersionNumber === undefined ? [] : [selectedVersionNumber.toString()]} width="250px" > - {showLabel ? Dag Version : undefined} + {showLabel ? ( + {translate("versionSelect.dagVersion")} + ) : undefined} {selectedVersion === undefined ? 
undefined : ( - v{selectedVersion.version_number} + + {translate("versionSelect.versionCode", { versionCode: selectedVersion.version_number })} + )} @@ -91,7 +91,9 @@ export const DagVersionSelect = ({ showLabel = true }: { readonly showLabel?: bo {versionOptions.items.map((option) => ( - v{option.version.version_number} + + {translate("versionSelect.versionCode", { versionCode: option.version.version_number })} + ))} diff --git a/airflow-core/src/airflow/ui/src/components/DataTable/DataTable.tsx b/airflow-core/src/airflow/ui/src/components/DataTable/DataTable.tsx index 785f04c0d6837..ff625191d1385 100644 --- a/airflow-core/src/airflow/ui/src/components/DataTable/DataTable.tsx +++ b/airflow-core/src/airflow/ui/src/components/DataTable/DataTable.tsx @@ -30,6 +30,7 @@ import { type Updater, } from "@tanstack/react-table"; import React, { type ReactNode, useCallback, useRef, useState } from "react"; +import { useTranslation } from "react-i18next"; import { CardList } from "src/components/DataTable/CardList"; import { TableList } from "src/components/DataTable/TableList"; @@ -75,6 +76,7 @@ export const DataTable = ({ skeletonCount = 10, total = 0, }: DataTableProps) => { + const { t: translate } = useTranslation(["common"]); const ref = useRef<{ tableRef: TanStackTable | undefined }>({ tableRef: undefined, }); @@ -142,7 +144,11 @@ export const DataTable = ({ {hasRows && display === "card" && cardDef !== undefined ? ( ) : undefined} - {!hasRows && !Boolean(isLoading) && {noRowsMessage ?? `No ${modelName}s found.`}} + {!hasRows && !Boolean(isLoading) && ( + + {noRowsMessage ?? translate("noItemsFound", { modelName })} + + )} {hasPagination ? ( = { readonly table: Table; }; -const FilterMenuButton = ({ table }: Props) => ( - - - - - - - - {table.getAllLeafColumns().map((column) => { - const text = flexRender(column.columnDef.header, { - column, - header: { column } as Header, - table, - }); +const FilterMenuButton = ({ table }: Props) => { + const { t: translate } = useTranslation("common"); + const filterLabel = translate("table.filterColumns"); - return text?.toString ? ( - - { - column.toggleVisibility(); - }} - > - {text} - - - ) : undefined; - })} - - -); + return ( + + + + + + + + {table + .getAllLeafColumns() + .filter((column) => column.getCanHide()) + .map((column) => { + const text = flexRender(column.columnDef.header, { + column, + header: { column } as Header, + table, + }); + + return text?.toString ? 
( + + { + column.toggleVisibility(); + }} + > + {text} + + + ) : undefined; + })} + + + ); +}; export default FilterMenuButton; diff --git a/airflow-core/src/airflow/ui/src/components/DataTable/TableList.tsx b/airflow-core/src/airflow/ui/src/components/DataTable/TableList.tsx index ab5f8c3c70cbe..4b9574f653127 100644 --- a/airflow-core/src/airflow/ui/src/components/DataTable/TableList.tsx +++ b/airflow-core/src/airflow/ui/src/components/DataTable/TableList.tsx @@ -19,6 +19,7 @@ import { Button, Table } from "@chakra-ui/react"; import { flexRender, type Row, type Table as TanStackTable } from "@tanstack/react-table"; import React, { Fragment } from "react"; +import { useTranslation } from "react-i18next"; import { TiArrowSortedDown, TiArrowSortedUp, TiArrowUnsorted } from "react-icons/ti"; import FilterMenuButton from "./FilterMenuButton"; @@ -29,75 +30,81 @@ type DataTableProps = { readonly table: TanStackTable; }; -export const TableList = ({ allowFiltering, renderSubComponent, table }: DataTableProps) => ( - - - {table.getHeaderGroups().map((headerGroup) => ( - - {headerGroup.headers.map(({ colSpan, column, getContext, id, isPlaceholder }, index) => { - const sort = column.getIsSorted(); - const canSort = column.getCanSort(); - const text = flexRender(column.columnDef.header, getContext()); - let rightIcon; +export const TableList = ({ allowFiltering, renderSubComponent, table }: DataTableProps) => { + const { t: translate } = useTranslation("components"); - const showFilters = allowFiltering && index === headerGroup.headers.length - 1; + return ( + + + {table.getHeaderGroups().map((headerGroup) => ( + + {headerGroup.headers.map(({ colSpan, column, getContext, id, isPlaceholder }, index) => { + const sort = column.getIsSorted(); + const canSort = column.getCanSort(); + const text = flexRender(column.columnDef.header, getContext()); + let rightIcon; - if (canSort) { - if (sort === "desc") { - rightIcon = ; - } else if (sort === "asc") { - rightIcon = ; - } else { - rightIcon = ; + const showFilters = allowFiltering && index === headerGroup.headers.length - 1; + + if (canSort) { + if (sort === "desc") { + rightIcon = ; + } else if (sort === "asc") { + rightIcon = ; + } else { + rightIcon = ; + } + + return ( + + {isPlaceholder ? undefined : ( + + )} + {showFilters ? : undefined} + + ); } return ( - {isPlaceholder ? undefined : ( - - )} + {isPlaceholder ? undefined : text} {showFilters ? : undefined} ); - } - - return ( - - {isPlaceholder ? undefined : text} - {showFilters ? 
: undefined} - - ); - })} - - ))} - - - {table.getRowModel().rows.map((row) => ( - - - {/* first row is a normal row */} - {row.getVisibleCells().map((cell) => ( - - {flexRender(cell.column.columnDef.cell, cell.getContext())} - - ))} + })} - {row.getIsExpanded() && ( + ))} + + + {table.getRowModel().rows.map((row) => ( + - {/* 2nd row is a custom 1 cell row */} - {renderSubComponent?.({ row })} + {/* first row is a normal row */} + {row.getVisibleCells().map((cell) => ( + + {flexRender(cell.column.columnDef.cell, cell.getContext())} + + ))} - )} - - ))} - - -); + {row.getIsExpanded() && ( + + {/* 2nd row is a custom 1 cell row */} + + {renderSubComponent?.({ row })} + + + )} + + ))} + + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx b/airflow-core/src/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx index e5103686f1bf5..0f1cdcb3b57ef 100644 --- a/airflow-core/src/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx +++ b/airflow-core/src/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx @@ -17,8 +17,11 @@ * under the License. */ import { HStack, IconButton } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; import { FiAlignJustify, FiGrid } from "react-icons/fi"; +import { Tooltip } from "src/components/ui"; + type Display = "card" | "table"; type Props = { @@ -26,35 +29,43 @@ type Props = { readonly setDisplay: (display: Display) => void; }; -export const ToggleTableDisplay = ({ display, setDisplay }: Props) => ( - - setDisplay("card")} - width={8} - > - - - setDisplay("table")} - width={8} - > - - - -); +export const ToggleTableDisplay = ({ display, setDisplay }: Props) => { + const { t: translate } = useTranslation("components"); + + return ( + + + setDisplay("card")} + width={8} + > + + + + + setDisplay("table")} + width={8} + > + + + + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/components/DataTable/useTableUrlState.ts b/airflow-core/src/airflow/ui/src/components/DataTable/useTableUrlState.ts index 286882bd02528..80b23a294e730 100644 --- a/airflow-core/src/airflow/ui/src/components/DataTable/useTableUrlState.ts +++ b/airflow-core/src/airflow/ui/src/components/DataTable/useTableUrlState.ts @@ -18,6 +18,8 @@ */ import { useCallback, useMemo } from "react"; import { useSearchParams } from "react-router-dom"; +import { useLocation } from "react-router-dom"; +import { useLocalStorage } from "usehooks-ts"; import { useConfig } from "src/queries/useConfig"; @@ -26,6 +28,13 @@ import type { TableState } from "./types"; export const useTableURLState = (defaultState?: Partial) => { const [searchParams, setSearchParams] = useSearchParams(); + const location = useLocation(); + const pageName = location.pathname; + + const [sorting, setSorting] = useLocalStorage( + `${pageName.replaceAll("/", "-").slice(1)}-table-sort`, + [], + ); const pageSize = useConfig("page_size") as number; @@ -34,7 +43,7 @@ export const useTableURLState = (defaultState?: Partial) => { pageIndex: 0, pageSize, }, - sorting: [], + sorting, } as const satisfies TableState; const handleStateChange = useCallback( @@ -42,8 +51,9 @@ export const useTableURLState = (defaultState?: Partial) => { setSearchParams(stateToSearchParams(state, defaultTableState), { replace: true, }); + setSorting(state.sorting); }, - [setSearchParams, defaultTableState], + [setSearchParams, defaultTableState, setSorting], ); const tableURLState = useMemo( diff --git a/airflow-core/src/airflow/ui/src/components/DateTimeInput.tsx 
b/airflow-core/src/airflow/ui/src/components/DateTimeInput.tsx
new file mode 100644
index 0000000000000..48c7a41099e09
--- /dev/null
+++ b/airflow-core/src/airflow/ui/src/components/DateTimeInput.tsx
@@ -0,0 +1,58 @@
+/*!
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { Input, type InputProps } from "@chakra-ui/react";
+import dayjs from "dayjs";
+import tz from "dayjs/plugin/timezone";
+import { forwardRef } from "react";
+
+import { useTimezone } from "src/context/timezone";
+
+dayjs.extend(tz);
+
+type Props = {
+  readonly value: string;
+} & InputProps;
+
+export const DateTimeInput = forwardRef(({ onChange, value, ...rest }, ref) => {
+  const { selectedTimezone } = useTimezone();
+
+  // Make the value timezone-aware
+  const date = dayjs(value).tz(selectedTimezone).format("YYYY-MM-DDTHH:mm:ss.SSS");
+
+  return (
+
+        onChange?.({
+          ...event,
+          target: {
+            ...event.target,
+            // Return a timezone-aware ISO string
+            value: dayjs(event.target.value).isValid()
+              ? dayjs(event.target.value).tz(selectedTimezone, true).toISOString()
+              : "",
+          },
+        })
+      }
+      ref={ref}
+      type="datetime-local"
+      value={date}
+      {...rest}
+    />
+  );
+});
diff --git a/airflow-core/src/airflow/ui/src/components/DeleteDialog.tsx b/airflow-core/src/airflow/ui/src/components/DeleteDialog.tsx
new file mode 100644
index 0000000000000..01db1ada889d1
--- /dev/null
+++ b/airflow-core/src/airflow/ui/src/components/DeleteDialog.tsx
@@ -0,0 +1,78 @@
+/*!
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
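
DateTimeInput feeds a naive timestamp, rendered in the user's selected timezone, to a datetime-local input, then reinterprets the edited value in that zone before emitting an ISO string. A minimal sketch of that round trip outside React, assuming the stock dayjs utc and timezone plugins; the zone name and timestamps are arbitrary examples, not values from this patch:

import dayjs from "dayjs";
import timezone from "dayjs/plugin/timezone";
import utc from "dayjs/plugin/utc";

// The timezone plugin depends on the utc plugin, so both are registered here.
dayjs.extend(utc);
dayjs.extend(timezone);

const selectedTimezone = "America/New_York"; // example zone

// Display: convert a stored UTC instant into a naive local string for the input.
const display = dayjs("2024-06-01T12:00:00.000Z").tz(selectedTimezone).format("YYYY-MM-DDTHH:mm:ss.SSS");
// "2024-06-01T08:00:00.000" (EDT is UTC-4)

// Commit: reinterpret the naive input value in the selected zone (the second
// argument `true` keeps the wall-clock time) and emit a timezone-aware ISO string.
const typed = "2024-06-01T08:00";
const committed = dayjs(typed).isValid() ? dayjs(typed).tz(selectedTimezone, true).toISOString() : "";
// "2024-06-01T12:00:00.000Z"

The isValid() guard mirrors the component's fallback to an empty string when the user clears the input.
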
+ */ +import { Text, Heading, HStack } from "@chakra-ui/react"; +import React from "react"; +import { useTranslation } from "react-i18next"; +import { FiTrash2 } from "react-icons/fi"; + +import { Button, Dialog } from "src/components/ui"; + +type DeleteDialogProps = { + readonly deleteButtonText?: string; + readonly isDeleting: boolean; + readonly onClose: () => void; + readonly onDelete: () => void; + readonly open: boolean; + readonly resourceName: string; + readonly title: string; + readonly warningText: string; +}; + +const DeleteDialog: React.FC = ({ + deleteButtonText, + isDeleting, + onClose, + onDelete, + open, + resourceName, + title, + warningText, +}) => { + const { t: translate } = useTranslation("common"); + + return ( + + + + {title} + + + + {translate("modal.delete.confirmation", { resourceName })} + + {warningText} + + + + + + + + + + + ); +}; + +export default DeleteDialog; diff --git a/airflow-core/src/airflow/ui/src/components/DurationChart.tsx b/airflow-core/src/airflow/ui/src/components/DurationChart.tsx index 6eefcd480057a..190ee70d496e6 100644 --- a/airflow-core/src/airflow/ui/src/components/DurationChart.tsx +++ b/airflow-core/src/airflow/ui/src/components/DurationChart.tsx @@ -31,11 +31,11 @@ import type { PartialEventContext } from "chartjs-plugin-annotation"; import annotationPlugin from "chartjs-plugin-annotation"; import dayjs from "dayjs"; import { Bar } from "react-chartjs-2"; +import { useTranslation } from "react-i18next"; +import { useNavigate } from "react-router-dom"; -import type { TaskInstanceResponse, DAGRunResponse } from "openapi/requests/types.gen"; +import type { TaskInstanceResponse, GridRunsResponse } from "openapi/requests/types.gen"; import { system } from "src/theme"; -import { pluralize } from "src/utils"; -import { getDuration } from "src/utils/datetime_utils"; ChartJS.register( CategoryScale, @@ -54,7 +54,9 @@ const average = (ctx: PartialEventContext, index: number) => { return values === undefined ? 0 : values.reduce((initial, next) => initial + next, 0) / values.length; }; -type RunResponse = DAGRunResponse | TaskInstanceResponse; +type RunResponse = GridRunsResponse | TaskInstanceResponse; + +const getDuration = (start: string, end: string | null) => dayjs.duration(dayjs(end).diff(start)).asSeconds(); export const DurationChart = ({ entries, @@ -63,6 +65,9 @@ export const DurationChart = ({ readonly entries: Array | undefined; readonly kind: "Dag Run" | "Task Instance"; }) => { + const { t: translate } = useTranslation(["components", "common"]); + const navigate = useNavigate(); + if (!entries) { return undefined; } @@ -94,7 +99,13 @@ export const DurationChart = ({ return ( - Last {pluralize(kind, entries.length)} + {entries.length > 1 + ? kind === "Dag Run" + ? translate("durationChart.lastDagRun_other", { count: entries.length }) + : translate("durationChart.lastTaskInstance_other", { count: entries.length }) + : kind === "Dag Run" + ? translate("durationChart.lastDagRun_one") + : translate("durationChart.lastTaskInstance_one")} { switch (kind) { case "Dag Run": { - const run = entry as DAGRunResponse; + const run = entry as GridRunsResponse; return run.queued_at !== null && run.start_date !== null && run.queued_at < run.start_date ? 
Number(getDuration(run.queued_at, run.start_date)) @@ -123,7 +134,7 @@ export const DurationChart = ({ return 0; } }), - label: "Queued duration", + label: translate("durationChart.queuedDuration"), }, { backgroundColor: entries.map( @@ -133,13 +144,41 @@ export const DurationChart = ({ data: entries.map((entry: RunResponse) => entry.start_date === null ? 0 : Number(getDuration(entry.start_date, entry.end_date)), ), - label: "Run duration", + label: translate("durationChart.runDuration"), }, ], labels: entries.map((entry: RunResponse) => dayjs(entry.run_after).format("YYYY-MM-DD, hh:mm:ss")), }} datasetIdKey="id" options={{ + onClick: (_event, elements) => { + const [element] = elements; + + if (!element) { + return; + } + + switch (kind) { + case "Dag Run": { + const entry = entries[element.index] as GridRunsResponse | undefined; + const baseUrl = `/dags/${entry?.dag_id}/runs/${entry?.run_id}`; + + navigate(baseUrl); + break; + } + case "Task Instance": { + const entry = entries[element.index] as TaskInstanceResponse | undefined; + const baseUrl = `/dags/${entry?.dag_id}/runs/${entry?.dag_run_id}`; + + navigate(`${baseUrl}/tasks/${entry?.task_id}`); + break; + } + default: + } + }, + onHover: (_event, elements, chart) => { + chart.canvas.style.cursor = elements.length > 0 ? "pointer" : "default"; + }, plugins: { annotation: { annotations: { @@ -155,11 +194,10 @@ export const DurationChart = ({ ticks: { maxTicksLimit: 3, }, - title: { align: "end", display: true, text: "Run After" }, + title: { align: "end", display: true, text: translate("common:dagRun.runAfter") }, }, - y: { - title: { align: "end", display: true, text: "Duration (seconds)" }, + title: { align: "end", display: true, text: translate("common:duration") }, }, }, }} diff --git a/airflow-core/src/airflow/ui/src/components/EditableMarkdownArea.tsx b/airflow-core/src/airflow/ui/src/components/EditableMarkdownArea.tsx new file mode 100644 index 0000000000000..a77bfcd617e93 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/components/EditableMarkdownArea.tsx @@ -0,0 +1,66 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Box, VStack, Editable, Text } from "@chakra-ui/react"; +import type { ChangeEvent } from "react"; + +import ReactMarkdown from "./ReactMarkdown"; + +const EditableMarkdownArea = ({ + mdContent, + onBlur, + placeholder, + setMdContent, +}: { + readonly mdContent?: string | null; + readonly onBlur?: () => void; + readonly placeholder?: string | null; + readonly setMdContent: (value: string) => void; +}) => ( + + ) => setMdContent(event.target.value)} + value={mdContent ?? ""} + > + + {Boolean(mdContent) ? 
( + {mdContent} + ) : ( + {placeholder} + )} + + + + +); + +export default EditableMarkdownArea; diff --git a/airflow-core/src/airflow/ui/src/components/EditableMarkdownButton.tsx b/airflow-core/src/airflow/ui/src/components/EditableMarkdownButton.tsx index ca9e5ab948574..3c3662d554b85 100644 --- a/airflow-core/src/airflow/ui/src/components/EditableMarkdownButton.tsx +++ b/airflow-core/src/airflow/ui/src/components/EditableMarkdownButton.tsx @@ -16,12 +16,13 @@ * specific language governing permissions and limitations * under the License. */ -import { Box, Heading, VStack, Editable, Text, Flex } from "@chakra-ui/react"; -import { type ChangeEvent, type ReactElement, useState } from "react"; +import { Box, Heading, VStack, Flex } from "@chakra-ui/react"; +import { type ReactElement, useState } from "react"; +import { useTranslation } from "react-i18next"; import { Button, Dialog } from "src/components/ui"; -import ReactMarkdown from "./ReactMarkdown"; +import EditableMarkdownArea from "./EditableMarkdownArea"; import ActionButton from "./ui/ActionButton"; const EditableMarkdownButton = ({ @@ -30,6 +31,7 @@ const EditableMarkdownButton = ({ isPending, mdContent, onConfirm, + onOpen, placeholder, setMdContent, text, @@ -40,11 +42,13 @@ const EditableMarkdownButton = ({ readonly isPending: boolean; readonly mdContent?: string | null; readonly onConfirm: () => void; + readonly onOpen: () => void; readonly placeholder: string; readonly setMdContent: (value: string) => void; readonly text: string; readonly withText?: boolean; }) => { + const { t: translate } = useTranslation("common"); const [isOpen, setIsOpen] = useState(false); return ( @@ -52,7 +56,12 @@ const EditableMarkdownButton = ({ setIsOpen(true)} + onClick={() => { + if (!isOpen) { + onOpen(); + } + setIsOpen(true); + }} text={text} withText={withText} /> @@ -62,6 +71,7 @@ const EditableMarkdownButton = ({ onOpenChange={() => setIsOpen(false)} open={isOpen} size="md" + unmountOnExit={true} > @@ -69,34 +79,11 @@ const EditableMarkdownButton = ({ - ) => setMdContent(event.target.value)} - value={mdContent ?? ""} - > - - {Boolean(mdContent) ? ( - {mdContent} - ) : ( - {placeholder} - )} - - - - + diff --git a/airflow-core/src/airflow/ui/src/components/ErrorAlert.tsx b/airflow-core/src/airflow/ui/src/components/ErrorAlert.tsx index 06841c9545303..434bbfda23e52 100644 --- a/airflow-core/src/airflow/ui/src/components/ErrorAlert.tsx +++ b/airflow-core/src/airflow/ui/src/components/ErrorAlert.tsx @@ -16,13 +16,13 @@ * specific language governing permissions and limitations * under the License. 
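
DeleteDialog, DurationChart, and the surrounding components all resolve their copy through useTranslation with a namespace, relying on i18next interpolation for values like resourceName and on the _one/_other plural suffixes whenever a count option is passed. A small standalone sketch of how such keys resolve; the English strings below are placeholder assumptions, not the project's real locale entries:

import i18next from "i18next";

// Hypothetical resources mirroring key names used in the components above.
await i18next.init({
  lng: "en",
  resources: {
    en: {
      common: {
        modal: { delete: { confirmation: "Are you sure you want to delete {{resourceName}}?" } },
      },
      components: {
        durationChart: {
          lastDagRun_one: "Last Dag Run",
          lastDagRun_other: "Last {{count}} Dag Runs",
        },
      },
    },
  },
});

// Interpolation fills named placeholders.
i18next.t("modal.delete.confirmation", { ns: "common", resourceName: "my_dag" });
// "Are you sure you want to delete my_dag?"

// With a count option, i18next picks the _one/_other suffix automatically.
i18next.t("durationChart.lastDagRun", { ns: "components", count: 1 }); // "Last Dag Run"
i18next.t("durationChart.lastDagRun", { ns: "components", count: 25 }); // "Last 25 Dag Runs"
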
*/ -import { HStack } from "@chakra-ui/react"; +import { HStack, Text } from "@chakra-ui/react"; import type { ApiError } from "openapi-gen/requests/core/ApiError"; import type { HTTPExceptionResponse, HTTPValidationError } from "openapi-gen/requests/types.gen"; import { Alert } from "./ui"; -type ExpandedApiError = { +export type ExpandedApiError = { body: HTTPExceptionResponse | HTTPValidationError | undefined; } & ApiError; @@ -44,10 +44,7 @@ export const ErrorAlert = ({ error: err }: Props) => { if (typeof details === "string") { detailMessage = details; } else if (Array.isArray(details)) { - detailMessage = details.map( - (detail) => ` - ${detail.loc.join(".")} ${detail.msg}`, - ); + detailMessage = details.map((detail) => `${detail.loc.join(".")} ${detail.msg}`); } else { detailMessage = Object.keys(details).map((key) => `${key}: ${details[key] as string}`); } @@ -57,7 +54,11 @@ export const ErrorAlert = ({ error: err }: Props) => { {error.status} {error.message} - {detailMessage === error.message ? undefined : {detailMessage}} + {detailMessage === error.message ? undefined : ( + + {detailMessage} + + )} ); diff --git a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldAdvancedArray.tsx b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldAdvancedArray.tsx index 80cb46dbdbc61..1d840aeca09c4 100644 --- a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldAdvancedArray.tsx +++ b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldAdvancedArray.tsx @@ -16,23 +16,21 @@ * specific language governing permissions and limitations * under the License. */ -import { Text } from "@chakra-ui/react"; -import { useState } from "react"; +import { useTranslation } from "react-i18next"; import { paramPlaceholder, useParamStore } from "src/queries/useParamStore"; import type { FlexibleFormElementProps } from "."; import { JsonEditor } from "../JsonEditor"; -export const FieldAdvancedArray = ({ name }: FlexibleFormElementProps) => { - const { paramsDict, setParamsDict } = useParamStore(); +export const FieldAdvancedArray = ({ name, namespace = "default", onUpdate }: FlexibleFormElementProps) => { + const { t: translate } = useTranslation("components"); + const { disabled, paramsDict, setParamsDict } = useParamStore(namespace); const param = paramsDict[name] ?? paramPlaceholder; - const [error, setError] = useState(undefined); // Determine the expected type based on schema const expectedType = param.schema.items?.type ?? "object"; const handleChange = (value: string) => { - setError(undefined); if (value === "") { if (paramsDict[name]) { // "undefined" values are removed from params, so we set it to null to avoid falling back to DAG defaults. 
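
FieldAdvancedArray commits the editor text only after parsing it as JSON and checking every element against the type declared in param.schema.items.type; failures are now routed through onUpdate instead of the removed local error state. A standalone sketch of just the validation step, reusing the error wording from the removed inline strings (the translated messages replace these in the component itself):

// Parse `value` as JSON and require an array whose elements match `expectedType`.
// Mirrors the checks in FieldAdvancedArray; throws TypeError on the first violation.
const parseTypedArray = (value: string, expectedType: "number" | "object"): unknown[] => {
  const parsed = JSON.parse(value) as unknown;

  if (!Array.isArray(parsed)) {
    throw new TypeError("Value must be an array.");
  }
  if (expectedType === "number" && !parsed.every((item) => typeof item === "number")) {
    throw new TypeError("All elements in the array must be numbers.");
  }
  if (expectedType === "object" && !parsed.every((item) => typeof item === "object" && item !== null)) {
    throw new TypeError("All elements in the array must be objects.");
  }

  return parsed;
};

parseTypedArray("[1, 2, 3]", "number"); // ok
parseTypedArray('[{"a": 1}]', "object"); // ok
// parseTypedArray('[1, "two"]', "number"); // TypeError: All elements in the array must be numbers.
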
@@ -45,18 +43,18 @@ export const FieldAdvancedArray = ({ name }: FlexibleFormElementProps) => { const parsedValue = JSON.parse(value) as unknown; if (!Array.isArray(parsedValue)) { - throw new TypeError("Value must be an array."); + throw new TypeError(translate("flexibleForm.validationErrorArrayNotArray")); } if (expectedType === "number" && !parsedValue.every((item) => typeof item === "number")) { // Ensure all elements in the array are numbers - throw new TypeError("All elements in the array must be numbers."); + throw new TypeError(translate("flexibleForm.validationErrorArrayNotNumbers")); } else if ( expectedType === "object" && !parsedValue.every((item) => typeof item === "object" && item !== null) ) { // Ensure all elements in the array are objects - throw new TypeError("All elements in the array must be objects."); + throw new TypeError(translate("flexibleForm.validationErrorArrayNotObject")); } if (paramsDict[name]) { @@ -64,24 +62,19 @@ export const FieldAdvancedArray = ({ name }: FlexibleFormElementProps) => { } setParamsDict(paramsDict); + onUpdate(String(parsedValue)); } catch (_error) { - setError(expectedType === "number" ? String(_error).replace("JSON", "Array") : _error); + onUpdate(undefined, expectedType === "number" ? String(_error).replace("JSON", "Array") : _error); } } }; return ( - <> - - {Boolean(error) ? ( - - {String(error)} - - ) : undefined} - + ); }; diff --git a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldBool.tsx b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldBool.tsx index 1d48249f7eaf6..f9a94253df1b6 100644 --- a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldBool.tsx +++ b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldBool.tsx @@ -21,8 +21,8 @@ import { paramPlaceholder, useParamStore } from "src/queries/useParamStore"; import type { FlexibleFormElementProps } from "."; import { Switch } from "../ui"; -export const FieldBool = ({ name }: FlexibleFormElementProps) => { - const { paramsDict, setParamsDict } = useParamStore(); +export const FieldBool = ({ name, namespace = "default" }: FlexibleFormElementProps) => { + const { disabled, paramsDict, setParamsDict } = useParamStore(namespace); const param = paramsDict[name] ?? paramPlaceholder; const onCheck = (value: boolean) => { if (paramsDict[name]) { @@ -36,6 +36,7 @@ export const FieldBool = ({ name }: FlexibleFormElementProps) => { onCheck(event.checked)} diff --git a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDateTime.tsx b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDateTime.tsx index a5deae64b9a7e..440932578d13e 100644 --- a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDateTime.tsx +++ b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDateTime.tsx @@ -21,9 +21,15 @@ import { Input, type InputProps } from "@chakra-ui/react"; import { paramPlaceholder, useParamStore } from "src/queries/useParamStore"; import type { FlexibleFormElementProps } from "."; +import { DateTimeInput } from "../DateTimeInput"; -export const FieldDateTime = ({ name, ...rest }: FlexibleFormElementProps & InputProps) => { - const { paramsDict, setParamsDict } = useParamStore(); +export const FieldDateTime = ({ + name, + namespace = "default", + onUpdate, + ...rest +}: FlexibleFormElementProps & InputProps) => { + const { disabled, paramsDict, setParamsDict } = useParamStore(namespace); const param = paramsDict[name] ?? 
paramPlaceholder; const handleChange = (value: string) => { if (paramsDict[name]) { @@ -39,13 +45,29 @@ export const FieldDateTime = ({ name, ...rest }: FlexibleFormElementProps & Inpu } setParamsDict(paramsDict); + onUpdate(value); }; + if (rest.type === "datetime-local") { + return ( + handleChange(event.target.value)} + size="sm" + value={((param.value ?? "") as string).slice(0, 16)} + /> + ); + } + return ( handleChange(event.target.value)} + required={rest.required} size="sm" type={rest.type} value={((param.value ?? "") as string).slice(0, 16)} diff --git a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDropdown.tsx b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDropdown.tsx index a12d4dc366d3f..2bd62a679ce51 100644 --- a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDropdown.tsx +++ b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldDropdown.tsx @@ -18,6 +18,7 @@ */ import { createListCollection } from "@chakra-ui/react/collection"; import { useRef } from "react"; +import { useTranslation } from "react-i18next"; import { Select } from "src/components/ui"; import { paramPlaceholder, useParamStore } from "src/queries/useParamStore"; @@ -33,8 +34,9 @@ const labelLookup = (key: string, valuesDisplay: Record | undefi }; const enumTypes = ["string", "number", "integer"]; -export const FieldDropdown = ({ name }: FlexibleFormElementProps) => { - const { paramsDict, setParamsDict } = useParamStore(); +export const FieldDropdown = ({ name, namespace = "default", onUpdate }: FlexibleFormElementProps) => { + const { t: translate } = useTranslation("components"); + const { disabled, paramsDict, setParamsDict } = useParamStore(namespace); const param = paramsDict[name] ?? paramPlaceholder; const selectOptions = createListCollection({ @@ -55,11 +57,13 @@ export const FieldDropdown = ({ name }: FlexibleFormElementProps) => { } setParamsDict(paramsDict); + onUpdate(value); }; return ( handleChange(event.value)} @@ -68,7 +72,7 @@ export const FieldDropdown = ({ name }: FlexibleFormElementProps) => { value={enumTypes.includes(typeof param.value) ? [param.value as string] : undefined} > - + {selectOptions.items.map((option) => ( diff --git a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultiSelect.tsx b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultiSelect.tsx index aa4a4e112f9f1..aa50c86ef6fc3 100644 --- a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultiSelect.tsx +++ b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultiSelect.tsx @@ -18,6 +18,7 @@ */ import { type MultiValue, Select as ReactSelect } from "chakra-react-select"; import { useState } from "react"; +import { useTranslation } from "react-i18next"; import { paramPlaceholder, useParamStore } from "src/queries/useParamStore"; @@ -31,8 +32,9 @@ const labelLookup = (key: string, valuesDisplay: Record | undefi return key; }; -export const FieldMultiSelect = ({ name }: FlexibleFormElementProps) => { - const { paramsDict, setParamsDict } = useParamStore(); +export const FieldMultiSelect = ({ name, namespace = "default", onUpdate }: FlexibleFormElementProps) => { + const { t: translate } = useTranslation("components"); + const { disabled, paramsDict, setParamsDict } = useParamStore(namespace); const param = paramsDict[name] ?? 
paramPlaceholder; // Initialize `selectedOptions` directly from `paramsDict` @@ -64,22 +66,25 @@ export const FieldMultiSelect = ({ name }: FlexibleFormElementProps) => { paramsDict[name].value = newValueArray; } setParamsDict(paramsDict); + onUpdate(String(newValueArray)); }; return ( ({ + (param.schema.examples ?? param.schema.enum)?.map((value) => ({ label: labelLookup(value, param.schema.values_display), value, })) ?? [] } + placeholder={translate("flexibleForm.placeholderMulti")} size="sm" value={selectedOptions} /> diff --git a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultilineText.tsx b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultilineText.tsx index bfefcb276d820..8306f125eb67b 100644 --- a/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultilineText.tsx +++ b/airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldMultilineText.tsx @@ -22,8 +22,8 @@ import { paramPlaceholder, useParamStore } from "src/queries/useParamStore"; import type { FlexibleFormElementProps } from "."; -export const FieldMultilineText = ({ name }: FlexibleFormElementProps) => { - const { paramsDict, setParamsDict } = useParamStore(); +export const FieldMultilineText = ({ name, namespace = "default", onUpdate }: FlexibleFormElementProps) => { + const { disabled, paramsDict, setParamsDict } = useParamStore(namespace); const param = paramsDict[name] ?? paramPlaceholder; const handleChange = (value: string) => { if (paramsDict[name]) { @@ -33,10 +33,12 @@ export const FieldMultilineText = ({ name }: FlexibleFormElementProps) => { } setParamsDict(paramsDict); + onUpdate(value); }; return (