9 changes: 9 additions & 0 deletions airflow-core/docs/index.rst
@@ -32,6 +32,15 @@ Airflow workflows are defined entirely in Python. This "workflows as code" appro
- **Extensible**: The Airflow framework includes a wide range of built-in operators and can be extended to fit your needs.
- **Flexible**: Airflow leverages the `Jinja <https://jinja.palletsprojects.com>`_ templating engine, allowing rich customizations.

.. _task-sdk-docs:

Task SDK
========

For the Airflow Task SDK, see the standalone reference and tutorial site:

https://airflow.apache.org/docs/task-sdk/stable/

Dags
-----------------------------------------

2 changes: 2 additions & 0 deletions airflow-core/src/airflow/example_dags/example_asset_alias.py
@@ -32,6 +32,7 @@

from __future__ import annotations

# [START example_asset_alias]
import pendulum

from airflow.sdk import DAG, Asset, AssetAlias, task
@@ -94,3 +95,4 @@ def consume_asset_event_from_asset_alias(*, inlet_events=None):
print(event)

consume_asset_event_from_asset_alias()
# [END example_asset_alias]
4 changes: 2 additions & 2 deletions airflow-core/src/airflow/example_dags/example_assets.py
@@ -52,16 +52,15 @@

from __future__ import annotations

# [START asset_def]
import pendulum

from airflow.providers.standard.operators.bash import BashOperator
from airflow.sdk import DAG, Asset
from airflow.timetables.assets import AssetOrTimeSchedule
from airflow.timetables.trigger import CronTriggerTimetable

# [START asset_def]
dag1_asset = Asset("s3://dag1/output_1.txt", extra={"hi": "bye"})
# [END asset_def]
dag2_asset = Asset("s3://dag2/output_1.txt", extra={"hi": "bye"})
dag3_asset = Asset("s3://dag3/output_3.txt", extra={"hi": "bye"})

@@ -189,3 +188,4 @@
task_id="conditional_asset_and_time_based_timetable",
bash_command="sleep 5",
)
# [END asset_def]
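The hunk above widens the `asset_def` markers so that a docs `exampleinclude` pulls in the whole asset example rather than just the single `Asset` definition. As a rough sketch of the kind of content those markers now bracket (the DAG id and task id below are illustrative, not taken from the file):

```python
# A minimal sketch, not the full example module: an Asset definition and a DAG
# scheduled on that asset. Only the Asset line and the imports appear in the
# diff; everything else here is an assumption for illustration.
import pendulum

from airflow.providers.standard.operators.bash import BashOperator
from airflow.sdk import DAG, Asset

dag1_asset = Asset("s3://dag1/output_1.txt", extra={"hi": "bye"})

with DAG(
    dag_id="asset_consumer_sketch",
    schedule=[dag1_asset],  # run when dag1_asset records an update
    start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
    catchup=False,
):
    BashOperator(task_id="consume", bash_command="sleep 5")
```

With a single asset in the schedule list, the consumer DAG runs each time that asset records an update.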
@@ -17,6 +17,7 @@
# under the License.
from __future__ import annotations

# [START dag_decorator_usage]
from typing import TYPE_CHECKING, Any

import httpx
@@ -43,7 +44,6 @@ def execute(self, context: Context):
return httpx.get(self.url).json()


# [START dag_decorator_usage]
@dag(
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
@@ -19,6 +19,7 @@

from __future__ import annotations

# [START example_dynamic_task_mapping]
from datetime import datetime

from airflow.sdk import DAG, task
@@ -56,3 +57,5 @@ def add_10(num):
_get_nums = get_nums()
_times_2 = times_2.expand(num=_get_nums)
add_10.expand(num=_times_2)

# [END example_dynamic_task_mapping]
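The new markers wrap the entire dynamic task mapping example, but the hunk only shows its tail. A hedged, self-contained sketch of the `expand()` chain it ends with (the bodies of `get_nums` and `times_2` are assumptions; only `add_10` and the wiring are visible in the diff):

```python
# Sketch of a mapped-task chain: each task fans out over the upstream results.
from datetime import datetime

from airflow.sdk import DAG, task

with DAG(dag_id="example_dynamic_task_mapping_sketch", start_date=datetime(2022, 3, 4)):

    @task
    def get_nums():
        return [1, 2, 3]  # assumed body

    @task
    def times_2(num):
        return num * 2  # assumed body

    @task
    def add_10(num):
        return num + 10

    _get_nums = get_nums()
    _times_2 = times_2.expand(num=_get_nums)
    add_10.expand(num=_times_2)
```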
@@ -19,6 +19,7 @@

from __future__ import annotations

# [START example_setup_teardown_taskflow]
import pendulum

from airflow.sdk import DAG, setup, task, task_group, teardown
@@ -104,3 +105,4 @@ def inner_teardown(cluster_id):

# and let's put section 1 inside the outer setup and teardown tasks
section_1()
# [END example_setup_teardown_taskflow]
3 changes: 3 additions & 0 deletions airflow-core/src/airflow/example_dags/example_simplest_dag.py
@@ -18,6 +18,7 @@

from __future__ import annotations

# [START simplest_dag]
from airflow.sdk import dag, task


@@ -30,4 +31,6 @@ def my_task():
my_task()


# [END simplest_dag]

example_simplest_dag()
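The `simplest_dag` markers now start at the import and end after the task call, so the rendered snippet is a complete, runnable DAG. A hedged reconstruction of what that block most likely contains (the task body and the `dag()` arguments are assumptions):

```python
# Hedged reconstruction of the block between [START simplest_dag] and
# [END simplest_dag]; the task body and decorator arguments are assumptions.
from airflow.sdk import dag, task


@dag(schedule=None)
def example_simplest_dag():
    @task
    def my_task():
        print("hello")

    my_task()


example_simplest_dag()
```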
@@ -19,12 +19,12 @@

from __future__ import annotations

# [START howto_task_group_decorator]
import pendulum

from airflow.sdk import DAG, task, task_group


# [START howto_task_group_decorator]
# Creating Tasks
@task
def task_start():
2 changes: 2 additions & 0 deletions airflow-core/src/airflow/example_dags/example_xcomargs.py
@@ -19,6 +19,7 @@

from __future__ import annotations

# [START example_xcomargs]
import logging

import pendulum
@@ -63,3 +64,4 @@ def print_value(value, ts=None):
xcom_args_b = print_value("second!")

bash_op1 >> xcom_args_a >> xcom_args_b >> bash_op2
# [END example_xcomargs]
4 changes: 2 additions & 2 deletions dev/README_RELEASE_AIRFLOW.md
@@ -362,7 +362,7 @@ The command does the following:
3. Triggers S3 to GitHub Sync

```shell script
breeze workflow-run publish-docs --ref <tag> --site-env <staging/live/auto>
breeze workflow-run publish-docs --ref <tag> --site-env <staging/live/auto> apache-airflow docker-stack task-sdk
```

The `--ref` parameter should be the tag of the release candidate you are publishing.
@@ -387,7 +387,7 @@ The release manager publishes the documentation using GitHub Actions workflow
the tag you use - pre-release tags go to staging. But you can also override it and specify the destination
manually to be `live` or `staging`.

You should specify 'apache-airflow docker-stack' passed as packages to be
You should specify 'apache-airflow docker-stack task-sdk' passed as packages to be
built.

After that step, the provider documentation should be available under https://airflow.stage.apache.org//
6 changes: 6 additions & 0 deletions devel-common/src/sphinx_exts/docs_build/docs_builder.py
@@ -117,6 +117,10 @@ def _src_dir(self) -> Path:
if self.package_name.startswith("apache-airflow-providers-"):
package_paths = self.package_name[len("apache-airflow-providers-") :].split("-")
return (AIRFLOW_CONTENT_ROOT_PATH / "providers").joinpath(*package_paths) / "docs"
if self.package_name == "apache-airflow-ctl":
return AIRFLOW_CONTENT_ROOT_PATH / "airflow-ctl" / "docs"
if self.package_name == "task-sdk":
return AIRFLOW_CONTENT_ROOT_PATH / "task-sdk" / "docs"
console.print(f"[red]Unknown package name: {self.package_name}")
sys.exit(1)

@@ -330,6 +334,8 @@ def get_available_packages(include_suspended: bool = False, short_form: bool = F
"apache-airflow",
*provider_names,
"apache-airflow-providers",
"apache-airflow-ctl",
"task-sdk",
"helm-chart",
"docker-stack",
]
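For reference, the two `_src_dir` branches added above, plus the existing provider branch, can be exercised in isolation roughly like this. `AIRFLOW_CONTENT_ROOT_PATH` is a stand-in for the constant defined in `docs_builder.py`, and the error path raises instead of calling `sys.exit` as the real method does:

```python
# Standalone sketch of the package-name -> docs source dir dispatch extended
# by this hunk; paths and the error handling are simplified for illustration.
from pathlib import Path

AIRFLOW_CONTENT_ROOT_PATH = Path("/path/to/airflow")  # placeholder root


def docs_src_dir(package_name: str) -> Path:
    if package_name.startswith("apache-airflow-providers-"):
        parts = package_name[len("apache-airflow-providers-"):].split("-")
        return (AIRFLOW_CONTENT_ROOT_PATH / "providers").joinpath(*parts) / "docs"
    if package_name == "apache-airflow-ctl":
        return AIRFLOW_CONTENT_ROOT_PATH / "airflow-ctl" / "docs"
    if package_name == "task-sdk":
        return AIRFLOW_CONTENT_ROOT_PATH / "task-sdk" / "docs"
    raise ValueError(f"Unknown package name: {package_name}")


print(docs_src_dir("task-sdk"))                          # .../task-sdk/docs
print(docs_src_dir("apache-airflow-providers-amazon"))   # .../providers/amazon/docs
```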
8 changes: 4 additions & 4 deletions devel-common/src/sphinx_exts/docs_build/spelling_checks.py
@@ -71,17 +71,17 @@ def __lt__(self, other):
line_no_b: int = other.line_no or 0
context_line_a: str = self.context_line or ""
context_line_b: str = other.context_line or ""
left: tuple[Path, int, int, str, str] = (
left: tuple[Path, int, str, str, str] = (
file_path_a,
line_no_a,
context_line_a,
self.spelling or "",
self.message or "",
)
right: tuple[Path, int, int, str, str] = (
right: tuple[Path, int, str, str, str] = (
file_path_b,
line_no_b or 0,
context_line_b or 0,
line_no_b,
context_line_b,
other.spelling or "",
other.message or "",
)
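The annotation change and the dropped `or 0` fallbacks matter because `__lt__` compares these tuples element-wise: `context_line` is a string, so the old `tuple[Path, int, int, str, str]` annotation mislabeled the third element, and `context_line_b or 0` could place an `int` opposite a `str` and break sorting. A small hedged demonstration, with the `Path` element dropped for brevity:

```python
# Python compares tuples element-wise; comparing a str with an int raises
# TypeError. With the old fallback `context_line_b or 0`, an empty context
# line on one side became 0 and could break sorting of spelling errors.
left = (1, "some context line", "word", "message")
right = (1, 0, "word", "message")  # old behaviour: empty context line -> 0

try:
    left < right
except TypeError as exc:
    print(f"TypeError: {exc}")  # '<' not supported between instances of 'str' and 'int'

# New behaviour: keep strings on both sides so the comparison is well defined.
right_fixed = (1, "", "word", "message")
print(left < right_fixed)  # False: "some context line" sorts after ""
```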
1 change: 1 addition & 0 deletions devel-common/src/sphinx_exts/exampleinclude.py
@@ -78,6 +78,7 @@ class ExampleInclude(SphinxDirective):
"emphasize-lines": directives.unchanged_required,
"class": directives.class_option,
"name": directives.unchanged,
"caption": directives.unchanged_required,
"diff": directives.unchanged_required,
}
