Rename dataset related python variable names to asset (apache#41348)
Lee-W authored and ellisms committed Nov 13, 2024
1 parent 161786c commit 9a38320
Showing 199 changed files with 5,061 additions and 4,127 deletions.
4 changes: 2 additions & 2 deletions RELEASE_NOTES.rst
@@ -642,7 +642,7 @@ Dataset URIs are now validated on input (#37005)

Datasets must use a URI that conform to rules laid down in AIP-60, and the value
will be automatically normalized when the DAG file is parsed. See
-`documentation on Datasets <https://airflow.apache.org/docs/apache-airflow/stable/authoring-and-scheduling/datasets.html>`_ for
+`documentation on Datasets <https://airflow.apache.org/docs/apache-airflow/2.9.0/authoring-and-scheduling/datasets.html>`_ for
a more detailed description on the rules.
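As a rough illustration of the AIP-60 rule described in this note, a minimal sketch (not part of this diff) of declaring a Dataset with a URI in the Airflow 2.x API; the bucket and key below are placeholders:

# Illustrative only; names are assumptions, not taken from the commit above.
from airflow.datasets import Dataset

# An AIP-60-style URI (scheme plus path). The value is validated on input
# and normalized automatically when the DAG file is parsed.
orders_data = Dataset("s3://example-bucket/orders/2024-01-01.parquet")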

You may need to change your Dataset identifiers if they look like a URI, but are
@@ -3264,7 +3264,7 @@ If you have the producer and consumer in different files you do not need to use
Datasets represent the abstract concept of a dataset, and (for now) do not have any direct read or write
capability - in this release we are adding the foundational feature that we will build upon.

-For more info on Datasets please see :doc:`/authoring-and-scheduling/datasets`.
+For more info on Datasets please see `Datasets documentation <https://airflow.apache.org/docs/apache-airflow/2.4.0/authoring-and-scheduling/datasets.html>`_.
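To make the producer/consumer pattern referenced in this hunk concrete, a short sketch (not part of this diff; DAG ids and the bucket name are assumed placeholders) of dataset-aware scheduling in the Airflow 2.x API:

# Illustrative sketch of a producer DAG updating a Dataset and a consumer
# DAG scheduled on it; all names are placeholders.
import pendulum

from airflow.datasets import Dataset
from airflow.decorators import dag, task

example_dataset = Dataset("s3://example-bucket/example.csv")


@dag(schedule=None, start_date=pendulum.datetime(2024, 1, 1), catchup=False)
def producer():
    @task(outlets=[example_dataset])  # marks the dataset as updated when the task succeeds
    def write_example():
        ...

    write_example()


@dag(schedule=[example_dataset], start_date=pendulum.datetime(2024, 1, 1), catchup=False)
def consumer():
    @task
    def read_example():
        ...

    read_example()


producer()
consumer()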

Expanded dynamic task mapping support
"""""""""""""""""""""""""""""""""""""
6 changes: 3 additions & 3 deletions airflow/__init__.py
@@ -55,7 +55,7 @@
 __all__ = [
     "__version__",
     "DAG",
-    "Dataset",
+    "Asset",
     "XComArg",
 ]

@@ -76,7 +76,7 @@
 # Things to lazy import in form {local_name: ('target_module', 'target_name', 'deprecated')}
 __lazy_imports: dict[str, tuple[str, str, bool]] = {
     "DAG": (".models.dag", "DAG", False),
-    "Dataset": (".datasets", "Dataset", False),
+    "Asset": (".assets", "Asset", False),
     "XComArg": (".models.xcom_arg", "XComArg", False),
     "version": (".version", "", False),
     # Deprecated lazy imports
@@ -86,8 +86,8 @@
     # These objects are imported by PEP-562, however, static analyzers and IDE's
     # have no idea about typing of these objects.
     # Add it under TYPE_CHECKING block should help with it.
+    from airflow.models.asset import Asset
     from airflow.models.dag import DAG
-    from airflow.models.dataset import Dataset
     from airflow.models.xcom_arg import XComArg
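The `__lazy_imports` mapping and the PEP-562 comment above refer to module-level `__getattr__`. A generic, self-contained sketch of that pattern (not Airflow's actual implementation; the mapping entry is illustrative only):

# Generic PEP 562 lazy-import sketch for a package __init__.py.
from importlib import import_module

_lazy_imports = {
    # local name -> (relative module, attribute name); illustrative entry
    "Asset": (".assets", "Asset"),
}


def __getattr__(name: str):
    """Resolve attributes listed in _lazy_imports on first access."""
    try:
        module_path, attr_name = _lazy_imports[name]
    except KeyError:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}") from None
    value = getattr(import_module(module_path, __name__), attr_name)
    globals()[name] = value  # cache so later lookups bypass __getattr__
    return value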


16 changes: 8 additions & 8 deletions airflow/api_connexion/endpoints/dag_run_endpoint.py
@@ -39,6 +39,10 @@
     format_datetime,
     format_parameters,
 )
+from airflow.api_connexion.schemas.asset_schema import (
+    AssetEventCollection,
+    asset_event_collection_schema,
+)
 from airflow.api_connexion.schemas.dag_run_schema import (
     DAGRunCollection,
     DAGRunCollectionSchema,
@@ -50,10 +54,6 @@
     set_dagrun_note_form_schema,
     set_dagrun_state_form_schema,
 )
-from airflow.api_connexion.schemas.dataset_schema import (
-    DatasetEventCollection,
-    dataset_event_collection_schema,
-)
 from airflow.api_connexion.schemas.task_instance_schema import (
     TaskInstanceReferenceCollection,
     task_instance_reference_collection_schema,
@@ -112,12 +112,12 @@ def get_dag_run(


 @security.requires_access_dag("GET", DagAccessEntity.RUN)
-@security.requires_access_dataset("GET")
+@security.requires_access_asset("GET")
 @provide_session
 def get_upstream_dataset_events(
     *, dag_id: str, dag_run_id: str, session: Session = NEW_SESSION
 ) -> APIResponse:
-    """If dag run is dataset-triggered, return the dataset events that triggered it."""
+    """If dag run is dataset-triggered, return the asset events that triggered it."""
     dag_run: DagRun | None = session.scalar(
         select(DagRun).where(
             DagRun.dag_id == dag_id,
@@ -130,8 +130,8 @@ def get_upstream_dataset_events(
             detail=f"DAGRun with DAG ID: '{dag_id}' and DagRun ID: '{dag_run_id}' not found",
         )
     events = dag_run.consumed_dataset_events
-    return dataset_event_collection_schema.dump(
-        DatasetEventCollection(dataset_events=events, total_entries=len(events))
+    return asset_event_collection_schema.dump(
+        AssetEventCollection(dataset_events=events, total_entries=len(events))
     )
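For context, the handler above serves the upstream dataset events route of Airflow's stable REST API. A hedged usage sketch follows; the base URL, credentials, DAG id, run id, and the response field names are assumptions about a typical deployment, not part of this diff:

# Hedged example of calling the endpoint backed by get_upstream_dataset_events.
import requests

BASE_URL = "http://localhost:8080/api/v1"  # assumed local Airflow webserver
DAG_ID = "example_dag"                     # placeholder DAG id
RUN_ID = "example_run_id"                  # placeholder dag_run_id

resp = requests.get(
    f"{BASE_URL}/dags/{DAG_ID}/dagRuns/{RUN_ID}/upstreamDatasetEvents",
    auth=("admin", "admin"),  # assumed basic-auth setup; verify against your deployment
)
resp.raise_for_status()
payload = resp.json()
# The collection dumped above carries the events plus a total count;
# field names here are assumptions based on the dataset event schema.
for event in payload.get("dataset_events", []):
    print(event.get("dataset_uri"), event.get("timestamp"))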


(diffs for the remaining changed files are not shown)
