Skip to content

Commit

Permalink
Enable Docs DAG in CI leveraging existing CI connections
Browse files Browse the repository at this point in the history
  • Loading branch information
pankajkoti committed Dec 26, 2024
1 parent 44f8655 commit d44d5bb
Showing 1 changed file with 9 additions and 40 deletions.
49 changes: 9 additions & 40 deletions dev/dags/dbt_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,6 @@
from pathlib import Path

from airflow import DAG
from airflow.decorators import task
from airflow.exceptions import AirflowNotFoundException
from airflow.hooks.base import BaseHook
from pendulum import datetime

from cosmos import ProfileConfig
Expand All @@ -27,9 +24,9 @@
DEFAULT_DBT_ROOT_PATH = Path(__file__).parent / "dbt"
DBT_ROOT_PATH = Path(os.getenv("DBT_ROOT_PATH", DEFAULT_DBT_ROOT_PATH))

S3_CONN_ID = "aws_docs"
AZURE_CONN_ID = "azure_docs"
GCS_CONN_ID = "gcs_docs"
S3_CONN_ID = "aws_s3_conn"
AZURE_CONN_ID = "azure_wasb_conn"
GCS_CONN_ID = "gcp_gs_conn"

profile_config = ProfileConfig(
profile_name="default",
Expand All @@ -41,32 +38,6 @@
)


@task.branch(task_id="which_upload")
def which_upload():
    """Branch to only the docs-upload tasks whose Airflow connection exists.

    Probes each provider connection via ``BaseHook.get_connection``; a missing
    connection raises ``AirflowNotFoundException`` and that provider's task is
    simply skipped. Returns the list of downstream task_ids to run.
    """
    # Connection id -> downstream task_id, checked in AWS, Azure, GCS order.
    conn_to_task = {
        S3_CONN_ID: "generate_dbt_docs_aws",
        AZURE_CONN_ID: "generate_dbt_docs_azure",
        GCS_CONN_ID: "generate_dbt_docs_gcs",
    }

    selected = []
    for conn_id, docs_task_id in conn_to_task.items():
        try:
            BaseHook.get_connection(conn_id)
        except AirflowNotFoundException:
            # Connection not configured in this environment; skip its task.
            continue
        selected.append(docs_task_id)

    return selected


with DAG(
dag_id="docs_dag",
start_date=datetime(2023, 1, 1),
Expand All @@ -79,24 +50,22 @@ def which_upload():
task_id="generate_dbt_docs_aws",
project_dir=DBT_ROOT_PATH / "jaffle_shop",
profile_config=profile_config,
connection_id=S3_CONN_ID,
bucket_name="cosmos-docs",
connection_id="aws_s3_conn",
bucket_name="cosmos-ci-docs",
)

generate_dbt_docs_azure = DbtDocsAzureStorageOperator(
task_id="generate_dbt_docs_azure",
project_dir=DBT_ROOT_PATH / "jaffle_shop",
profile_config=profile_config,
connection_id=AZURE_CONN_ID,
bucket_name="$web",
connection_id="azure_wasb_conn",
bucket_name="cosmos-ci-docs",
)

generate_dbt_docs_gcs = DbtDocsGCSOperator(
task_id="generate_dbt_docs_gcs",
project_dir=DBT_ROOT_PATH / "jaffle_shop",
profile_config=profile_config,
connection_id=GCS_CONN_ID,
bucket_name="cosmos-docs",
connection_id="gcp_gs_conn",
bucket_name="cosmos-ci-docs",
)

which_upload() >> [generate_dbt_docs_aws, generate_dbt_docs_azure, generate_dbt_docs_gcs]

0 comments on commit d44d5bb

Please sign in to comment.