diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py
deleted file mode 100644
index ba66e10e44a35..0000000000000
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-"""
-Example Airflow DAG for Google BigQuery service.
-"""
-import os
-from datetime import datetime
-
-from airflow import models
-from airflow.providers.google.cloud.operators.bigquery import (
-    BigQueryCreateEmptyDatasetOperator,
-    BigQueryCreateEmptyTableOperator,
-    BigQueryDeleteDatasetOperator,
-)
-from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
-
-PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
-DATASET_NAME = os.environ.get("GCP_BIGQUERY_DATASET_NAME", "test_dataset_transfer")
-DATA_EXPORT_BUCKET_NAME = os.environ.get("GCP_BIGQUERY_EXPORT_BUCKET_NAME", "INVALID BUCKET NAME")
-TABLE = "table_42"
-
-with models.DAG(
-    "example_bigquery_to_gcs",
-    schedule_interval=None,  # Override to match your needs
-    start_date=datetime(2021, 1, 1),
-    catchup=False,
-    tags=["example"],
-) as dag:
-    bigquery_to_gcs = BigQueryToGCSOperator(
-        task_id="bigquery_to_gcs",
-        source_project_dataset_table=f"{DATASET_NAME}.{TABLE}",
-        destination_cloud_storage_uris=[f"gs://{DATA_EXPORT_BUCKET_NAME}/export-bigquery.csv"],
-    )
-
-    create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET_NAME)
-
-    create_table = BigQueryCreateEmptyTableOperator(
-        task_id="create_table",
-        dataset_id=DATASET_NAME,
-        table_id=TABLE,
-        schema_fields=[
-            {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
-            {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
-        ],
-    )
-    create_dataset >> create_table >> bigquery_to_gcs
-
-    delete_dataset = BigQueryDeleteDatasetOperator(
-        task_id="delete_dataset", dataset_id=DATASET_NAME, delete_contents=True
-    )
-
-    bigquery_to_gcs >> delete_dataset
diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs_system.py
deleted file mode 100644
index 6887572c0f4fc..0000000000000
--- a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs_system.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""System tests for Google Cloud Build operators"""
-import pytest
-
-from airflow.providers.google.cloud.example_dags.example_bigquery_to_gcs import DATA_EXPORT_BUCKET_NAME
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
-from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
-
-
-@pytest.mark.backend("mysql", "postgres")
-@pytest.mark.system("google.cloud")
-@pytest.mark.credential_file(GCP_BIGQUERY_KEY)
-class BigQueryToGCSExampleDagsSystemTest(GoogleSystemTest):
-    @provide_gcp_context(GCP_BIGQUERY_KEY)
-    def setUp(self):
-        super().setUp()
-        self.create_gcs_bucket(DATA_EXPORT_BUCKET_NAME)
-
-    @provide_gcp_context(GCP_BIGQUERY_KEY)
-    def test_run_example_dag_queries(self):
-        self.run_dag('example_bigquery_to_gcs', CLOUD_DAG_FOLDER)
-
-    @provide_gcp_context(GCP_BIGQUERY_KEY)
-    def tearDown(self):
-        self.delete_gcs_bucket(DATA_EXPORT_BUCKET_NAME)
-        super().tearDown()
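
Note: this deletion follows the pattern used when provider example DAGs were migrated to the self-contained system-test layout (AIP-47), where the example DAG and its unittest-style pytest wrapper are replaced by a single DAG file under tests/system/ that provisions and tears down its own resources. The replacement file itself is not part of this diff; the sketch below is a hypothetical illustration of that convention. The module path, the SYSTEM_TESTS_* environment variables, and the tests.system.utils.watcher / get_test_run helpers follow the layout used in the Airflow repository, but every name here is an assumption, not content from this change.

# Hypothetical replacement under
# tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py --
# a sketch of the AIP-47 style, not the actual file added by this change.
import os
from datetime import datetime

from airflow import models
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateEmptyTableOperator,
    BigQueryDeleteDatasetOperator,
)
from airflow.providers.google.cloud.operators.gcs import (
    GCSCreateBucketOperator,
    GCSDeleteBucketOperator,
)
from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
from airflow.utils.trigger_rule import TriggerRule

# System tests derive resource names from these variables so concurrent runs
# in different environments cannot collide (assumed naming scheme).
ENV_ID = os.environ.get("SYSTEMS_TESTS_ENV_ID", os.environ.get("SYSTEM_TESTS_ENV_ID", "default"))
DAG_ID = "bigquery_to_gcs"
DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
TABLE = "table_42"

with models.DAG(
    DAG_ID,
    schedule_interval="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example", "bigquery"],
) as dag:
    # The test provisions its own bucket instead of relying on
    # GCP_BIGQUERY_EXPORT_BUCKET_NAME being set externally.
    create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME)
    create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET_NAME)
    create_table = BigQueryCreateEmptyTableOperator(
        task_id="create_table",
        dataset_id=DATASET_NAME,
        table_id=TABLE,
        schema_fields=[
            {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
            {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
        ],
    )
    bigquery_to_gcs = BigQueryToGCSOperator(
        task_id="bigquery_to_gcs",
        source_project_dataset_table=f"{DATASET_NAME}.{TABLE}",
        destination_cloud_storage_uris=[f"gs://{BUCKET_NAME}/export-bigquery.csv"],
    )
    # Teardown tasks run even when the export fails, so nothing leaks.
    delete_dataset = BigQueryDeleteDatasetOperator(
        task_id="delete_dataset",
        dataset_id=DATASET_NAME,
        delete_contents=True,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    delete_bucket = GCSDeleteBucketOperator(
        task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
    )

    create_bucket >> create_dataset >> create_table >> bigquery_to_gcs >> [delete_dataset, delete_bucket]

    from tests.system.utils.watcher import watcher

    # The watcher propagates teardown failures to the overall test result,
    # which plain trigger rules would otherwise mask.
    list(dag.tasks) >> watcher()

from tests.system.utils import get_test_run  # noqa: E402

# Lets the DAG be executed via pytest through the shared system-test runner,
# replacing the deleted GoogleSystemTest subclass above.
test_run = get_test_run(dag)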