diff --git a/airflow-core/tests/unit/always/test_project_structure.py b/airflow-core/tests/unit/always/test_project_structure.py index 9740fb37939c5..989aa2de1ba50 100644 --- a/airflow-core/tests/unit/always/test_project_structure.py +++ b/airflow-core/tests/unit/always/test_project_structure.py @@ -468,6 +468,31 @@ class TestGoogleProviderProjectStructure(ExampleCoverageTest, AssetsCoverageTest "airflow.providers.google.marketing_platform.operators.GoogleDisplayVideo360RunQueryOperator", "airflow.providers.google.marketing_platform.operators.GoogleDisplayVideo360DownloadReportV2Operator", "airflow.providers.google.marketing_platform.sensors.GoogleDisplayVideo360RunQuerySensor", + "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook", + "airflow.providers.google.cloud.links.datacatalog.DataCatalogEntryGroupLink", + "airflow.providers.google.cloud.links.datacatalog.DataCatalogEntryLink", + "airflow.providers.google.cloud.links.datacatalog.DataCatalogTagTemplateLink", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogListTagsOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogLookupEntryOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogSearchCatalogOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator", } BASE_CLASSES = { @@ -496,6 +521,7 @@ class TestGoogleProviderProjectStructure(ExampleCoverageTest, AssetsCoverageTest "airflow.providers.google.cloud.operators.vertex_ai.auto_ml.AutoMLTrainingJobBaseOperator", "airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.UpdateEndpointOperator", "airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.GetBatchPredictionJobOperator", + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator", } 
ASSETS_NOT_REQUIRED = {
diff --git a/providers/google/docs/operators/cloud/datacatalog.rst b/providers/google/docs/operators/cloud/datacatalog.rst
index 3f71e6eff971e..01d8a68f4e957 100644
--- a/providers/google/docs/operators/cloud/datacatalog.rst
+++ b/providers/google/docs/operators/cloud/datacatalog.rst
@@ -19,6 +19,14 @@ Google Cloud Data Catalog Operators
=======================================
+.. _datacatalog-deprecation-warning:
+.. warning::
+    The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog.
+    Replacements for the Data Catalog operators can be found in ``airflow.providers.google.cloud.operators.dataplex``.
+    For further details, please refer to the `official guide `__.
+    The mapping between Data Catalog and Dataplex Universal Catalog entities is presented in the table
+    `Mapping between Data Catalog and Dataplex Universal Catalog `__
+    under the `Learn more about simultaneous availability of Data Catalog metadata in Dataplex Universal Catalog` block.

The `Data Catalog `__ is a fully managed and scalable metadata
management service that allows organizations to quickly discover, manage and understand all their data in

@@ -40,6 +48,12 @@ Prerequisite Tasks
Managing an entries
^^^^^^^^^^^^^^^^^^^

+.. warning::
+    The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use
+    :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryOperator` or
+    :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogLookupEntryOperator`.
+    For more information please check this :ref:`section `.
+
Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.Entry` for representing entry

.. contents::

@@ -52,6 +66,12 @@ Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.Entry` for repr
Getting an entry
""""""""""""""""

+.. warning::
+    The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use
+    :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryOperator` or
+    :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogLookupEntryOperator`.
+    For more information please check this :ref:`section `.
+
Getting an entry is performed with the
:class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryOperator` and
:class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogLookupEntryOperator`

@@ -59,58 +79,33 @@ operators.

The ``CloudDataCatalogGetEntryOperator`` use Project ID, Entry Group ID, Entry ID to get the entry.

-.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py
-    :language: python
-    :dedent: 4
-    :start-after: [START howto_operator_gcp_datacatalog_get_entry]
-    :end-before: [END howto_operator_gcp_datacatalog_get_entry]
-
You can use :ref:`Jinja templating ` with
:template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryOperator`
parameters which allows you to dynamically determine values.
The result is saved to :ref:`XCom `, which allows it to be used by other operators.

-.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py
-    :language: python
-    :dedent: 4
-    :start-after: [START howto_operator_gcp_datacatalog_get_entry_result]
-    :end-before: [END howto_operator_gcp_datacatalog_get_entry_result]
-
The ``CloudDataCatalogLookupEntryOperator`` use the resource name to get the entry.

-..
exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_linked_resource] - :end-before: [END howto_operator_gcp_datacatalog_lookup_entry_linked_resource] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogLookupEntryOperator` parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_result] - :end-before: [END howto_operator_gcp_datacatalog_lookup_entry_result] - .. _howto/operator:CloudDataCatalogCreateEntryOperator: Creating an entry """"""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator` operator create the entry. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs] - :end-before: [END howto_operator_gcp_datacatalog_create_entry_gcs] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator` parameters which allows you to dynamically determine values. @@ -119,26 +114,19 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry ID can be read with the ``entry_id`` key. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs_result] - :end-before: [END howto_operator_gcp_datacatalog_create_entry_gcs_result] - .. _howto/operator:CloudDataCatalogUpdateEntryOperator: Updating an entry """"""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator` operator update the entry. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_update_entry] - :end-before: [END howto_operator_gcp_datacatalog_update_entry] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator` parameters which allows you to dynamically determine values. @@ -148,15 +136,14 @@ parameters which allows you to dynamically determine values. Deleting a entry """""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. 
Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogDeleteEntryOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator` operator delete the entry. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_delete_entry] - :end-before: [END howto_operator_gcp_datacatalog_delete_entry] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator` parameters which allows you to dynamically determine values. @@ -166,6 +153,11 @@ parameters which allows you to dynamically determine values. Managing a entry groups ^^^^^^^^^^^^^^^^^^^^^^^ +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryGroupOperator`. + For more information please check this :ref:`section `. + Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.Entry` for representing a entry groups. .. contents:: @@ -177,15 +169,14 @@ Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.Entry` for repr Creating an entry group """"""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryGroupOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator` operator create the entry group. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_entry_group] - :end-before: [END howto_operator_gcp_datacatalog_create_entry_group] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator` parameters which allows you to dynamically determine values. @@ -194,52 +185,38 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry group ID can be read with the ``entry_group_id`` key. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_entry_group_result] - :end-before: [END howto_operator_gcp_datacatalog_create_entry_group_result] - .. _howto/operator:CloudDataCatalogGetEntryGroupOperator: Getting an entry group """""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryGroupOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator` operator get the entry group. -.. 
exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_get_entry_group] - :end-before: [END howto_operator_gcp_datacatalog_get_entry_group] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator` parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_get_entry_group_result] - :end-before: [END howto_operator_gcp_datacatalog_get_entry_group_result] - .. _howto/operator:CloudDataCatalogDeleteEntryGroupOperator: Deleting an entry group """"""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogDeleteEntryGroupOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator` operator delete the entry group. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_entries.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_delete_entry_group] - :end-before: [END howto_operator_gcp_datacatalog_delete_entry_group] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator` parameters which allows you to dynamically determine values. @@ -249,6 +226,11 @@ parameters which allows you to dynamically determine values. Managing tag templates ^^^^^^^^^^^^^^^^^^^^^^ +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateAspectTypeOperator`. + For more information please check this :ref:`section `. + Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplate` for representing a tag templates. .. contents:: @@ -260,15 +242,14 @@ Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplate` fo Creating a tag template """"""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_tag_template] - :end-before: [END howto_operator_gcp_datacatalog_create_tag_template] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator` parameters which allows you to dynamically determine values. 
@@ -277,26 +258,19 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag template ID can be read with the ``tag_template_id`` key. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_result] - :end-before: [END howto_operator_gcp_datacatalog_create_tag_template_result] - .. _howto/operator:CloudDataCatalogDeleteTagTemplateOperator: Deleting a tag template """"""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogDeleteAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator` operator delete the tag template. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template] - :end-before: [END howto_operator_gcp_datacatalog_delete_tag_template] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator` parameters which allows you to dynamically determine values. @@ -307,41 +281,33 @@ parameters which allows you to dynamically determine values. Getting a tag template """""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_get_tag_template] - :end-before: [END howto_operator_gcp_datacatalog_get_tag_template] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator` parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_get_tag_template_result] - :end-before: [END howto_operator_gcp_datacatalog_get_tag_template_result] - .. _howto/operator:CloudDataCatalogUpdateTagTemplateOperator: Updating a tag template """"""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator` operator update the tag template. -.. 
exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_update_tag_template] - :end-before: [END howto_operator_gcp_datacatalog_update_tag_template] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator` parameters which allows you to dynamically determine values. @@ -351,6 +317,12 @@ parameters which allows you to dynamically determine values. Managing tags ^^^^^^^^^^^^^ +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryOperator` or + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator`. + For more information please check this :ref:`section `. + Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.Tag` for representing a tag. .. contents:: @@ -362,15 +334,15 @@ Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.Tag` for repres Creating a tag on an entry """""""""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryOperator` or + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator` operator get the tag template. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tags.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_tag] - :end-before: [END howto_operator_gcp_datacatalog_create_tag] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator` parameters which allows you to dynamically determine values. @@ -379,26 +351,19 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag ID can be read with the ``tag_id`` key. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tags.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_tag_result] - :end-before: [END howto_operator_gcp_datacatalog_create_tag_result] - .. _howto/operator:CloudDataCatalogUpdateTagOperator: Updating a tag """""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator` operator update the tag template. -.. 
exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tags.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_update_tag] - :end-before: [END howto_operator_gcp_datacatalog_update_tag] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator` parameters which allows you to dynamically determine values. @@ -408,15 +373,14 @@ parameters which allows you to dynamically determine values. Deleting a tag """""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator` operator delete the tag template. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tags.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_delete_tag] - :end-before: [END howto_operator_gcp_datacatalog_delete_tag] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator` parameters which allows you to dynamically determine values. @@ -426,33 +390,31 @@ parameters which allows you to dynamically determine values. Listing tags on an entry """""""""""""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogListTagsOperator` operator get list of the tags on the entry. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tags.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_list_tags] - :end-before: [END howto_operator_gcp_datacatalog_list_tags] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogListTagsOperator` parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tags.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_list_tags_result] - :end-before: [END howto_operator_gcp_datacatalog_list_tags_result] - - .. _howto/operator:CloudDataCatalogTagTemplateFieldssOperators: Managing a tag template fields ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator` or + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateAspectTypeOperator`. + For more information please check this :ref:`section `. + Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplateField` for representing a tag template fields. .. 
contents:: @@ -464,15 +426,15 @@ Operators uses a :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplateFiel Creating a field """""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator` or + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator` operator get the tag template field. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field] - :end-before: [END howto_operator_gcp_datacatalog_create_tag_template_field] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator` parameters which allows you to dynamically determine values. @@ -481,26 +443,19 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created field ID can be read with the ``tag_template_field_id`` key. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field_result] - :end-before: [END howto_operator_gcp_datacatalog_create_tag_template_field_result] - .. _howto/operator:CloudDataCatalogRenameTagTemplateFieldOperator: Renaming a field """""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator` operator rename the tag template field. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_rename_tag_template_field] - :end-before: [END howto_operator_gcp_datacatalog_rename_tag_template_field] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator` parameters which allows you to dynamically determine values. @@ -510,15 +465,14 @@ parameters which allows you to dynamically determine values. Updating a field """""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator` operator get the tag template field. -.. 
exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_update_tag_template_field] - :end-before: [END howto_operator_gcp_datacatalog_update_tag_template_field] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator` parameters which allows you to dynamically determine values. @@ -529,15 +483,14 @@ parameters which allows you to dynamically determine values. Deleting a field """""""""""""""" +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator`. + For more information please check this :ref:`section `. + The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator` operator delete the tag template field. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template_field] - :end-before: [END howto_operator_gcp_datacatalog_delete_tag_template_field] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator` parameters which allows you to dynamically determine values. @@ -546,31 +499,24 @@ parameters which allows you to dynamically determine values. .. _howto/operator:CloudDataCatalogSearchCatalogOperator: Search resources -^^^^^^^^^^^^^^^^ +"""""""""""""""" + +.. warning:: + The Data Catalog will be discontinued on January 30, 2026 in favor of Dataplex Universal Catalog. Please use + :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCatalogSearchEntriesOperator`. + For more information please check this :ref:`section `. The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogSearchCatalogOperator` operator searches Data Catalog for multiple resources like entries, tags that match a query. The ``query`` parameters should defined using `search syntax `__. -.. exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_search_catalog] - :end-before: [END howto_operator_gcp_datacatalog_search_catalog] - You can use :ref:`Jinja templating ` with :template-fields:`airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogSearchCatalogOperator` parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. 
exampleinclude:: /../../google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_gcp_datacatalog_search_catalog_result] - :end-before: [END howto_operator_gcp_datacatalog_search_catalog_result] - Reference ^^^^^^^^^ diff --git a/providers/google/provider.yaml b/providers/google/provider.yaml index 22abf30bb333c..6adde975f34b7 100644 --- a/providers/google/provider.yaml +++ b/providers/google/provider.yaml @@ -493,9 +493,6 @@ operators: - integration-name: Google Compute Engine python-modules: - airflow.providers.google.cloud.operators.compute - - integration-name: Google Data Catalog - python-modules: - - airflow.providers.google.cloud.operators.datacatalog - integration-name: Google Dataflow python-modules: - airflow.providers.google.cloud.operators.dataflow @@ -752,9 +749,6 @@ hooks: python-modules: - airflow.providers.google.cloud.hooks.compute - airflow.providers.google.cloud.hooks.compute_ssh - - integration-name: Google Data Catalog - python-modules: - - airflow.providers.google.cloud.hooks.datacatalog - integration-name: Google Dataflow python-modules: - airflow.providers.google.cloud.hooks.dataflow @@ -1145,9 +1139,6 @@ extra-links: - airflow.providers.google.cloud.links.compute.ComputeInstanceGroupManagerDetailsLink - airflow.providers.google.cloud.links.cloud_tasks.CloudTasksQueueLink - airflow.providers.google.cloud.links.cloud_tasks.CloudTasksLink - - airflow.providers.google.cloud.links.datacatalog.DataCatalogEntryGroupLink - - airflow.providers.google.cloud.links.datacatalog.DataCatalogEntryLink - - airflow.providers.google.cloud.links.datacatalog.DataCatalogTagTemplateLink - airflow.providers.google.cloud.links.dataproc.DataprocLink - airflow.providers.google.cloud.links.dataproc.DataprocListLink - airflow.providers.google.cloud.links.dataproc.DataprocClusterLink diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/datacatalog.py b/providers/google/src/airflow/providers/google/cloud/hooks/datacatalog.py index 2cfa308ff9248..5f599eb2fee6f 100644 --- a/providers/google/src/airflow/providers/google/cloud/hooks/datacatalog.py +++ b/providers/google/src/airflow/providers/google/cloud/hooks/datacatalog.py @@ -32,8 +32,9 @@ TagTemplateField, ) -from airflow.exceptions import AirflowException +from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.providers.google.common.consts import CLIENT_INFO +from airflow.providers.google.common.deprecated import deprecated from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook if TYPE_CHECKING: @@ -41,6 +42,13 @@ from google.protobuf.field_mask_pb2 import FieldMask +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.hooks.dataplex.DataplexHook", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogHook(GoogleBaseHook): """ Hook for Google Cloud Data Catalog Service. 
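The deprecation helper applied to the hook above wraps the class, and the deprecations_ignore.yml entries added later in this patch suggest the warning is emitted when the hook is instantiated. A minimal sketch (not part of this patch) of how a test could observe that behaviour, assuming instantiation-time emission and that the exact message text is composed by the helper:

import pytest

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook


def test_datacatalog_hook_emits_deprecation_warning():
    # Creating the deprecated hook should emit AirflowProviderDeprecationWarning,
    # nudging users towards DataplexHook before the planned removal on January 30, 2026.
    with pytest.warns(AirflowProviderDeprecationWarning):
        CloudDataCatalogHook(gcp_conn_id="google_cloud_default")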
diff --git a/providers/google/src/airflow/providers/google/cloud/links/datacatalog.py b/providers/google/src/airflow/providers/google/cloud/links/datacatalog.py index 4cc15dd97af75..d55687e92a5f6 100644 --- a/providers/google/src/airflow/providers/google/cloud/links/datacatalog.py +++ b/providers/google/src/airflow/providers/google/cloud/links/datacatalog.py @@ -21,7 +21,9 @@ from typing import TYPE_CHECKING +from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.google.cloud.links.base import BaseGoogleLink +from airflow.providers.google.common.deprecated import deprecated if TYPE_CHECKING: from airflow.models import BaseOperator @@ -43,6 +45,13 @@ ) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.links.dataplex.DataplexCatalogEntryGroupLink", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class DataCatalogEntryGroupLink(BaseGoogleLink): """Helper class for constructing Data Catalog Entry Group Link.""" @@ -65,6 +74,13 @@ def persist( ) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.links.dataplex.DataplexCatalogEntryLink", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class DataCatalogEntryLink(BaseGoogleLink): """Helper class for constructing Data Catalog Entry Link.""" @@ -93,6 +109,13 @@ def persist( ) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.links.dataplex.DataplexCatalogAspectTypeLink", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class DataCatalogTagTemplateLink(BaseGoogleLink): """Helper class for constructing Data Catalog Tag Template Link.""" diff --git a/providers/google/src/airflow/providers/google/cloud/operators/datacatalog.py b/providers/google/src/airflow/providers/google/cloud/operators/datacatalog.py index 13d29c5772fd4..08a6654d44ba1 100644 --- a/providers/google/src/airflow/providers/google/cloud/operators/datacatalog.py +++ b/providers/google/src/airflow/providers/google/cloud/operators/datacatalog.py @@ -32,6 +32,7 @@ TagTemplateField, ) +from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook from airflow.providers.google.cloud.links.datacatalog import ( DataCatalogEntryGroupLink, @@ -39,6 +40,7 @@ DataCatalogTagTemplateLink, ) from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator +from airflow.providers.google.common.deprecated import deprecated from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID if TYPE_CHECKING: @@ -48,6 +50,13 @@ from airflow.utils.context import Context +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogCreateEntryOperator(GoogleCloudBaseOperator): """ Creates an entry. 
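Because the deprecation is applied at the operator-class level, existing DAGs that instantiate these operators will start emitting AirflowProviderDeprecationWarning at DAG parse time. As a temporary stopgap while migrating, the warning can be filtered in the DAG file; this is only a sketch, and it assumes the decorator's reason text ends up in the warning message:

import warnings

from airflow.exceptions import AirflowProviderDeprecationWarning

# Temporary, local filter while the migration to the Dataplex Universal Catalog
# operators is in progress; remove it once the DAG uses the dataplex operators.
warnings.filterwarnings(
    "ignore",
    message=".*Data Catalog.*",  # assumes the reason text appears in the warning message
    category=AirflowProviderDeprecationWarning,
)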
@@ -166,6 +175,13 @@ def execute(self, context: Context): return Entry.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryGroupOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogCreateEntryGroupOperator(GoogleCloudBaseOperator): """ Creates an EntryGroup. @@ -279,6 +295,14 @@ def execute(self, context: Context): return EntryGroup.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateEntryOperator, " + "airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogCreateTagOperator(GoogleCloudBaseOperator): """ Creates a tag on an entry. @@ -415,6 +439,13 @@ def execute(self, context: Context): return Tag.to_dict(tag) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogCreateTagTemplateOperator(GoogleCloudBaseOperator): """ Creates a tag template. @@ -525,6 +556,14 @@ def execute(self, context: Context): return TagTemplate.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator, " + "airflow.providers.google.cloud.operators.dataplex.DataplexCatalogCreateAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogCreateTagTemplateFieldOperator(GoogleCloudBaseOperator): r""" Creates a field in a tag template. @@ -644,6 +683,13 @@ def execute(self, context: Context): return TagTemplateField.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogDeleteEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogDeleteEntryOperator(GoogleCloudBaseOperator): """ Deletes an existing entry. @@ -729,6 +775,13 @@ def execute(self, context: Context) -> None: self.log.info("Entry doesn't exists. Skipping.") +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogDeleteEntryGroupOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogDeleteEntryGroupOperator(GoogleCloudBaseOperator): """ Deletes an EntryGroup. @@ -811,6 +864,13 @@ def execute(self, context: Context) -> None: self.log.info("Entry doesn't exists. 
skipping") +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogDeleteTagOperator(GoogleCloudBaseOperator): """ Deletes a tag. @@ -901,6 +961,13 @@ def execute(self, context: Context) -> None: self.log.info("Entry doesn't exists. skipping") +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogDeleteAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogDeleteTagTemplateOperator(GoogleCloudBaseOperator): """ Deletes a tag template and all tags using the template. @@ -988,6 +1055,13 @@ def execute(self, context: Context) -> None: self.log.info("Tag Template doesn't exists. skipping") +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogDeleteTagTemplateFieldOperator(GoogleCloudBaseOperator): """ Deletes a field in a tag template and all uses of that field. @@ -1078,6 +1152,13 @@ def execute(self, context: Context) -> None: self.log.info("Tag Template field doesn't exists. skipping") +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogGetEntryOperator(GoogleCloudBaseOperator): """ Gets an entry. @@ -1170,6 +1251,13 @@ def execute(self, context: Context) -> dict: return Entry.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryGroupOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogGetEntryGroupOperator(GoogleCloudBaseOperator): """ Gets an entry group. @@ -1264,6 +1352,13 @@ def execute(self, context: Context) -> dict: return EntryGroup.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogGetTagTemplateOperator(GoogleCloudBaseOperator): """ Gets a tag template. 
@@ -1350,6 +1445,13 @@ def execute(self, context: Context) -> dict: return TagTemplate.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogGetEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogListTagsOperator(GoogleCloudBaseOperator): """ Lists the tags on an Entry. @@ -1450,6 +1552,13 @@ def execute(self, context: Context) -> list: return [Tag.to_dict(item) for item in result] +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogLookupEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogLookupEntryOperator(GoogleCloudBaseOperator): r""" Get an entry by target resource name. @@ -1541,6 +1650,13 @@ def execute(self, context: Context) -> dict: return Entry.to_dict(result) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogRenameTagTemplateFieldOperator(GoogleCloudBaseOperator): """ Renames a field in a tag template. @@ -1638,6 +1754,13 @@ def execute(self, context: Context) -> None: ) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogSearchEntriesOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogSearchCatalogOperator(GoogleCloudBaseOperator): r""" Searches Data Catalog for multiple resources like entries, tags that match a query. @@ -1748,6 +1871,13 @@ def execute(self, context: Context) -> list: return [SearchCatalogResult.to_dict(item) for item in result] +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogUpdateEntryOperator(GoogleCloudBaseOperator): """ Updates an existing entry. @@ -1858,6 +1988,13 @@ def execute(self, context: Context) -> None: ) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateEntryOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogUpdateTagOperator(GoogleCloudBaseOperator): """ Updates an existing tag. 
@@ -1972,6 +2109,13 @@ def execute(self, context: Context) -> None: ) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogUpdateTagTemplateOperator(GoogleCloudBaseOperator): """ Updates a tag template. @@ -2081,6 +2225,13 @@ def execute(self, context: Context) -> None: ) +@deprecated( + planned_removal_date="January 30, 2026", + use_instead="airflow.providers.google.cloud.operators.dataplex.DataplexCatalogUpdateAspectTypeOperator", + reason="The Data Catalog will be discontinued on January 30, 2026 " + "in favor of Dataplex Universal Catalog.", + category=AirflowProviderDeprecationWarning, +) class CloudDataCatalogUpdateTagTemplateFieldOperator(GoogleCloudBaseOperator): """ Updates a field in a tag template. This method cannot be used to update the field type. diff --git a/providers/google/src/airflow/providers/google/get_provider_info.py b/providers/google/src/airflow/providers/google/get_provider_info.py index b816bc9a28f49..f615cacb6f87d 100644 --- a/providers/google/src/airflow/providers/google/get_provider_info.py +++ b/providers/google/src/airflow/providers/google/get_provider_info.py @@ -528,10 +528,6 @@ def get_provider_info(): "integration-name": "Google Compute Engine", "python-modules": ["airflow.providers.google.cloud.operators.compute"], }, - { - "integration-name": "Google Data Catalog", - "python-modules": ["airflow.providers.google.cloud.operators.datacatalog"], - }, { "integration-name": "Google Dataflow", "python-modules": ["airflow.providers.google.cloud.operators.dataflow"], @@ -865,10 +861,6 @@ def get_provider_info(): "airflow.providers.google.cloud.hooks.compute_ssh", ], }, - { - "integration-name": "Google Data Catalog", - "python-modules": ["airflow.providers.google.cloud.hooks.datacatalog"], - }, { "integration-name": "Google Dataflow", "python-modules": ["airflow.providers.google.cloud.hooks.dataflow"], @@ -1414,9 +1406,6 @@ def get_provider_info(): "airflow.providers.google.cloud.links.compute.ComputeInstanceGroupManagerDetailsLink", "airflow.providers.google.cloud.links.cloud_tasks.CloudTasksQueueLink", "airflow.providers.google.cloud.links.cloud_tasks.CloudTasksLink", - "airflow.providers.google.cloud.links.datacatalog.DataCatalogEntryGroupLink", - "airflow.providers.google.cloud.links.datacatalog.DataCatalogEntryLink", - "airflow.providers.google.cloud.links.datacatalog.DataCatalogTagTemplateLink", "airflow.providers.google.cloud.links.dataproc.DataprocLink", "airflow.providers.google.cloud.links.dataproc.DataprocListLink", "airflow.providers.google.cloud.links.dataproc.DataprocClusterLink", diff --git a/providers/google/tests/deprecations_ignore.yml b/providers/google/tests/deprecations_ignore.yml index 81ab445000bb9..17f07c00a6d7d 100644 --- a/providers/google/tests/deprecations_ignore.yml +++ b/providers/google/tests/deprecations_ignore.yml @@ -113,3 +113,70 @@ - providers/google/tests/unit/google/cloud/hooks/test_automl.py::TestAutoMLHook::test_list_datasets - providers/google/tests/unit/google/cloud/hooks/test_automl.py::TestAutoMLHook::test_delete_dataset - providers/google/tests/unit/google/cloud/hooks/test_automl.py::TestAutoMLHook::test_get_dataset +- 
providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalog::test_lookup_entry_with_linked_resource +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalog::test_lookup_entry_with_sql_resource +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalog::test_lookup_entry_without_resource +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalog::test_search_catalog +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_create_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_create_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_create_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_create_tag_protobuff +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_create_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_create_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_delete_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_delete_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_delete_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_delete_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_delete_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_get_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_get_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_get_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_list_tags +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_get_tag_for_template_name +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_rename_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_update_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_update_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_update_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithDefaultProjectIdHook::test_update_tag_template_field +- 
providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_create_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_create_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_create_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_create_tag_protobuff +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_create_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_create_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_delete_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_delete_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_delete_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_delete_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_delete_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_get_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_get_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_get_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_list_tags +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_get_tag_for_template_name +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_rename_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_update_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_update_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_update_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogWithoutDefaultProjectIdHook::test_update_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_create_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_create_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_create_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_create_tag_protobuff +- 
providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_create_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_create_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_delete_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_delete_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_delete_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_delete_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_delete_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_get_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_get_entry_group +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_get_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_list_tags +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_get_tag_for_template_name +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_rename_tag_template_field +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_update_entry +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_update_tag +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_update_tag_template +- providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py::TestCloudDataCatalogMissingProjectIdHook::test_update_tag_template_field diff --git a/providers/google/tests/system/google/datacatalog/__init__.py b/providers/google/tests/system/google/datacatalog/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/providers/google/tests/system/google/datacatalog/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
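The system-test example DAGs removed below exercised the Data Catalog entry, tag, and tag-template workflows end to end. A minimal sketch of the equivalent "get entry" step with the Dataplex Universal Catalog operator that the deprecation messages point to might look as follows; the operator's parameter names (project_id, location, entry_group_id, entry_id) and the placeholder values are assumptions for illustration, not taken from this change:

from __future__ import annotations

from datetime import datetime

from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.dataplex import DataplexCatalogGetEntryOperator

# Placeholder values, illustration only; a real DAG would derive these the same
# way the removed examples did (env vars / system-test helpers).
PROJECT_ID = "my-project"
LOCATION = "us-central1"
ENTRY_GROUP_ID = "my_entry_group"
ENTRY_ID = "my_entry"

with DAG(
    "dataplex_catalog_get_entry_sketch",
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
) as dag:
    # Parameter names below are an assumption about DataplexCatalogGetEntryOperator's
    # signature; verify against the dataplex operator module before relying on them.
    get_entry = DataplexCatalogGetEntryOperator(
        task_id="get_entry",
        project_id=PROJECT_ID,
        location=LOCATION,
        entry_group_id=ENTRY_GROUP_ID,
        entry_id=ENTRY_ID,
    )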
diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_entries.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_entries.py deleted file mode 100644 index a03e172bc3e45..0000000000000 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_entries.py +++ /dev/null @@ -1,213 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import os -from datetime import datetime - -from google.protobuf.field_mask_pb2 import FieldMask - -from airflow.models.dag import DAG -from airflow.models.xcom_arg import XComArg -from airflow.providers.google.cloud.operators.datacatalog import ( - CloudDataCatalogCreateEntryGroupOperator, - CloudDataCatalogCreateEntryOperator, - CloudDataCatalogDeleteEntryGroupOperator, - CloudDataCatalogDeleteEntryOperator, - CloudDataCatalogGetEntryGroupOperator, - CloudDataCatalogGetEntryOperator, - CloudDataCatalogLookupEntryOperator, - CloudDataCatalogUpdateEntryOperator, -) -from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator -from airflow.providers.standard.operators.bash import BashOperator -from airflow.utils.trigger_rule import TriggerRule - -from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") -PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -DAG_ID = "datacatalog_entries" - -BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}" -LOCATION = "us-central1" -ENTRY_GROUP_ID = f"id_{DAG_ID}_{ENV_ID}" -ENTRY_GROUP_NAME = f"name {DAG_ID} {ENV_ID}" -ENTRY_ID = "python_files" -ENTRY_NAME = "Wizard" - -with DAG( - DAG_ID, - schedule="@once", - start_date=datetime(2021, 1, 1), - catchup=False, -) as dag: - create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME) - - # Create - # [START howto_operator_gcp_datacatalog_create_entry_group] - create_entry_group = CloudDataCatalogCreateEntryGroupOperator( - task_id="create_entry_group", - location=LOCATION, - entry_group_id=ENTRY_GROUP_ID, - entry_group={"display_name": ENTRY_GROUP_NAME}, - ) - # [END howto_operator_gcp_datacatalog_create_entry_group] - - # [START howto_operator_gcp_datacatalog_create_entry_group_result] - create_entry_group_result = BashOperator( - task_id="create_entry_group_result", - bash_command=f"echo {XComArg(create_entry_group, key='entry_group_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_entry_group_result] - - # [START howto_operator_gcp_datacatalog_create_entry_gcs] - create_entry_gcs = CloudDataCatalogCreateEntryOperator( - task_id="create_entry_gcs", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry_id=ENTRY_ID, - entry={ - "display_name": ENTRY_NAME, - 
"type_": "FILESET", - "gcs_fileset_spec": {"file_patterns": [f"gs://{BUCKET_NAME}/**"]}, - }, - ) - # [END howto_operator_gcp_datacatalog_create_entry_gcs] - - # [START howto_operator_gcp_datacatalog_create_entry_gcs_result] - create_entry_gcs_result = BashOperator( - task_id="create_entry_gcs_result", - bash_command=f"echo {XComArg(create_entry_gcs, key='entry_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_entry_gcs_result] - - # Get - # [START howto_operator_gcp_datacatalog_get_entry_group] - get_entry_group = CloudDataCatalogGetEntryGroupOperator( - task_id="get_entry_group", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - read_mask=FieldMask(paths=["name", "display_name"]), - ) - # [END howto_operator_gcp_datacatalog_get_entry_group] - - # [START howto_operator_gcp_datacatalog_get_entry_group_result] - get_entry_group_result = BashOperator( - task_id="get_entry_group_result", - bash_command=f"echo {get_entry_group.output}", - ) - # [END howto_operator_gcp_datacatalog_get_entry_group_result] - - # [START howto_operator_gcp_datacatalog_get_entry] - get_entry = CloudDataCatalogGetEntryOperator( - task_id="get_entry", location=LOCATION, entry_group=ENTRY_GROUP_ID, entry=ENTRY_ID - ) - # [END howto_operator_gcp_datacatalog_get_entry] - - # [START howto_operator_gcp_datacatalog_get_entry_result] - get_entry_result = BashOperator(task_id="get_entry_result", bash_command=f"echo {get_entry.output}") - # [END howto_operator_gcp_datacatalog_get_entry_result] - - # Lookup - # [START howto_operator_gcp_datacatalog_lookup_entry_linked_resource] - current_entry_template = ( - "//datacatalog.googleapis.com/projects/{project_id}/locations/{location}/" - "entryGroups/{entry_group}/entries/{entry}" - ) - lookup_entry_linked_resource = CloudDataCatalogLookupEntryOperator( - task_id="lookup_entry", - linked_resource=current_entry_template.format( - project_id=PROJECT_ID, location=LOCATION, entry_group=ENTRY_GROUP_ID, entry=ENTRY_ID - ), - ) - # [END howto_operator_gcp_datacatalog_lookup_entry_linked_resource] - - # [START howto_operator_gcp_datacatalog_lookup_entry_result] - lookup_entry_result = BashOperator( - task_id="lookup_entry_result", - bash_command="echo \"{{ task_instance.xcom_pull('lookup_entry')['display_name'] }}\"", - ) - # [END howto_operator_gcp_datacatalog_lookup_entry_result] - - # Update - # [START howto_operator_gcp_datacatalog_update_entry] - update_entry = CloudDataCatalogUpdateEntryOperator( - task_id="update_entry", - entry={"display_name": f"{ENTRY_NAME} UPDATED"}, - update_mask={"paths": ["display_name"]}, - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry_id=ENTRY_ID, - ) - # [END howto_operator_gcp_datacatalog_update_entry] - - # Delete - # [START howto_operator_gcp_datacatalog_delete_entry] - delete_entry = CloudDataCatalogDeleteEntryOperator( - task_id="delete_entry", location=LOCATION, entry_group=ENTRY_GROUP_ID, entry=ENTRY_ID - ) - # [END howto_operator_gcp_datacatalog_delete_entry] - delete_entry.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_entry_group] - delete_entry_group = CloudDataCatalogDeleteEntryGroupOperator( - task_id="delete_entry_group", location=LOCATION, entry_group=ENTRY_GROUP_ID - ) - # [END howto_operator_gcp_datacatalog_delete_entry_group] - delete_entry_group.trigger_rule = TriggerRule.ALL_DONE - - delete_bucket = GCSDeleteBucketOperator( - task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE - ) - - ( - # TEST SETUP - create_bucket - # TEST BODY - >> 
create_entry_group - >> create_entry_group_result - >> get_entry_group - >> get_entry_group_result - >> create_entry_gcs - >> create_entry_gcs_result - >> get_entry - >> get_entry_result - >> lookup_entry_linked_resource - >> lookup_entry_result - >> update_entry - >> delete_entry - >> delete_entry_group - # TEST TEARDOWN - >> delete_bucket - ) - - # ### Everything below this line is not part of example ### - # ### Just for system tests purpose ### - from tests_common.test_utils.watcher import watcher - - # This test needs watcher in order to properly mark success/failure - # when "tearDown" task with trigger rule is part of the DAG - list(dag.tasks) >> watcher() - - -from tests_common.test_utils.system_tests import get_test_run # noqa: E402 - -# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) -test_run = get_test_run(dag) diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py deleted file mode 100644 index f778a08425ce5..0000000000000 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_search_catalog.py +++ /dev/null @@ -1,236 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import os -from datetime import datetime -from typing import cast - -from google.cloud.datacatalog import TagField, TagTemplateField - -from airflow.models.dag import DAG -from airflow.models.xcom_arg import XComArg -from airflow.providers.google.cloud.operators.datacatalog import ( - CloudDataCatalogCreateEntryGroupOperator, - CloudDataCatalogCreateEntryOperator, - CloudDataCatalogCreateTagOperator, - CloudDataCatalogCreateTagTemplateOperator, - CloudDataCatalogDeleteEntryGroupOperator, - CloudDataCatalogDeleteEntryOperator, - CloudDataCatalogDeleteTagOperator, - CloudDataCatalogDeleteTagTemplateOperator, - CloudDataCatalogSearchCatalogOperator, -) -from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator -from airflow.providers.standard.operators.bash import BashOperator -from airflow.utils.trigger_rule import TriggerRule - -from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") -PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -DAG_ID = "datacatalog_search_catalog" - -BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}" -LOCATION = "us-central1" -ENTRY_GROUP_ID = f"id_{DAG_ID}_{ENV_ID}".replace("-", "_") -ENTRY_GROUP_NAME = f"name {DAG_ID} {ENV_ID}" -ENTRY_ID = "python_files" -ENTRY_NAME = "Wizard" -TEMPLATE_ID = f"template_id_search_{ENV_ID}".replace("-", "_") -TAG_TEMPLATE_DISPLAY_NAME = f"Data Catalog {DAG_ID} {ENV_ID}" -FIELD_NAME_1 = "first" - -with DAG( - DAG_ID, - schedule="@once", - start_date=datetime(2021, 1, 1), - catchup=False, -) as dag: - create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME) - - # Create - # [START howto_operator_gcp_datacatalog_create_entry_group] - create_entry_group = CloudDataCatalogCreateEntryGroupOperator( - task_id="create_entry_group", - location=LOCATION, - entry_group_id=ENTRY_GROUP_ID, - entry_group={"display_name": ENTRY_GROUP_NAME}, - ) - # [END howto_operator_gcp_datacatalog_create_entry_group] - - # [START howto_operator_gcp_datacatalog_create_entry_group_result] - create_entry_group_result = BashOperator( - task_id="create_entry_group_result", - bash_command=f"echo {XComArg(create_entry_group, key='entry_group_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_entry_group_result] - - # [START howto_operator_gcp_datacatalog_create_entry_gcs] - create_entry_gcs = CloudDataCatalogCreateEntryOperator( - task_id="create_entry_gcs", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry_id=ENTRY_ID, - entry={ - "display_name": ENTRY_NAME, - "type_": "FILESET", - "gcs_fileset_spec": {"file_patterns": [f"gs://{BUCKET_NAME}/**"]}, - }, - ) - # [END howto_operator_gcp_datacatalog_create_entry_gcs] - - # [START howto_operator_gcp_datacatalog_create_entry_gcs_result] - create_entry_gcs_result = BashOperator( - task_id="create_entry_gcs_result", - bash_command=f"echo {XComArg(create_entry_gcs, key='entry_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_entry_gcs_result] - - # [START howto_operator_gcp_datacatalog_create_tag] - create_tag = CloudDataCatalogCreateTagOperator( - task_id="create_tag", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry=ENTRY_ID, - template_id=TEMPLATE_ID, - tag={"fields": {FIELD_NAME_1: TagField(string_value="example-value-string")}}, - ) - # [END howto_operator_gcp_datacatalog_create_tag] - - tag_id = cast("str", XComArg(create_tag, key="tag_id")) - - # [START 
howto_operator_gcp_datacatalog_create_tag_result] - create_tag_result = BashOperator( - task_id="create_tag_result", - bash_command=f"echo {tag_id}", - ) - # [END howto_operator_gcp_datacatalog_create_tag_result] - - # [START howto_operator_gcp_datacatalog_create_tag_template] - create_tag_template = CloudDataCatalogCreateTagTemplateOperator( - task_id="create_tag_template", - location=LOCATION, - tag_template_id=TEMPLATE_ID, - tag_template={ - "display_name": TAG_TEMPLATE_DISPLAY_NAME, - "fields": { - FIELD_NAME_1: TagTemplateField( - display_name="first-field", type_=dict(primitive_type="STRING") - ) - }, - }, - ) - # [END howto_operator_gcp_datacatalog_create_tag_template] - - # [START howto_operator_gcp_datacatalog_create_tag_template_result] - create_tag_template_result = BashOperator( - task_id="create_tag_template_result", - bash_command=f"echo {XComArg(create_tag_template, key='tag_template_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_tag_template_result] - - # Search - # [START howto_operator_gcp_datacatalog_search_catalog] - search_catalog = CloudDataCatalogSearchCatalogOperator( - task_id="search_catalog", - scope={"include_project_ids": [PROJECT_ID]}, - query=f"name:{ENTRY_GROUP_NAME}", - ) - # [END howto_operator_gcp_datacatalog_search_catalog] - - # [START howto_operator_gcp_datacatalog_search_catalog_result] - search_catalog_result = BashOperator( - task_id="search_catalog_result", - bash_command=f"echo {search_catalog.output}", - ) - # [END howto_operator_gcp_datacatalog_search_catalog_result] - - # Delete - # [START howto_operator_gcp_datacatalog_delete_entry] - delete_entry = CloudDataCatalogDeleteEntryOperator( - task_id="delete_entry", location=LOCATION, entry_group=ENTRY_GROUP_ID, entry=ENTRY_ID - ) - # [END howto_operator_gcp_datacatalog_delete_entry] - delete_entry.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_entry_group] - delete_entry_group = CloudDataCatalogDeleteEntryGroupOperator( - task_id="delete_entry_group", location=LOCATION, entry_group=ENTRY_GROUP_ID - ) - # [END howto_operator_gcp_datacatalog_delete_entry_group] - delete_entry_group.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_tag] - delete_tag = CloudDataCatalogDeleteTagOperator( - task_id="delete_tag", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry=ENTRY_ID, - tag=tag_id, - ) - # [END howto_operator_gcp_datacatalog_delete_tag] - delete_tag.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_tag_template] - delete_tag_template = CloudDataCatalogDeleteTagTemplateOperator( - task_id="delete_tag_template", location=LOCATION, tag_template=TEMPLATE_ID, force=True - ) - # [END howto_operator_gcp_datacatalog_delete_tag_template] - delete_tag_template.trigger_rule = TriggerRule.ALL_DONE - - delete_bucket = GCSDeleteBucketOperator( - task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE - ) - - ( - # TEST SETUP - create_bucket - # TEST BODY - >> create_entry_group - >> create_entry_group_result - >> create_entry_gcs - >> create_entry_gcs_result - >> create_tag_template - >> create_tag_template_result - >> create_tag - >> create_tag_result - >> search_catalog - >> search_catalog_result - >> delete_tag - >> delete_tag_template - >> delete_entry - >> delete_entry_group - # TEST TEARDOWN - >> delete_bucket - ) - - # ### Everything below this line is not part of example ### - # ### Just for system tests purpose ### - from 
tests_common.test_utils.watcher import watcher - - # This test needs watcher in order to properly mark success/failure - # when "tearDown" task with trigger rule is part of the DAG - list(dag.tasks) >> watcher() - - -from tests_common.test_utils.system_tests import get_test_run # noqa: E402 - -# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) -test_run = get_test_run(dag) diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py deleted file mode 100644 index b256fedb9cb41..0000000000000 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_tag_templates.py +++ /dev/null @@ -1,196 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import os -from datetime import datetime - -from google.cloud.datacatalog import FieldType, TagTemplateField - -from airflow.models.dag import DAG -from airflow.models.xcom_arg import XComArg -from airflow.providers.google.cloud.operators.datacatalog import ( - CloudDataCatalogCreateTagTemplateFieldOperator, - CloudDataCatalogCreateTagTemplateOperator, - CloudDataCatalogDeleteTagTemplateFieldOperator, - CloudDataCatalogDeleteTagTemplateOperator, - CloudDataCatalogGetTagTemplateOperator, - CloudDataCatalogRenameTagTemplateFieldOperator, - CloudDataCatalogUpdateTagTemplateFieldOperator, - CloudDataCatalogUpdateTagTemplateOperator, -) -from airflow.providers.standard.operators.bash import BashOperator -from airflow.utils.trigger_rule import TriggerRule - -from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") -PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -DAG_ID = "datacatalog_tag_templates" - -LOCATION = "us-central1" -TEMPLATE_ID = f"template_id_templ_{ENV_ID}" -TAG_TEMPLATE_DISPLAY_NAME = f"Data Catalog {DAG_ID} {ENV_ID}" -FIELD_NAME_1 = "first" -FIELD_NAME_2 = "second" -FIELD_NAME_3 = "first-rename" - -with DAG( - DAG_ID, - schedule="@once", - start_date=datetime(2021, 1, 1), - catchup=False, -) as dag: - # Create - # [START howto_operator_gcp_datacatalog_create_tag_template] - create_tag_template = CloudDataCatalogCreateTagTemplateOperator( - task_id="create_tag_template", - location=LOCATION, - tag_template_id=TEMPLATE_ID, - tag_template={ - "display_name": TAG_TEMPLATE_DISPLAY_NAME, - "fields": { - FIELD_NAME_1: TagTemplateField( - display_name="first-field", type_=dict(primitive_type="STRING") - ) - }, - }, - ) - # [END howto_operator_gcp_datacatalog_create_tag_template] - - # [START howto_operator_gcp_datacatalog_create_tag_template_result] - create_tag_template_result = 
BashOperator( - task_id="create_tag_template_result", - bash_command=f"echo {XComArg(create_tag_template, key='tag_template_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_tag_template_result] - - # [START howto_operator_gcp_datacatalog_create_tag_template_field] - create_tag_template_field = CloudDataCatalogCreateTagTemplateFieldOperator( - task_id="create_tag_template_field", - location=LOCATION, - tag_template=TEMPLATE_ID, - tag_template_field_id=FIELD_NAME_2, - tag_template_field=TagTemplateField( - display_name="second-field", type_=FieldType(primitive_type="STRING") - ), - ) - # [END howto_operator_gcp_datacatalog_create_tag_template_field] - - # [START howto_operator_gcp_datacatalog_create_tag_template_field_result] - create_tag_template_field_result = BashOperator( - task_id="create_tag_template_field_result", - bash_command=f"echo {XComArg(create_tag_template_field, key='tag_template_field_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_tag_template_field_result] - - # Get - # [START howto_operator_gcp_datacatalog_get_tag_template] - get_tag_template = CloudDataCatalogGetTagTemplateOperator( - task_id="get_tag_template", location=LOCATION, tag_template=TEMPLATE_ID - ) - # [END howto_operator_gcp_datacatalog_get_tag_template] - - # [START howto_operator_gcp_datacatalog_get_tag_template_result] - get_tag_template_result = BashOperator( - task_id="get_tag_template_result", - bash_command=f"echo {get_tag_template.output}", - ) - # [END howto_operator_gcp_datacatalog_get_tag_template_result] - - # Rename - # [START howto_operator_gcp_datacatalog_rename_tag_template_field] - rename_tag_template_field = CloudDataCatalogRenameTagTemplateFieldOperator( - task_id="rename_tag_template_field", - location=LOCATION, - tag_template=TEMPLATE_ID, - field=FIELD_NAME_1, - new_tag_template_field_id=FIELD_NAME_3, - ) - # [END howto_operator_gcp_datacatalog_rename_tag_template_field] - - # Update - # [START howto_operator_gcp_datacatalog_update_tag_template] - update_tag_template = CloudDataCatalogUpdateTagTemplateOperator( - task_id="update_tag_template", - tag_template={"display_name": f"{TAG_TEMPLATE_DISPLAY_NAME} UPDATED"}, - update_mask={"paths": ["display_name"]}, - location=LOCATION, - tag_template_id=TEMPLATE_ID, - ) - # [END howto_operator_gcp_datacatalog_update_tag_template] - - # [START howto_operator_gcp_datacatalog_update_tag_template_field] - update_tag_template_field = CloudDataCatalogUpdateTagTemplateFieldOperator( - task_id="update_tag_template_field", - tag_template_field={"display_name": "Updated template field"}, - update_mask={"paths": ["display_name"]}, - location=LOCATION, - tag_template=TEMPLATE_ID, - tag_template_field_id=FIELD_NAME_1, - ) - # [END howto_operator_gcp_datacatalog_update_tag_template_field] - - # Delete - # [START howto_operator_gcp_datacatalog_delete_tag_template_field] - delete_tag_template_field = CloudDataCatalogDeleteTagTemplateFieldOperator( - task_id="delete_tag_template_field", - location=LOCATION, - tag_template=TEMPLATE_ID, - field=FIELD_NAME_2, - force=True, - ) - # [END howto_operator_gcp_datacatalog_delete_tag_template_field] - delete_tag_template_field.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_tag_template] - delete_tag_template = CloudDataCatalogDeleteTagTemplateOperator( - task_id="delete_tag_template", location=LOCATION, tag_template=TEMPLATE_ID, force=True - ) - # [END howto_operator_gcp_datacatalog_delete_tag_template] - delete_tag_template.trigger_rule = TriggerRule.ALL_DONE - 
- ( - # TEST BODY - create_tag_template - >> create_tag_template_result - >> create_tag_template_field - >> create_tag_template_field_result - >> get_tag_template - >> get_tag_template_result - >> update_tag_template - >> update_tag_template_field - >> rename_tag_template_field - >> delete_tag_template_field - >> delete_tag_template - ) - - # ### Everything below this line is not part of example ### - # ### Just for system tests purpose ### - from tests_common.test_utils.watcher import watcher - - # This test needs watcher in order to properly mark success/failure - # when "tearDown" task with trigger rule is part of the DAG - list(dag.tasks) >> watcher() - - -from tests_common.test_utils.system_tests import get_test_run # noqa: E402 - -# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) -test_run = get_test_run(dag) diff --git a/providers/google/tests/system/google/datacatalog/example_datacatalog_tags.py b/providers/google/tests/system/google/datacatalog/example_datacatalog_tags.py deleted file mode 100644 index c8ab89239d9e5..0000000000000 --- a/providers/google/tests/system/google/datacatalog/example_datacatalog_tags.py +++ /dev/null @@ -1,246 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import os -from datetime import datetime -from typing import cast - -from google.cloud.datacatalog import TagField, TagTemplateField - -from airflow.models.dag import DAG -from airflow.models.xcom_arg import XComArg -from airflow.providers.google.cloud.operators.datacatalog import ( - CloudDataCatalogCreateEntryGroupOperator, - CloudDataCatalogCreateEntryOperator, - CloudDataCatalogCreateTagOperator, - CloudDataCatalogCreateTagTemplateOperator, - CloudDataCatalogDeleteEntryGroupOperator, - CloudDataCatalogDeleteEntryOperator, - CloudDataCatalogDeleteTagOperator, - CloudDataCatalogDeleteTagTemplateOperator, - CloudDataCatalogListTagsOperator, - CloudDataCatalogUpdateTagOperator, -) -from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator -from airflow.providers.standard.operators.bash import BashOperator -from airflow.utils.trigger_rule import TriggerRule - -from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") -PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID - -DAG_ID = "datacatalog_tags" - -BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}" -LOCATION = "us-central1" -ENTRY_GROUP_ID = f"id_{DAG_ID}_{ENV_ID}" -ENTRY_GROUP_NAME = f"name {DAG_ID} {ENV_ID}" -ENTRY_ID = "python_files" -ENTRY_NAME = "Wizard" -TEMPLATE_ID = f"template_id_tags_{ENV_ID}" -TAG_TEMPLATE_DISPLAY_NAME = f"Data Catalog {DAG_ID} {ENV_ID}" -FIELD_NAME_1 = "first" - -with DAG( - DAG_ID, - schedule="@once", - start_date=datetime(2021, 1, 1), - catchup=False, -) as dag: - create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME) - - # Create - # [START howto_operator_gcp_datacatalog_create_entry_group] - create_entry_group = CloudDataCatalogCreateEntryGroupOperator( - task_id="create_entry_group", - location=LOCATION, - entry_group_id=ENTRY_GROUP_ID, - entry_group={"display_name": ENTRY_GROUP_NAME}, - ) - # [END howto_operator_gcp_datacatalog_create_entry_group] - - # [START howto_operator_gcp_datacatalog_create_entry_group_result] - create_entry_group_result = BashOperator( - task_id="create_entry_group_result", - bash_command=f"echo {XComArg(create_entry_group, key='entry_group_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_entry_group_result] - - # [START howto_operator_gcp_datacatalog_create_entry_gcs] - create_entry_gcs = CloudDataCatalogCreateEntryOperator( - task_id="create_entry_gcs", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry_id=ENTRY_ID, - entry={ - "display_name": ENTRY_NAME, - "type_": "FILESET", - "gcs_fileset_spec": {"file_patterns": [f"gs://{BUCKET_NAME}/**"]}, - }, - ) - # [END howto_operator_gcp_datacatalog_create_entry_gcs] - - # [START howto_operator_gcp_datacatalog_create_entry_gcs_result] - create_entry_gcs_result = BashOperator( - task_id="create_entry_gcs_result", - bash_command=f"echo {XComArg(create_entry_gcs, key='entry_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_entry_gcs_result] - - # [START howto_operator_gcp_datacatalog_create_tag] - create_tag = CloudDataCatalogCreateTagOperator( - task_id="create_tag", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry=ENTRY_ID, - template_id=TEMPLATE_ID, - tag={"fields": {FIELD_NAME_1: TagField(string_value="example-value-string")}}, - ) - # [END howto_operator_gcp_datacatalog_create_tag] - - tag_id = cast("str", XComArg(create_tag, key="tag_id")) - - # [START 
howto_operator_gcp_datacatalog_create_tag_result] - create_tag_result = BashOperator( - task_id="create_tag_result", - bash_command=f"echo {tag_id}", - ) - # [END howto_operator_gcp_datacatalog_create_tag_result] - - # [START howto_operator_gcp_datacatalog_create_tag_template] - create_tag_template = CloudDataCatalogCreateTagTemplateOperator( - task_id="create_tag_template", - location=LOCATION, - tag_template_id=TEMPLATE_ID, - tag_template={ - "display_name": TAG_TEMPLATE_DISPLAY_NAME, - "fields": { - FIELD_NAME_1: TagTemplateField( - display_name="first-field", type_=dict(primitive_type="STRING") - ) - }, - }, - ) - # [END howto_operator_gcp_datacatalog_create_tag_template] - - # [START howto_operator_gcp_datacatalog_create_tag_template_result] - create_tag_template_result = BashOperator( - task_id="create_tag_template_result", - bash_command=f"echo {XComArg(create_tag_template, key='tag_template_id')}", - ) - # [END howto_operator_gcp_datacatalog_create_tag_template_result] - - # List - # [START howto_operator_gcp_datacatalog_list_tags] - list_tags = CloudDataCatalogListTagsOperator( - task_id="list_tags", location=LOCATION, entry_group=ENTRY_GROUP_ID, entry=ENTRY_ID - ) - # [END howto_operator_gcp_datacatalog_list_tags] - - # [START howto_operator_gcp_datacatalog_list_tags_result] - list_tags_result = BashOperator(task_id="list_tags_result", bash_command=f"echo {list_tags.output}") - # [END howto_operator_gcp_datacatalog_list_tags_result] - - # Update - # [START howto_operator_gcp_datacatalog_update_tag] - update_tag = CloudDataCatalogUpdateTagOperator( - task_id="update_tag", - tag={"fields": {FIELD_NAME_1: TagField(string_value="new-value-string")}}, - update_mask={"paths": ["fields"]}, - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry=ENTRY_ID, - tag_id=tag_id, - ) - # [END howto_operator_gcp_datacatalog_update_tag] - - # # Delete - # [START howto_operator_gcp_datacatalog_delete_entry] - delete_entry = CloudDataCatalogDeleteEntryOperator( - task_id="delete_entry", location=LOCATION, entry_group=ENTRY_GROUP_ID, entry=ENTRY_ID - ) - # [END howto_operator_gcp_datacatalog_delete_entry] - delete_entry.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_entry_group] - delete_entry_group = CloudDataCatalogDeleteEntryGroupOperator( - task_id="delete_entry_group", location=LOCATION, entry_group=ENTRY_GROUP_ID - ) - # [END howto_operator_gcp_datacatalog_delete_entry_group] - delete_entry_group.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_tag] - delete_tag = CloudDataCatalogDeleteTagOperator( - task_id="delete_tag", - location=LOCATION, - entry_group=ENTRY_GROUP_ID, - entry=ENTRY_ID, - tag=tag_id, - ) - # [END howto_operator_gcp_datacatalog_delete_tag] - delete_tag.trigger_rule = TriggerRule.ALL_DONE - - # [START howto_operator_gcp_datacatalog_delete_tag_template] - delete_tag_template = CloudDataCatalogDeleteTagTemplateOperator( - task_id="delete_tag_template", location=LOCATION, tag_template=TEMPLATE_ID, force=True - ) - # [END howto_operator_gcp_datacatalog_delete_tag_template] - delete_tag_template.trigger_rule = TriggerRule.ALL_DONE - - delete_bucket = GCSDeleteBucketOperator( - task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE - ) - - ( - # TEST SETUP - create_bucket - # TEST BODY - >> create_entry_group - >> create_entry_group_result - >> create_entry_gcs - >> create_entry_gcs_result - >> create_tag_template - >> create_tag_template_result - >> create_tag - >> 
create_tag_result - >> list_tags - >> list_tags_result - >> update_tag - >> delete_tag - >> delete_tag_template - >> delete_entry - >> delete_entry_group - # TEST TEARDOWN - >> delete_bucket - ) - - # ### Everything below this line is not part of example ### - # ### Just for system tests purpose ### - from tests_common.test_utils.watcher import watcher - - # This test needs watcher in order to properly mark success/failure - # when "tearDown" task with trigger rule is part of the DAG - list(dag.tasks) >> watcher() - - -from tests_common.test_utils.system_tests import get_test_run # noqa: E402 - -# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) -test_run = get_test_run(dag) diff --git a/providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py b/providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py index 0c3ffd257832e..83954e6eab548 100644 --- a/providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py +++ b/providers/google/tests/unit/google/cloud/hooks/test_datacatalog.py @@ -27,7 +27,7 @@ from google.cloud.datacatalog import CreateTagRequest, CreateTagTemplateRequest, Entry, Tag, TagTemplate from google.protobuf.field_mask_pb2 import FieldMask -from airflow.exceptions import AirflowException +from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook from unit.google.cloud.utils.base_gcp_mock import ( @@ -87,11 +87,12 @@ class TestCloudDataCatalog: def setup_method(self): - with mock.patch( - "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", - new=mock_base_gcp_hook_default_project_id, - ): - self.hook = CloudDataCatalogHook(gcp_conn_id="test") + with pytest.warns(AirflowProviderDeprecationWarning): + with mock.patch( + "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", + new=mock_base_gcp_hook_default_project_id, + ): + self.hook = CloudDataCatalogHook(gcp_conn_id="test") @mock.patch( "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.get_credentials_and_project_id", @@ -166,11 +167,12 @@ def test_search_catalog(self, mock_get_conn, mock_get_creds_and_project_id) -> N class TestCloudDataCatalogWithDefaultProjectIdHook: def setup_method(self): - with mock.patch( - "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", - new=mock_base_gcp_hook_default_project_id, - ): - self.hook = CloudDataCatalogHook(gcp_conn_id="test") + with pytest.warns(AirflowProviderDeprecationWarning): + with mock.patch( + "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", + new=mock_base_gcp_hook_default_project_id, + ): + self.hook = CloudDataCatalogHook(gcp_conn_id="test") @mock.patch( "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.get_credentials_and_project_id", @@ -695,11 +697,12 @@ def test_update_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje class TestCloudDataCatalogWithoutDefaultProjectIdHook: def setup_method(self): - with mock.patch( - "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", - new=mock_base_gcp_hook_no_default_project_id, - ): - self.hook = CloudDataCatalogHook(gcp_conn_id="test") + with pytest.warns(AirflowProviderDeprecationWarning): + with mock.patch( + "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", + new=mock_base_gcp_hook_no_default_project_id, + ): + self.hook = 
CloudDataCatalogHook(gcp_conn_id="test") @mock.patch( "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.get_credentials_and_project_id", @@ -1231,11 +1234,12 @@ def test_update_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje class TestCloudDataCatalogMissingProjectIdHook: def setup_method(self): - with mock.patch( - "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", - new=mock_base_gcp_hook_no_default_project_id, - ): - self.hook = CloudDataCatalogHook(gcp_conn_id="test") + with pytest.warns(AirflowProviderDeprecationWarning): + with mock.patch( + "airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.__init__", + new=mock_base_gcp_hook_no_default_project_id, + ): + self.hook = CloudDataCatalogHook(gcp_conn_id="test") @mock.patch( "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.get_credentials_and_project_id", diff --git a/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py b/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py index c1f4b12656f08..07d9a610ca40f 100644 --- a/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py +++ b/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py @@ -20,11 +20,13 @@ from typing import TYPE_CHECKING from unittest import mock +import pytest from google.api_core.exceptions import AlreadyExists from google.api_core.retry import Retry from google.cloud.datacatalog import Entry, EntryGroup, Tag, TagTemplate, TagTemplateField from google.protobuf.field_mask_pb2 import FieldMask +from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.google.cloud.operators.datacatalog import ( CloudDataCatalogCreateEntryGroupOperator, CloudDataCatalogCreateEntryOperator, @@ -145,19 +147,20 @@ class TestCloudDataCatalogCreateEntryOperator: ) @mock.patch(BASE_PATH.format("CloudDataCatalogCreateEntryOperator.xcom_push")) def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: - task = CloudDataCatalogCreateEntryOperator( - task_id="task_id", - location=TEST_LOCATION, - entry_group=TEST_ENTRY_GROUP_ID, - entry_id=TEST_ENTRY_ID, - entry=TEST_ENTRY, - project_id=TEST_PROJECT_ID, - retry=TEST_RETRY, - timeout=TEST_TIMEOUT, - metadata=TEST_METADATA, - gcp_conn_id=TEST_GCP_CONN_ID, - impersonation_chain=TEST_IMPERSONATION_CHAIN, - ) + with pytest.warns(AirflowProviderDeprecationWarning): + task = CloudDataCatalogCreateEntryOperator( + task_id="task_id", + location=TEST_LOCATION, + entry_group=TEST_ENTRY_GROUP_ID, + entry_id=TEST_ENTRY_ID, + entry=TEST_ENTRY, + project_id=TEST_PROJECT_ID, + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, + gcp_conn_id=TEST_GCP_CONN_ID, + impersonation_chain=TEST_IMPERSONATION_CHAIN, + ) context = mock.MagicMock() result = task.execute(context=context) mock_hook.assert_called_once_with( @@ -192,19 +195,20 @@ def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: def test_assert_valid_hook_call_when_exists(self, mock_xcom, mock_hook) -> None: mock_hook.return_value.create_entry.side_effect = AlreadyExists(message="message") mock_hook.return_value.get_entry.return_value = TEST_ENTRY - task = CloudDataCatalogCreateEntryOperator( - task_id="task_id", - location=TEST_LOCATION, - entry_group=TEST_ENTRY_GROUP_ID, - entry_id=TEST_ENTRY_ID, - entry=TEST_ENTRY, - project_id=TEST_PROJECT_ID, - retry=TEST_RETRY, - timeout=TEST_TIMEOUT, - metadata=TEST_METADATA, - gcp_conn_id=TEST_GCP_CONN_ID, - 
impersonation_chain=TEST_IMPERSONATION_CHAIN, - ) + with pytest.warns(AirflowProviderDeprecationWarning): + task = CloudDataCatalogCreateEntryOperator( + task_id="task_id", + location=TEST_LOCATION, + entry_group=TEST_ENTRY_GROUP_ID, + entry_id=TEST_ENTRY_ID, + entry=TEST_ENTRY, + project_id=TEST_PROJECT_ID, + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, + gcp_conn_id=TEST_GCP_CONN_ID, + impersonation_chain=TEST_IMPERSONATION_CHAIN, + ) context = mock.MagicMock() result = task.execute(context=context) mock_hook.assert_called_once_with( @@ -250,18 +254,19 @@ class TestCloudDataCatalogCreateEntryGroupOperator: ) @mock.patch(BASE_PATH.format("CloudDataCatalogCreateEntryGroupOperator.xcom_push")) def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: - task = CloudDataCatalogCreateEntryGroupOperator( - task_id="task_id", - location=TEST_LOCATION, - entry_group_id=TEST_ENTRY_GROUP_ID, - entry_group=TEST_ENTRY_GROUP, - project_id=TEST_PROJECT_ID, - retry=TEST_RETRY, - timeout=TEST_TIMEOUT, - metadata=TEST_METADATA, - gcp_conn_id=TEST_GCP_CONN_ID, - impersonation_chain=TEST_IMPERSONATION_CHAIN, - ) + with pytest.warns(AirflowProviderDeprecationWarning): + task = CloudDataCatalogCreateEntryGroupOperator( + task_id="task_id", + location=TEST_LOCATION, + entry_group_id=TEST_ENTRY_GROUP_ID, + entry_group=TEST_ENTRY_GROUP, + project_id=TEST_PROJECT_ID, + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, + gcp_conn_id=TEST_GCP_CONN_ID, + impersonation_chain=TEST_IMPERSONATION_CHAIN, + ) context = mock.MagicMock() result = task.execute(context=context) mock_hook.assert_called_once_with( @@ -296,20 +301,21 @@ class TestCloudDataCatalogCreateTagOperator: ) @mock.patch(BASE_PATH.format("CloudDataCatalogCreateTagOperator.xcom_push")) def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: - task = CloudDataCatalogCreateTagOperator( - task_id="task_id", - location=TEST_LOCATION, - entry_group=TEST_ENTRY_GROUP_ID, - entry=TEST_ENTRY_ID, - tag=TEST_TAG, - template_id=TEST_TAG_TEMPLATE_ID, - project_id=TEST_PROJECT_ID, - retry=TEST_RETRY, - timeout=TEST_TIMEOUT, - metadata=TEST_METADATA, - gcp_conn_id=TEST_GCP_CONN_ID, - impersonation_chain=TEST_IMPERSONATION_CHAIN, - ) + with pytest.warns(AirflowProviderDeprecationWarning): + task = CloudDataCatalogCreateTagOperator( + task_id="task_id", + location=TEST_LOCATION, + entry_group=TEST_ENTRY_GROUP_ID, + entry=TEST_ENTRY_ID, + tag=TEST_TAG, + template_id=TEST_TAG_TEMPLATE_ID, + project_id=TEST_PROJECT_ID, + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, + gcp_conn_id=TEST_GCP_CONN_ID, + impersonation_chain=TEST_IMPERSONATION_CHAIN, + ) context = mock.MagicMock() result = task.execute(context=context) mock_hook.assert_called_once_with( @@ -347,18 +353,19 @@ class TestCloudDataCatalogCreateTagTemplateOperator: ) @mock.patch(BASE_PATH.format("CloudDataCatalogCreateTagTemplateOperator.xcom_push")) def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None: - task = CloudDataCatalogCreateTagTemplateOperator( - task_id="task_id", - location=TEST_LOCATION, - tag_template_id=TEST_TAG_TEMPLATE_ID, - tag_template=TEST_TAG_TEMPLATE, - project_id=TEST_PROJECT_ID, - retry=TEST_RETRY, - timeout=TEST_TIMEOUT, - metadata=TEST_METADATA, - gcp_conn_id=TEST_GCP_CONN_ID, - impersonation_chain=TEST_IMPERSONATION_CHAIN, - ) + with pytest.warns(AirflowProviderDeprecationWarning): + task = CloudDataCatalogCreateTagTemplateOperator( + task_id="task_id", + location=TEST_LOCATION, + 
+                tag_template_id=TEST_TAG_TEMPLATE_ID,
+                tag_template=TEST_TAG_TEMPLATE,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         context = mock.MagicMock()
         result = task.execute(context=context)
         mock_hook.assert_called_once_with(
@@ -393,19 +400,20 @@ class TestCloudDataCatalogCreateTagTemplateFieldOperator:
     )
     @mock.patch(BASE_PATH.format("CloudDataCatalogCreateTagTemplateFieldOperator.xcom_push"))
     def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None:
-        task = CloudDataCatalogCreateTagTemplateFieldOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            tag_template=TEST_TAG_TEMPLATE_ID,
-            tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
-            tag_template_field=TEST_TAG_TEMPLATE_FIELD,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogCreateTagTemplateFieldOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                tag_template=TEST_TAG_TEMPLATE_ID,
+                tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
+                tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         context = mock.MagicMock()
         result = task.execute(context=context)
         mock_hook.assert_called_once_with(
@@ -437,18 +445,19 @@ def test_assert_valid_hook_call(self, mock_xcom, mock_hook) -> None:
 class TestCloudDataCatalogDeleteEntryOperator:
     @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogDeleteEntryOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            entry=TEST_ENTRY_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogDeleteEntryOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                entry=TEST_ENTRY_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -468,17 +477,18 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
 class TestCloudDataCatalogDeleteEntryGroupOperator:
     @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogDeleteEntryGroupOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogDeleteEntryGroupOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -497,19 +507,20 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
 class TestCloudDataCatalogDeleteTagOperator:
     @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogDeleteTagOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            entry=TEST_ENTRY_ID,
-            tag=TEST_TAG_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogDeleteTagOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                entry=TEST_ENTRY_ID,
+                tag=TEST_TAG_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -530,18 +541,19 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
 class TestCloudDataCatalogDeleteTagTemplateOperator:
     @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogDeleteTagTemplateOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            tag_template=TEST_TAG_TEMPLATE_ID,
-            force=TEST_FORCE,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogDeleteTagTemplateOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                tag_template=TEST_TAG_TEMPLATE_ID,
+                force=TEST_FORCE,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -561,19 +573,20 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
 class TestCloudDataCatalogDeleteTagTemplateFieldOperator:
     @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogDeleteTagTemplateFieldOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            tag_template=TEST_TAG_TEMPLATE_ID,
-            field=TEST_TAG_TEMPLATE_FIELD_ID,
-            force=TEST_FORCE,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogDeleteTagTemplateFieldOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                tag_template=TEST_TAG_TEMPLATE_ID,
+                field=TEST_TAG_TEMPLATE_FIELD_ID,
+                force=TEST_FORCE,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -597,18 +610,19 @@ class TestCloudDataCatalogGetEntryOperator:
         **{"return_value.get_entry.return_value": TEST_ENTRY},  # type: ignore
     )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogGetEntryOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            entry=TEST_ENTRY_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogGetEntryOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                entry=TEST_ENTRY_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -631,18 +645,19 @@ class TestCloudDataCatalogGetEntryGroupOperator:
         **{"return_value.get_entry_group.return_value": TEST_ENTRY_GROUP},  # type: ignore
     )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogGetEntryGroupOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            read_mask=TEST_READ_MASK,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogGetEntryGroupOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                read_mask=TEST_READ_MASK,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -665,17 +680,18 @@ class TestCloudDataCatalogGetTagTemplateOperator:
         **{"return_value.get_tag_template.return_value": TEST_TAG_TEMPLATE},  # type: ignore
     )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogGetTagTemplateOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            tag_template=TEST_TAG_TEMPLATE_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogGetTagTemplateOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                tag_template=TEST_TAG_TEMPLATE_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -697,19 +713,20 @@ class TestCloudDataCatalogListTagsOperator:
         return_value=mock.MagicMock(list_tags=mock.MagicMock(return_value=[TEST_TAG])),
     )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogListTagsOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            entry=TEST_ENTRY_ID,
-            page_size=TEST_PAGE_SIZE,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogListTagsOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                entry=TEST_ENTRY_ID,
+                page_size=TEST_PAGE_SIZE,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -733,16 +750,17 @@ class TestCloudDataCatalogLookupEntryOperator:
         **{"return_value.lookup_entry.return_value": TEST_ENTRY},  # type: ignore
     )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogLookupEntryOperator(
-            task_id="task_id",
-            linked_resource=TEST_LINKED_RESOURCE,
-            sql_resource=TEST_SQL_RESOURCE,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogLookupEntryOperator(
+                task_id="task_id",
+                linked_resource=TEST_LINKED_RESOURCE,
+                sql_resource=TEST_SQL_RESOURCE,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -760,19 +778,20 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
 class TestCloudDataCatalogRenameTagTemplateFieldOperator:
     @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
     def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogRenameTagTemplateFieldOperator(
-            task_id="task_id",
-            location=TEST_LOCATION,
-            tag_template=TEST_TAG_TEMPLATE_ID,
-            field=TEST_TAG_TEMPLATE_FIELD_ID,
-            new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogRenameTagTemplateFieldOperator(
+                task_id="task_id",
+                location=TEST_LOCATION,
+                tag_template=TEST_TAG_TEMPLATE_ID,
+                field=TEST_TAG_TEMPLATE_FIELD_ID,
+                new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -793,18 +812,19 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
 class TestCloudDataCatalogSearchCatalogOperator:
     @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
    def test_assert_valid_hook_call(self, mock_hook) -> None:
-        task = CloudDataCatalogSearchCatalogOperator(
-            task_id="task_id",
-            scope=TEST_SCOPE,
-            query=TEST_QUERY,
-            page_size=TEST_PAGE_SIZE,
-            order_by=TEST_ORDER_BY,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogSearchCatalogOperator(
+                task_id="task_id",
+                scope=TEST_SCOPE,
+                query=TEST_QUERY,
+                page_size=TEST_PAGE_SIZE,
+                order_by=TEST_ORDER_BY,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -830,20 +850,21 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
             entry_group_id=TEST_ENTRY_GROUP_ID,
             entry_id=TEST_ENTRY_ID,
         )
-        task = CloudDataCatalogUpdateEntryOperator(
-            task_id="task_id",
-            entry=TEST_ENTRY,
-            update_mask=TEST_UPDATE_MASK,
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            entry_id=TEST_ENTRY_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogUpdateEntryOperator(
+                task_id="task_id",
+                entry=TEST_ENTRY,
+                update_mask=TEST_UPDATE_MASK,
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                entry_id=TEST_ENTRY_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -871,21 +892,22 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
             entry_group_id=TEST_ENTRY_GROUP_ID,
             entry_id=TEST_ENTRY_ID,
         )
-        task = CloudDataCatalogUpdateTagOperator(
-            task_id="task_id",
-            tag=Tag(name=TEST_TAG_ID),
-            update_mask=TEST_UPDATE_MASK,
-            location=TEST_LOCATION,
-            entry_group=TEST_ENTRY_GROUP_ID,
-            entry=TEST_ENTRY_ID,
-            tag_id=TEST_TAG_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogUpdateTagOperator(
+                task_id="task_id",
+                tag=Tag(name=TEST_TAG_ID),
+                update_mask=TEST_UPDATE_MASK,
+                location=TEST_LOCATION,
+                entry_group=TEST_ENTRY_GROUP_ID,
+                entry=TEST_ENTRY_ID,
+                tag_id=TEST_TAG_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -913,19 +935,20 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
             location=TEST_LOCATION,
             tag_template_id=TEST_TAG_TEMPLATE_ID,
         )
-        task = CloudDataCatalogUpdateTagTemplateOperator(
-            task_id="task_id",
-            tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_ID),
-            update_mask=TEST_UPDATE_MASK,
-            location=TEST_LOCATION,
-            tag_template_id=TEST_TAG_TEMPLATE_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogUpdateTagTemplateOperator(
+                task_id="task_id",
+                tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_ID),
+                update_mask=TEST_UPDATE_MASK,
+                location=TEST_LOCATION,
+                tag_template_id=TEST_TAG_TEMPLATE_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -954,21 +977,22 @@ def test_assert_valid_hook_call(self, mock_hook) -> None:
                 tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
             )
         )
-        task = CloudDataCatalogUpdateTagTemplateFieldOperator(
-            task_id="task_id",
-            tag_template_field=TEST_TAG_TEMPLATE_FIELD,
-            update_mask=TEST_UPDATE_MASK,
-            tag_template_field_name=TEST_TAG_TEMPLATE_NAME,
-            location=TEST_LOCATION,
-            tag_template=TEST_TAG_TEMPLATE_ID,
-            tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
-            project_id=TEST_PROJECT_ID,
-            retry=TEST_RETRY,
-            timeout=TEST_TIMEOUT,
-            metadata=TEST_METADATA,
-            gcp_conn_id=TEST_GCP_CONN_ID,
-            impersonation_chain=TEST_IMPERSONATION_CHAIN,
-        )
+        with pytest.warns(AirflowProviderDeprecationWarning):
+            task = CloudDataCatalogUpdateTagTemplateFieldOperator(
+                task_id="task_id",
+                tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+                update_mask=TEST_UPDATE_MASK,
+                tag_template_field_name=TEST_TAG_TEMPLATE_NAME,
+                location=TEST_LOCATION,
+                tag_template=TEST_TAG_TEMPLATE_ID,
+                tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
+                project_id=TEST_PROJECT_ID,
+                retry=TEST_RETRY,
+                timeout=TEST_TIMEOUT,
+                metadata=TEST_METADATA,
+                gcp_conn_id=TEST_GCP_CONN_ID,
+                impersonation_chain=TEST_IMPERSONATION_CHAIN,
+            )
         task.execute(context=mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py
index fea1b8f44a15e..3dab9611ea508 100755
--- a/scripts/in_container/run_provider_yaml_files_check.py
+++ b/scripts/in_container/run_provider_yaml_files_check.py
@@ -61,6 +61,9 @@
     "airflow.providers.tabular.hooks.tabular",
     "airflow.providers.yandex.hooks.yandexcloud_dataproc",
     "airflow.providers.yandex.operators.yandexcloud_dataproc",
+    "airflow.providers.google.cloud.hooks.datacatalog",
+    "airflow.providers.google.cloud.operators.datacatalog",
+    "airflow.providers.google.cloud.links.datacatalog",
 ]

 KNOWN_DEPRECATED_CLASSES = [
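Editorial note: every test hunk above applies the same pattern — constructing a Data Catalog operator must now emit ``AirflowProviderDeprecationWarning``, while ``execute()`` still delegates to ``CloudDataCatalogHook``. The following is a minimal, self-contained sketch of that pattern, not a copy of the actual tests: the literal argument values and the test name are illustrative (the real tests use the module-level ``TEST_*`` constants), and the warning class is assumed to be importable from ``airflow.exceptions`` as in other provider test modules.

    from unittest import mock

    import pytest

    from airflow.exceptions import AirflowProviderDeprecationWarning  # assumed import path
    from airflow.providers.google.cloud.operators.datacatalog import CloudDataCatalogGetEntryOperator


    @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
    def test_deprecated_operator_warns_and_still_calls_hook(mock_hook):
        # Instantiating the deprecated operator should emit the provider deprecation warning.
        with pytest.warns(AirflowProviderDeprecationWarning):
            task = CloudDataCatalogGetEntryOperator(
                task_id="task_id",
                location="europe-west1",      # illustrative values; real tests use TEST_* constants
                entry_group="my-entry-group",
                entry="my-entry",
                project_id="my-project",
            )
        # Behaviour is otherwise unchanged: execute() still delegates to the (mocked) hook.
        task.execute(context=mock.MagicMock())
        mock_hook.assert_called_once()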