Remove <2.0.0 limit on google-cloud-bigtable #29644

Merged 6 commits on Feb 22, 2023
airflow/providers/google/cloud/hooks/bigtable.py (23 changes: 10 additions & 13 deletions)
@@ -22,12 +22,11 @@
 import warnings
 from typing import Sequence
 
-from google.cloud.bigtable import Client
+from google.cloud.bigtable import Client, enums
 from google.cloud.bigtable.cluster import Cluster
 from google.cloud.bigtable.column_family import ColumnFamily, GarbageCollectionRule
 from google.cloud.bigtable.instance import Instance
 from google.cloud.bigtable.table import ClusterState, Table
-from google.cloud.bigtable_admin_v2 import enums
 
 from airflow.providers.google.common.consts import CLIENT_INFO
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
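Note: as this hunk suggests, in google-cloud-bigtable 2.x the old `google.cloud.bigtable_admin_v2.enums` module path is no longer available, and the equivalent enum namespaces are imported from the top-level package instead. A minimal sketch of the new import path (member names per the 2.x `enums` module; verify against the installed version):

```python
# Assumes google-cloud-bigtable>=2.0.0, where the enum namespaces live at the
# package root rather than under google.cloud.bigtable_admin_v2.
from google.cloud.bigtable import enums

print(enums.Instance.Type.PRODUCTION)  # instance type for real workloads
print(enums.StorageType.SSD)           # SSD-backed cluster storage
```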
@@ -56,9 +55,9 @@ def __init__(
             delegate_to=delegate_to,
             impersonation_chain=impersonation_chain,
         )
-        self._client = None
+        self._client: Client | None = None
 
-    def _get_client(self, project_id: str):
+    def _get_client(self, project_id: str) -> Client:
         if not self._client:
             self._client = Client(
                 project=project_id,
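For context, the new annotation documents the lazy-initialization pattern `_get_client` already uses: build the client once, cache it on the hook, and reuse it. A standalone sketch of that pattern (simplified; the real method also passes `credentials` and `client_info`):

```python
from __future__ import annotations

from google.cloud.bigtable import Client


class ClientCache:
    """Standalone illustration of the hook's memoized-client pattern."""

    def __init__(self) -> None:
        self._client: Client | None = None

    def _get_client(self, project_id: str) -> Client:
        # Build the admin client on first use, then reuse the cached one.
        if not self._client:
            self._client = Client(project=project_id, admin=True)
        return self._client
```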
@@ -69,7 +68,7 @@ def _get_client(self, project_id: str):
         return self._client
 
     @GoogleBaseHook.fallback_to_default_project_id
-    def get_instance(self, instance_id: str, project_id: str) -> Instance:
+    def get_instance(self, instance_id: str, project_id: str) -> Instance | None:
         """
         Retrieves and returns the specified Cloud Bigtable instance if it exists.
         Otherwise, returns None.
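The widened return type makes the docstring's "Otherwise, returns None" visible to type checkers, so callers can no longer assume an `Instance` comes back. A hypothetical caller (the connection id, instance, and project names below are placeholders):

```python
from airflow.providers.google.cloud.hooks.bigtable import BigtableHook

# Placeholder identifiers; substitute real connection/instance/project names.
hook = BigtableHook(gcp_conn_id="google_cloud_default")
instance = hook.get_instance(instance_id="my-instance", project_id="my-project")
if instance is None:
    print("Instance my-instance does not exist")
else:
    print(f"Found instance {instance.instance_id}")
```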
@@ -113,10 +112,10 @@ def create_instance(
         project_id: str,
         replica_clusters: list[dict[str, str]] | None = None,
         instance_display_name: str | None = None,
-        instance_type: enums.Instance.Type = enums.Instance.Type.TYPE_UNSPECIFIED,
+        instance_type: enums.Instance.Type = enums.Instance.Type.UNSPECIFIED,  # type: ignore[assignment]
         instance_labels: dict | None = None,
         cluster_nodes: int | None = None,
-        cluster_storage_type: enums.StorageType = enums.StorageType.STORAGE_TYPE_UNSPECIFIED,
+        cluster_storage_type: enums.StorageType = enums.StorageType.UNSPECIFIED,  # type: ignore[assignment]
         timeout: float | None = None,
     ) -> Instance:
         """
@@ -142,9 +141,6 @@ def create_instance(
         :param timeout: (optional) timeout (in seconds) for instance creation.
             If None is not specified, Operator will wait indefinitely.
         """
-        cluster_storage_type = enums.StorageType(cluster_storage_type)
-        instance_type = enums.Instance.Type(instance_type)
-
         instance = Instance(
             instance_id,
             self._get_client(project_id=project_id),
@@ -200,8 +196,6 @@ def update_instance(
         :param timeout: (optional) timeout (in seconds) for instance update.
             If None is not specified, Operator will wait indefinitely.
         """
-        instance_type = enums.Instance.Type(instance_type)
-
         instance = Instance(
             instance_id=instance_id,
             client=self._get_client(project_id=project_id),
@@ -253,7 +247,10 @@ def delete_table(self, instance_id: str, table_id: str, project_id: str) -> None:
             BigTable exists. If set to None or missing,
             the default project_id from the Google Cloud connection is used.
         """
-        table = self.get_instance(instance_id=instance_id, project_id=project_id).table(table_id=table_id)
+        instance = self.get_instance(instance_id=instance_id, project_id=project_id)
+        if instance is None:
+            raise RuntimeError("Instance %s did not exist; unable to delete table %s" % (instance_id, table_id))
+        table = instance.table(table_id=table_id)
         table.delete()
 
     @staticmethod
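The guard changes the failure mode: deleting a table on a nonexistent instance now raises a descriptive `RuntimeError` instead of an `AttributeError` on `None`. A hypothetical call (identifiers are placeholders):

```python
from airflow.providers.google.cloud.hooks.bigtable import BigtableHook

hook = BigtableHook(gcp_conn_id="google_cloud_default")
try:
    hook.delete_table(instance_id="missing-instance", table_id="my-table", project_id="my-project")
except RuntimeError as err:
    print(f"Delete failed: {err}")
```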
airflow/providers/google/cloud/operators/bigtable.py (2 changes: 1 addition & 1 deletion)
@@ -22,8 +22,8 @@
 from typing import TYPE_CHECKING, Iterable, Sequence
 
 import google.api_core.exceptions
+from google.cloud.bigtable import enums
 from google.cloud.bigtable.column_family import GarbageCollectionRule
-from google.cloud.bigtable_admin_v2 import enums
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
airflow/providers/google/cloud/sensors/bigtable.py (4 changes: 2 additions & 2 deletions)
@@ -21,8 +21,8 @@
 from typing import TYPE_CHECKING, Sequence
 
 import google.api_core.exceptions
+from google.cloud.bigtable import enums
 from google.cloud.bigtable.table import ClusterState
-from google.cloud.bigtable_admin_v2 import enums
 
 from airflow.providers.google.cloud.hooks.bigtable import BigtableHook
 from airflow.providers.google.cloud.links.bigtable import BigtableTablesLink
@@ -103,7 +103,7 @@ def poke(self, context: Context) -> bool:
             )
             return False
 
-        ready_state = ClusterState(enums.Table.ClusterState.ReplicationState.READY)
+        ready_state = ClusterState(enums.Table.ReplicationState.READY)
 
         is_table_replicated = True
         for cluster_id in cluster_states.keys():
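The sensor's readiness constant keeps its meaning under the 2.x enum layout, where `ReplicationState` nests directly under `enums.Table`. A standalone sketch of the comparison the `poke` loop performs (the `cluster_states` mapping is fabricated for illustration):

```python
from google.cloud.bigtable import enums
from google.cloud.bigtable.table import ClusterState

ready_state = ClusterState(enums.Table.ReplicationState.READY)

# Fabricated example of the per-cluster states a real table would report.
cluster_states = {
    "cluster-a": ClusterState(enums.Table.ReplicationState.READY),
    "cluster-b": ClusterState(enums.Table.ReplicationState.INITIALIZING),
}
is_table_replicated = all(state == ready_state for state in cluster_states.values())
print(is_table_replicated)  # False: cluster-b is still initializing
```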
airflow/providers/google/provider.yaml (5 changes: 1 addition & 4 deletions)
@@ -83,7 +83,7 @@ dependencies:
   - google-cloud-aiplatform>=1.7.1,<2.0.0
   - google-cloud-automl>=2.1.0
   - google-cloud-bigquery-datatransfer>=3.0.0
-  - google-cloud-bigtable>=1.0.0,<2.0.0
+  - google-cloud-bigtable>=2.0.0,<3.0.0
   - google-cloud-build>=3.0.0
   - google-cloud-compute>=0.1.0,<2.0.0
   - google-cloud-container>=2.2.0,<3.0.0
@@ -127,9 +127,6 @@ dependencies:
   # A transient dependency of google-cloud-bigquery-datatransfer, but we
   # further constrain it since older versions are buggy.
   - proto-plus>=1.19.6
-  # Google bigtable client require protobuf <= 3.20.0. We can remove the limitation
-  # when this limitation is removed
-  - protobuf<=3.20.0
 
 integrations:
   - integration-name: Google Analytics360
docs/apache-airflow-providers-google/index.rst (2 changes: 1 addition & 1 deletion)
@@ -112,7 +112,7 @@ PIP package Version required
 ``google-cloud-aiplatform``            ``>=1.7.1,<2.0.0``
 ``google-cloud-automl``                ``>=2.1.0``
 ``google-cloud-bigquery-datatransfer`` ``>=3.0.0``
-``google-cloud-bigtable``              ``>=1.0.0,<2.0.0``
+``google-cloud-bigtable``              ``>=2.0.0,<3.0.0``
 ``google-cloud-build``                 ``>=3.0.0``
 ``google-cloud-compute``               ``>=0.1.0,<2.0.0``
 ``google-cloud-container``             ``>=2.2.0,<3.0.0``
generated/provider_dependencies.json (3 changes: 1 addition & 2 deletions)
@@ -336,7 +336,7 @@
       "google-cloud-aiplatform>=1.7.1,<2.0.0",
       "google-cloud-automl>=2.1.0",
       "google-cloud-bigquery-datatransfer>=3.0.0",
-      "google-cloud-bigtable>=1.0.0,<2.0.0",
+      "google-cloud-bigtable>=2.0.0,<3.0.0",
       "google-cloud-build>=3.0.0",
       "google-cloud-compute>=0.1.0,<2.0.0",
       "google-cloud-container>=2.2.0,<3.0.0",
@@ -373,7 +373,6 @@
       "pandas-gbq",
       "pandas>=0.17.1",
       "proto-plus>=1.19.6",
-      "protobuf<=3.20.0",
       "sqlalchemy-bigquery>=1.2.1"
     ],
     "cross-providers-deps": [
tests/providers/google/cloud/hooks/test_bigtable.py (3 changes: 1 addition & 2 deletions)
@@ -21,9 +21,8 @@
 from unittest.mock import PropertyMock
 
 import google
-from google.cloud.bigtable import Client
+from google.cloud.bigtable import Client, enums
 from google.cloud.bigtable.instance import Instance
-from google.cloud.bigtable_admin_v2 import enums
 
 from airflow.providers.google.cloud.hooks.bigtable import BigtableHook
 from airflow.providers.google.common.consts import CLIENT_INFO
tests/providers/google/cloud/operators/test_bigtable.py (2 changes: 1 addition & 1 deletion)
@@ -21,9 +21,9 @@
 
 import google.api_core.exceptions
 import pytest
+from google.cloud.bigtable import enums
 from google.cloud.bigtable.column_family import MaxVersionsGCRule
 from google.cloud.bigtable.instance import Instance
-from google.cloud.bigtable_admin_v2 import enums
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.operators.bigtable import (