Merge pull request #1926 from tseaver/bigtable-v2-cleanups
Remaining V2 cleanups
tseaver authored Jun 28, 2016
2 parents 6d3ff2e + c87ca51 commit 0ac2def
Showing 8 changed files with 168 additions and 80 deletions.
58 changes: 38 additions & 20 deletions gcloud/bigtable/client.py
@@ -40,8 +40,8 @@
from gcloud.bigtable._generated_v2 import (
bigtable_pb2 as data_v2_pb2)

-from gcloud.bigtable._generated import (
-operations_grpc_pb2 as operations_grpc_v1_pb2)
+from gcloud.bigtable._generated_v2 import (
+operations_grpc_pb2 as operations_grpc_v2_pb2)

from gcloud.bigtable.instance import Instance
from gcloud.client import _ClientFactoryMixin
@@ -51,14 +51,14 @@

TABLE_STUB_FACTORY_V2 = (
table_admin_v2_pb2.beta_create_BigtableTableAdmin_stub)
-TABLE_ADMIN_HOST_V2 = 'bigtabletableadmin.googleapis.com'
+TABLE_ADMIN_HOST_V2 = 'bigtableadmin.googleapis.com'
"""Table Admin API request host."""
TABLE_ADMIN_PORT_V2 = 443
"""Table Admin API request port."""

INSTANCE_STUB_FACTORY_V2 = (
instance_admin_v2_pb2.beta_create_BigtableInstanceAdmin_stub)
-INSTANCE_ADMIN_HOST_V2 = 'bigtableclusteradmin.googleapis.com'
+INSTANCE_ADMIN_HOST_V2 = 'bigtableadmin.googleapis.com'
"""Cluster Admin API request host."""
INSTANCE_ADMIN_PORT_V2 = 443
"""Cluster Admin API request port."""
@@ -69,7 +69,7 @@
DATA_API_PORT_V2 = 443
"""Data API request port."""

-OPERATIONS_STUB_FACTORY_V2 = operations_grpc_v1_pb2.beta_create_Operations_stub
+OPERATIONS_STUB_FACTORY_V2 = operations_grpc_v2_pb2.beta_create_Operations_stub
OPERATIONS_API_HOST_V2 = INSTANCE_ADMIN_HOST_V2
OPERATIONS_API_PORT_V2 = INSTANCE_ADMIN_PORT_V2

@@ -98,14 +98,14 @@ class Client(_ClientFactoryMixin, _ClientProjectMixin):
:type project: :class:`str` or :func:`unicode <unicode>`
:param project: (Optional) The ID of the project which owns the
-clusters, tables and data. If not provided, will
+instances, tables and data. If not provided, will
attempt to determine from the environment.
:type credentials:
:class:`OAuth2Credentials <oauth2client.client.OAuth2Credentials>` or
:data:`NoneType <types.NoneType>`
:param credentials: (Optional) The OAuth2 Credentials to use for this
-cluster. If not provided, defaults to the Google
+client. If not provided, defaults to the Google
Application Default Credentials.
:type read_only: bool
@@ -162,7 +162,7 @@ def __init__(self, project=None, credentials=None,

# These will be set in start().
self._data_stub_internal = None
-self._cluster_stub_internal = None
+self._instance_stub_internal = None
self._operations_stub_internal = None
self._table_stub_internal = None

@@ -229,7 +229,7 @@ def _data_stub(self):
return self._data_stub_internal

@property
-def _cluster_stub(self):
+def _instance_stub(self):
"""Getter for the gRPC stub used for the Instance Admin API.
:rtype: :class:`grpc.beta._stub._AutoIntermediary`
@@ -240,9 +240,9 @@ def _cluster_stub(self):
"""
if not self._admin:
raise ValueError('Client is not an admin client.')
-if self._cluster_stub_internal is None:
+if self._instance_stub_internal is None:
raise ValueError('Client has not been started.')
-return self._cluster_stub_internal
+return self._instance_stub_internal

@property
def _operations_stub(self):
@@ -285,7 +285,7 @@ def _make_data_stub(self):
return _make_stub(self, DATA_STUB_FACTORY_V2,
DATA_API_HOST_V2, DATA_API_PORT_V2)

-def _make_cluster_stub(self):
+def _make_instance_stub(self):
"""Creates gRPC stub to make requests to the Instance Admin API.
:rtype: :class:`grpc.beta._stub._AutoIntermediary`
@@ -340,11 +340,11 @@ def start(self):
self._data_stub_internal = self._make_data_stub()
self._data_stub_internal.__enter__()
if self._admin:
-self._cluster_stub_internal = self._make_cluster_stub()
+self._instance_stub_internal = self._make_instance_stub()
self._operations_stub_internal = self._make_operations_stub()
self._table_stub_internal = self._make_table_stub()

-self._cluster_stub_internal.__enter__()
+self._instance_stub_internal.__enter__()
self._operations_stub_internal.__enter__()
self._table_stub_internal.__enter__()

@@ -362,12 +362,12 @@ def stop(self):
# traceback to __exit__.
self._data_stub_internal.__exit__(None, None, None)
if self._admin:
-self._cluster_stub_internal.__exit__(None, None, None)
+self._instance_stub_internal.__exit__(None, None, None)
self._operations_stub_internal.__exit__(None, None, None)
self._table_stub_internal.__exit__(None, None, None)

self._data_stub_internal = None
-self._cluster_stub_internal = None
+self._instance_stub_internal = None
self._operations_stub_internal = None
self._table_stub_internal = None

@@ -392,13 +392,31 @@ def instance(self, instance_id, display_name=None):
"""
return Instance(instance_id, self, display_name=display_name)

+def list_instances(self):
+"""List instances owned by the project.
+:rtype: tuple
+:returns: A pair of results, the first is a list of
+:class:`.Instance` objects returned and the second is a
+list of strings (the failed locations in the request).
+"""
+request_pb = instance_admin_v2_pb2.ListInstancesRequest(
+parent=self.project_name)
+
+response = self._instance_stub.ListInstances(
+request_pb, self.timeout_seconds)
+
+instances = [Instance.from_pb(instance_pb, self)
+for instance_pb in response.instances]
+return instances, response.failed_locations


class _MetadataPlugin(object):
"""Callable class to transform metadata for gRPC requests.
:type client: :class:`.client.Client`
-:param client: The client that owns the cluster. Provides authorization and
-user agent.
+:param client: The client that owns the instance.
+Provides authorization and user agent.
"""

def __init__(self, client):
@@ -421,8 +439,8 @@ def _make_stub(client, stub_factory, host, port):
Uses / depends on the beta implementation of gRPC.
:type client: :class:`.client.Client`
-:param client: The client that owns the cluster. Provides authorization and
-user agent.
+:param client: The client that owns the instance.
+Provides authorization and user agent.
:type stub_factory: callable
:param stub_factory: A factory which will create a gRPC stub for
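For orientation, a minimal usage sketch of the renamed instance-admin surface introduced above (the project and instance IDs are placeholders; assumes Application Default Credentials and that the constructor accepts the admin flag described in the class docstring):

    from gcloud.bigtable.client import Client

    # Admin access is needed so that start() opens the instance, operations
    # and table admin stubs in addition to the data stub.
    client = Client(project='my-project', admin=True)  # placeholder project ID
    client.start()
    try:
        # New in this commit: list_instances() calls ListInstances on the
        # _instance_stub, now pointed at bigtableadmin.googleapis.com.
        instances, failed_locations = client.list_instances()
        for instance in instances:
            print(instance.display_name)
    finally:
        client.stop()  # closes all of the stubs opened by start()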
14 changes: 7 additions & 7 deletions gcloud/bigtable/cluster.py
@@ -210,7 +210,7 @@ def _update_from_pb(self, cluster_pb):
def from_pb(cls, cluster_pb, instance):
"""Creates a cluster instance from a protobuf.
-:type cluster_pb: :class:`bigtable_cluster_data_pb2.Cluster`
+:type cluster_pb: :class:`instance_pb2.Cluster`
:param cluster_pb: A cluster protobuf object.
:type instance: :class:`.instance.Instance>`
@@ -288,8 +288,8 @@ def __ne__(self, other):
def reload(self):
"""Reload the metadata for this cluster."""
request_pb = messages_v2_pb2.GetClusterRequest(name=self.name)
-# We expect a `._generated.bigtable_cluster_data_pb2.Cluster`.
-cluster_pb = self._instance._client._cluster_stub.GetCluster(
+# We expect a `._generated_v2.instance_pb2.Cluster`.
+cluster_pb = self._instance._client._instance_stub.GetCluster(
request_pb, self._instance._client.timeout_seconds)

# NOTE: _update_from_pb does not check that the project, instance and
@@ -318,7 +318,7 @@ def create(self):
"""
request_pb = _prepare_create_request(self)
# We expect a `google.longrunning.operations_pb2.Operation`.
-operation_pb = self._instance._client._cluster_stub.CreateCluster(
+operation_pb = self._instance._client._instance_stub.CreateCluster(
request_pb, self._instance._client.timeout_seconds)

op_id = _process_operation(operation_pb)
@@ -346,8 +346,8 @@ def update(self):
name=self.name,
serve_nodes=self.serve_nodes,
)
-# Ignore expected `._generated.bigtable_cluster_data_pb2.Cluster`.
-operation_pb = self._instance._client._cluster_stub.UpdateCluster(
+# Ignore expected `._generated_v2.instance_pb2.Cluster`.
+operation_pb = self._instance._client._instance_stub.UpdateCluster(
request_pb, self._instance._client.timeout_seconds)

op_id = _process_operation(operation_pb)
@@ -380,5 +380,5 @@ def delete(self):
"""
request_pb = messages_v2_pb2.DeleteClusterRequest(name=self.name)
# We expect a `google.protobuf.empty_pb2.Empty`
-self._instance._client._cluster_stub.DeleteCluster(
+self._instance._client._instance_stub.DeleteCluster(
request_pb, self._instance._client.timeout_seconds)
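All of the cluster calls above now route through the client's _instance_stub. A rough lifecycle sketch, assuming the usual instance.cluster() factory on Instance (not shown in this diff) and placeholder IDs:

    from gcloud.bigtable.client import Client

    client = Client(project='my-project', admin=True)  # placeholder project ID
    client.start()
    instance = client.instance('my-instance')          # placeholder instance ID
    cluster = instance.cluster('my-cluster', serve_nodes=3)  # factory assumed

    cluster.create()        # CreateCluster via client._instance_stub
    cluster.reload()        # GetCluster; expects an instance_pb2.Cluster
    cluster.serve_nodes = 5
    cluster.update()        # UpdateCluster (long-running operation)
    cluster.delete()        # DeleteCluster
    client.stop()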
2 changes: 1 addition & 1 deletion gcloud/bigtable/row.py
@@ -854,7 +854,7 @@ def _parse_rmw_row_response(row_response):
def _parse_family_pb(family_pb):
"""Parses a Family protobuf into a dictionary.
-:type family_pb: :class:`._generated.bigtable_data_pb2.Family`
+:type family_pb: :class:`._generated_v2.data_pb2.Family`
:param family_pb: A protobuf
:rtype: tuple
2 changes: 1 addition & 1 deletion gcloud/bigtable/row_data.py
@@ -44,7 +44,7 @@ def __init__(self, value, timestamp, labels=()):
def from_pb(cls, cell_pb):
"""Create a new cell from a Cell protobuf.
-:type cell_pb: :class:`._generated.bigtable_data_pb2.Cell`
+:type cell_pb: :class:`._generated_v2.data_pb2.Cell`
:param cell_pb: The protobuf to convert.
:rtype: :class:`Cell`
8 changes: 4 additions & 4 deletions gcloud/bigtable/table.py
@@ -141,7 +141,7 @@ def create(self, initial_split_keys=None):
.. note::
-Though a :class:`._generated.bigtable_table_data_pb2.Table` is also
+Though a :class:`._generated_v2.table_pb2.Table` is also
allowed (as the ``table`` property) in a create table request, we
do not support it in this method. As mentioned in the
:class:`Table` docstring, the name is the only useful property in
@@ -150,7 +150,7 @@ def create(self, initial_split_keys=None):
.. note::
A create request returns a
-:class:`._generated.bigtable_table_data_pb2.Table` but we don't use
+:class:`._generated_v2.table_pb2.Table` but we don't use
this response. The proto definition allows for the inclusion of a
``current_operation`` in the response, but it does not appear that
the Cloud Bigtable API returns any operation.
@@ -174,7 +174,7 @@ def create(self, initial_split_keys=None):
table_id=self.table_id,
)
client = self._instance._client
-# We expect a `._generated.bigtable_table_data_pb2.Table`
+# We expect a `._generated_v2.table_pb2.Table`
client._table_stub.CreateTable(request_pb, client.timeout_seconds)

def delete(self):
@@ -199,7 +199,7 @@ def list_column_families(self):
request_pb = table_admin_messages_v2_pb2.GetTableRequest(
name=self.name)
client = self._instance._client
-# We expect a `._generated.bigtable_table_data_pb2.Table`
+# We expect a `._generated_v2.table_pb2.Table`
table_pb = client._table_stub.GetTable(request_pb,
client.timeout_seconds)

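As with the cluster calls, the table admin calls keep the same shape; only the expected protobuf types move to the _generated_v2 modules. A rough sketch, assuming the usual instance.table() factory on Instance (not shown in this diff) and placeholder IDs:

    from gcloud.bigtable.client import Client

    client = Client(project='my-project', admin=True)  # placeholder project ID
    client.start()
    instance = client.instance('my-instance')          # placeholder instance ID

    table = instance.table('my-table')      # factory assumed
    table.create()                          # CreateTable via client._table_stub
    families = table.list_column_families() # GetTable; returns the configured column families
    table.delete()
    client.stop()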