From 47b9246197092c507153dc777254eb0403ef1c40 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Sun, 26 Jun 2016 17:58:21 -0400
Subject: [PATCH] Alias Bigtable V1 imports / factories / entry point
 constants.

Indicate their V1 source in their names.
Preparatory to converting to V2 equivalents.
---
 gcloud/bigtable/client.py             |  76 ++++++----
 gcloud/bigtable/cluster.py            |  33 ++--
 gcloud/bigtable/column_family.py      |  42 +++---
 gcloud/bigtable/row.py                |  47 +++---
 gcloud/bigtable/row_filters.py        | 100 ++++++------
 gcloud/bigtable/table.py              |  25 +--
 gcloud/bigtable/test_client.py        |  82 +++++-----
 gcloud/bigtable/test_cluster.py       | 112 +++++++-------
 gcloud/bigtable/test_column_family.py | 110 ++++++++------
 gcloud/bigtable/test_row.py           | 132 ++++++++--------
 gcloud/bigtable/test_row_data.py      | 105 +++++++------
 gcloud/bigtable/test_row_filters.py   | 209 +++++++++++++++-----------
 gcloud/bigtable/test_table.py         |  90 +++++------
 13 files changed, 631 insertions(+), 532 deletions(-)

diff --git a/gcloud/bigtable/client.py b/gcloud/bigtable/client.py
index 49faf42c8a25..0b97922894e4 100644
--- a/gcloud/bigtable/client.py
+++ b/gcloud/bigtable/client.py
@@ -31,40 +31,52 @@
 from grpc.beta import implementations
 
-from gcloud.bigtable._generated import bigtable_cluster_data_pb2 as data_pb2
-from gcloud.bigtable._generated import bigtable_cluster_service_pb2
+# Cluster admin service is V1-only (V2 provides instance admin instead)
 from gcloud.bigtable._generated import (
-    bigtable_cluster_service_messages_pb2 as messages_pb2)
-from gcloud.bigtable._generated import bigtable_service_pb2
-from gcloud.bigtable._generated import bigtable_table_service_pb2
-from gcloud.bigtable._generated import operations_grpc_pb2
+    bigtable_cluster_data_pb2 as cluster_data_v1_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_cluster_service_pb2 as cluster_service_v1_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_cluster_service_messages_pb2 as cluster_messages_v1_pb2)
+# V1 table admin service
+from gcloud.bigtable._generated import (
+    bigtable_table_service_pb2 as table_service_v1_pb2)
+# V1 data service
+from gcloud.bigtable._generated import (
+    bigtable_service_pb2 as data_service_v1_pb2)
+
+from gcloud.bigtable._generated import (
+    operations_grpc_pb2 as operations_grpc_v1_pb2)
+
 from gcloud.bigtable.cluster import Cluster
 from gcloud.client import _ClientFactoryMixin
 from gcloud.client import _ClientProjectMixin
 from gcloud.credentials import get_credentials
 
-TABLE_STUB_FACTORY = (
-    bigtable_table_service_pb2.beta_create_BigtableTableService_stub)
-TABLE_ADMIN_HOST = 'bigtabletableadmin.googleapis.com'
+TABLE_STUB_FACTORY_V1 = (
+    table_service_v1_pb2.beta_create_BigtableTableService_stub)
+TABLE_ADMIN_HOST_V1 = 'bigtabletableadmin.googleapis.com'
 """Table Admin API request host."""
-TABLE_ADMIN_PORT = 443
+TABLE_ADMIN_PORT_V1 = 443
 """Table Admin API request port."""
 
-CLUSTER_STUB_FACTORY = (
-    bigtable_cluster_service_pb2.beta_create_BigtableClusterService_stub)
-CLUSTER_ADMIN_HOST = 'bigtableclusteradmin.googleapis.com'
+CLUSTER_STUB_FACTORY_V1 = (
+    cluster_service_v1_pb2.beta_create_BigtableClusterService_stub)
+CLUSTER_ADMIN_HOST_V1 = 'bigtableclusteradmin.googleapis.com'
 """Cluster Admin API request host."""
-CLUSTER_ADMIN_PORT = 443
+CLUSTER_ADMIN_PORT_V1 = 443
 """Cluster Admin API request port."""
 
-DATA_STUB_FACTORY = bigtable_service_pb2.beta_create_BigtableService_stub
-DATA_API_HOST = 'bigtable.googleapis.com'
+DATA_STUB_FACTORY_V1 = data_service_v1_pb2.beta_create_BigtableService_stub
+DATA_API_HOST_V1 = 'bigtable.googleapis.com'
 """Data API request host."""
-DATA_API_PORT = 443
+DATA_API_PORT_V1 = 443
 """Data API request port."""
 
-OPERATIONS_STUB_FACTORY = operations_grpc_pb2.beta_create_Operations_stub
+OPERATIONS_STUB_FACTORY_V1 = operations_grpc_v1_pb2.beta_create_Operations_stub
+OPERATIONS_API_HOST_V1 = CLUSTER_ADMIN_HOST_V1
+OPERATIONS_API_PORT_V1 = CLUSTER_ADMIN_PORT_V1
 
 ADMIN_SCOPE = 'https://www.googleapis.com/auth/bigtable.admin'
 """Scope for interacting with the Cluster Admin and Table Admin APIs."""
@@ -275,8 +287,8 @@ def _make_data_stub(self):
         :rtype: :class:`grpc.beta._stub._AutoIntermediary`
         :returns: A gRPC stub object.
         """
-        return _make_stub(self, DATA_STUB_FACTORY,
-                          DATA_API_HOST, DATA_API_PORT)
+        return _make_stub(self, DATA_STUB_FACTORY_V1,
+                          DATA_API_HOST_V1, DATA_API_PORT_V1)
 
     def _make_cluster_stub(self):
         """Creates gRPC stub to make requests to the Cluster Admin API.
@@ -284,8 +296,8 @@ def _make_cluster_stub(self):
         :rtype: :class:`grpc.beta._stub._AutoIntermediary`
         :returns: A gRPC stub object.
         """
-        return _make_stub(self, CLUSTER_STUB_FACTORY,
-                          CLUSTER_ADMIN_HOST, CLUSTER_ADMIN_PORT)
+        return _make_stub(self, CLUSTER_STUB_FACTORY_V1,
+                          CLUSTER_ADMIN_HOST_V1, CLUSTER_ADMIN_PORT_V1)
 
     def _make_operations_stub(self):
         """Creates gRPC stub to make requests to the Operations API.
@@ -296,8 +308,8 @@ def _make_operations_stub(self):
         :rtype: :class:`grpc.beta._stub._AutoIntermediary`
         :returns: A gRPC stub object.
         """
-        return _make_stub(self, OPERATIONS_STUB_FACTORY,
-                          CLUSTER_ADMIN_HOST, CLUSTER_ADMIN_PORT)
+        return _make_stub(self, OPERATIONS_STUB_FACTORY_V1,
+                          OPERATIONS_API_HOST_V1, OPERATIONS_API_PORT_V1)
 
     def _make_table_stub(self):
         """Creates gRPC stub to make requests to the Table Admin API.
@@ -305,8 +317,8 @@ def _make_table_stub(self):
         :rtype: :class:`grpc.beta._stub._AutoIntermediary`
         :returns: A gRPC stub object.
        """
-        return _make_stub(self, TABLE_STUB_FACTORY,
-                          TABLE_ADMIN_HOST, TABLE_ADMIN_PORT)
+        return _make_stub(self, TABLE_STUB_FACTORY_V1,
+                          TABLE_ADMIN_HOST_V1, TABLE_ADMIN_PORT_V1)
 
     def is_started(self):
         """Check if the client has been started.
@@ -401,14 +413,15 @@ def list_zones(self):
         :raises: :class:`ValueError <exceptions.ValueError>` if one of the
                  zones is not in ``OK`` state.
         """
-        request_pb = messages_pb2.ListZonesRequest(name=self.project_name)
-        # We expect a `.messages_pb2.ListZonesResponse`
+        request_pb = cluster_messages_v1_pb2.ListZonesRequest(
+            name=self.project_name)
+        # We expect a `.cluster_messages_v1_pb2.ListZonesResponse`
         list_zones_response = self._cluster_stub.ListZones(
             request_pb, self.timeout_seconds)
 
         result = []
         for zone in list_zones_response.zones:
-            if zone.status != data_pb2.Zone.OK:
+            if zone.status != cluster_data_v1_pb2.Zone.OK:
                 raise ValueError('Zone %s not in OK state' % (
                     zone.display_name,))
             result.append(zone.display_name)
@@ -422,8 +435,9 @@ def list_clusters(self):
                   returned and the second is a list of strings (the failed
                   zones in the request).
         """
-        request_pb = messages_pb2.ListClustersRequest(name=self.project_name)
-        # We expect a `.messages_pb2.ListClustersResponse`
+        request_pb = cluster_messages_v1_pb2.ListClustersRequest(
+            name=self.project_name)
+        # We expect a `.cluster_messages_v1_pb2.ListClustersResponse`
         list_clusters_response = self._cluster_stub.ListClusters(
             request_pb, self.timeout_seconds)
 
diff --git a/gcloud/bigtable/cluster.py b/gcloud/bigtable/cluster.py
index 55e7a49b446d..28875730c292 100644
--- a/gcloud/bigtable/cluster.py
+++ b/gcloud/bigtable/cluster.py
@@ -20,11 +20,12 @@
 from google.longrunning import operations_pb2
 
 from gcloud._helpers import _pb_timestamp_to_datetime
-from gcloud.bigtable._generated import bigtable_cluster_data_pb2 as data_pb2
 from gcloud.bigtable._generated import (
-    bigtable_cluster_service_messages_pb2 as messages_pb2)
+    bigtable_cluster_data_pb2 as data_v1_pb2)
 from gcloud.bigtable._generated import (
-    bigtable_table_service_messages_pb2 as table_messages_pb2)
+    bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_table_service_messages_pb2 as table_messages_v1_pb2)
 from gcloud.bigtable.table import Table
@@ -40,9 +41,9 @@ _UPDATE_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'UpdateClusterMetadata'
 _UNDELETE_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'UndeleteClusterMetadata'
 _TYPE_URL_MAP = {
-    _CLUSTER_CREATE_METADATA: messages_pb2.CreateClusterMetadata,
-    _UPDATE_CREATE_METADATA: messages_pb2.UpdateClusterMetadata,
-    _UNDELETE_CREATE_METADATA: messages_pb2.UndeleteClusterMetadata,
+    _CLUSTER_CREATE_METADATA: messages_v1_pb2.CreateClusterMetadata,
+    _UPDATE_CREATE_METADATA: messages_v1_pb2.UpdateClusterMetadata,
+    _UNDELETE_CREATE_METADATA: messages_v1_pb2.UndeleteClusterMetadata,
 }
 
 DEFAULT_SERVE_NODES = 3
@@ -55,15 +56,15 @@ def _prepare_create_request(cluster):
     :type cluster: :class:`Cluster`
     :param cluster: The cluster to be created.
 
-    :rtype: :class:`.messages_pb2.CreateClusterRequest`
+    :rtype: :class:`.messages_v1_pb2.CreateClusterRequest`
     :returns: The CreateCluster request object containing the cluster info.
     """
     zone_full_name = ('projects/' + cluster._client.project +
                      '/zones/' + cluster.zone)
-    return messages_pb2.CreateClusterRequest(
+    return messages_v1_pb2.CreateClusterRequest(
         name=zone_full_name,
         cluster_id=cluster.cluster_id,
-        cluster=data_pb2.Cluster(
+        cluster=data_v1_pb2.Cluster(
             display_name=cluster.display_name,
             serve_nodes=cluster.serve_nodes,
         ),
@@ -198,7 +199,7 @@ class Cluster(object):
     .. note::
 
         For now, we leave out the ``default_storage_type`` (an enum)
-        which if not sent will end up as :data:`.data_pb2.STORAGE_SSD`.
+        which if not sent will end up as :data:`.data_v1_pb2.STORAGE_SSD`.
 
     :type zone: str
     :param zone: The name of the zone where the cluster resides.
@@ -332,7 +333,7 @@ def __ne__(self, other):
 
     def reload(self):
         """Reload the metadata for this cluster."""
-        request_pb = messages_pb2.GetClusterRequest(name=self.name)
+        request_pb = messages_v1_pb2.GetClusterRequest(name=self.name)
         # We expect a `._generated.bigtable_cluster_data_pb2.Cluster`.
         cluster_pb = self._client._cluster_stub.GetCluster(
             request_pb, self._client.timeout_seconds)
@@ -389,7 +390,7 @@ def update(self):
         :returns: The long-running operation corresponding to the
                   update operation.
         """
-        request_pb = data_pb2.Cluster(
+        request_pb = data_v1_pb2.Cluster(
             name=self.name,
             display_name=self.display_name,
             serve_nodes=self.serve_nodes,
@@ -426,7 +427,7 @@ def delete(self):
             irrevocably disappear from the API, and their data will be
             permanently deleted.
         """
-        request_pb = messages_pb2.DeleteClusterRequest(name=self.name)
+        request_pb = messages_v1_pb2.DeleteClusterRequest(name=self.name)
         # We expect a `google.protobuf.empty_pb2.Empty`
         self._client._cluster_stub.DeleteCluster(
             request_pb, self._client.timeout_seconds)
@@ -456,7 +457,7 @@ def undelete(self):
         :returns: The long-running operation corresponding to the undelete
                   operation.
         """
-        request_pb = messages_pb2.UndeleteClusterRequest(name=self.name)
+        request_pb = messages_v1_pb2.UndeleteClusterRequest(name=self.name)
         # We expect a `google.longrunning.operations_pb2.Operation`.
         operation_pb2 = self._client._cluster_stub.UndeleteCluster(
             request_pb, self._client.timeout_seconds)
@@ -472,8 +473,8 @@ def list_tables(self):
         :raises: :class:`ValueError <exceptions.ValueError>` if one of the
                  returned tables has a name that is not of the expected format.
         """
-        request_pb = table_messages_pb2.ListTablesRequest(name=self.name)
-        # We expect a `table_messages_pb2.ListTablesResponse`
+        request_pb = table_messages_v1_pb2.ListTablesRequest(name=self.name)
+        # We expect a `table_messages_v1_pb2.ListTablesResponse`
         table_list_pb = self._client._table_stub.ListTables(
             request_pb, self._client.timeout_seconds)
 
diff --git a/gcloud/bigtable/column_family.py b/gcloud/bigtable/column_family.py
index c0d9060316a4..5d17be804f9c 100644
--- a/gcloud/bigtable/column_family.py
+++ b/gcloud/bigtable/column_family.py
@@ -20,9 +20,10 @@
 from google.protobuf import duration_pb2
 
 from gcloud._helpers import _total_seconds
-from gcloud.bigtable._generated import bigtable_table_data_pb2 as data_pb2
 from gcloud.bigtable._generated import (
-    bigtable_table_service_messages_pb2 as messages_pb2)
+    bigtable_table_data_pb2 as data_v1_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_table_service_messages_pb2 as messages_v1_pb2)
 
 
 def _timedelta_to_duration_pb(timedelta_val):
@@ -110,10 +111,10 @@ def __eq__(self, other):
     def to_pb(self):
         """Converts the garbage collection rule to a protobuf.
 
-        :rtype: :class:`.data_pb2.GcRule`
+        :rtype: :class:`.data_v1_pb2.GcRule`
         :returns: The converted current object.
         """
-        return data_pb2.GcRule(max_num_versions=self.max_num_versions)
+        return data_v1_pb2.GcRule(max_num_versions=self.max_num_versions)
 
 
 class MaxAgeGCRule(GarbageCollectionRule):
@@ -134,11 +135,11 @@ def to_pb(self):
         """Converts the garbage collection rule to a protobuf.
 
-        :rtype: :class:`.data_pb2.GcRule`
+        :rtype: :class:`.data_v1_pb2.GcRule`
         :returns: The converted current object.
         """
         max_age = _timedelta_to_duration_pb(self.max_age)
-        return data_pb2.GcRule(max_age=max_age)
+        return data_v1_pb2.GcRule(max_age=max_age)
 
 
 class GCRuleUnion(GarbageCollectionRule):
@@ -159,12 +160,12 @@ def to_pb(self):
         """Converts the union into a single GC rule as a protobuf.
 
-        :rtype: :class:`.data_pb2.GcRule`
+        :rtype: :class:`.data_v1_pb2.GcRule`
         :returns: The converted current object.
         """
-        union = data_pb2.GcRule.Union(
-            rules=[rule.to_pb() for rule in self.rules])
-        return data_pb2.GcRule(union=union)
+        union = data_v1_pb2.GcRule.Union(
+            rules=[rule.to_pb() for rule in self.rules])
+        return data_v1_pb2.GcRule(union=union)
 
 
 class GCRuleIntersection(GarbageCollectionRule):
@@ -185,12 +186,12 @@ def to_pb(self):
         """Converts the intersection into a single GC rule as a protobuf.
 
-        :rtype: :class:`.data_pb2.GcRule`
+        :rtype: :class:`.data_v1_pb2.GcRule`
         :returns: The converted current object.
         """
-        intersection = data_pb2.GcRule.Intersection(
-            rules=[rule.to_pb() for rule in self.rules])
-        return data_pb2.GcRule(intersection=intersection)
+        intersection = data_v1_pb2.GcRule.Intersection(
+            rules=[rule.to_pb() for rule in self.rules])
+        return data_v1_pb2.GcRule(intersection=intersection)
 
 
 class ColumnFamily(object):
@@ -250,16 +251,17 @@ def __ne__(self, other):
     def create(self):
         """Create this column family."""
         if self.gc_rule is None:
-            column_family = data_pb2.ColumnFamily()
+            column_family = data_v1_pb2.ColumnFamily()
         else:
-            column_family = data_pb2.ColumnFamily(gc_rule=self.gc_rule.to_pb())
-        request_pb = messages_pb2.CreateColumnFamilyRequest(
+            column_family = data_v1_pb2.ColumnFamily(
+                gc_rule=self.gc_rule.to_pb())
+        request_pb = messages_v1_pb2.CreateColumnFamilyRequest(
             name=self._table.name,
             column_family_id=self.column_family_id,
             column_family=column_family,
         )
         client = self._table._cluster._client
-        # We expect a `.data_pb2.ColumnFamily`. We ignore it since the only
+        # We expect a `.data_v1_pb2.ColumnFamily`. We ignore it since the only
         # data it contains are the GC rule and the column family ID already
         # stored on this instance.
         client._table_stub.CreateColumnFamily(request_pb,
@@ -276,9 +278,9 @@ def update(self):
         request_kwargs = {'name': self.name}
         if self.gc_rule is not None:
             request_kwargs['gc_rule'] = self.gc_rule.to_pb()
-        request_pb = data_pb2.ColumnFamily(**request_kwargs)
+        request_pb = data_v1_pb2.ColumnFamily(**request_kwargs)
         client = self._table._cluster._client
-        # We expect a `.data_pb2.ColumnFamily`. We ignore it since the only
+        # We expect a `.data_v1_pb2.ColumnFamily`. We ignore it since the only
         # data it contains are the GC rule and the column family ID already
         # stored on this instance.
         client._table_stub.UpdateColumnFamily(request_pb,
 
     def delete(self):
         """Delete this column family."""
-        request_pb = messages_pb2.DeleteColumnFamilyRequest(name=self.name)
+        request_pb = messages_v1_pb2.DeleteColumnFamilyRequest(name=self.name)
         client = self._table._cluster._client
         # We expect a `google.protobuf.empty_pb2.Empty`
         client._table_stub.DeleteColumnFamily(request_pb,
@@ -296,7 +298,7 @@ def delete(self):
 def _gc_rule_from_pb(gc_rule_pb):
     """Convert a protobuf GC rule to a native object.
 
-    :type gc_rule_pb: :class:`.data_pb2.GcRule`
+    :type gc_rule_pb: :class:`.data_v1_pb2.GcRule`
     :param gc_rule_pb: The GC rule to convert.
 
     :rtype: :class:`GarbageCollectionRule` or :data:`NoneType <types.NoneType>`
diff --git a/gcloud/bigtable/row.py b/gcloud/bigtable/row.py
index cb9ce2e67e3d..1dbd38aa7962 100644
--- a/gcloud/bigtable/row.py
+++ b/gcloud/bigtable/row.py
@@ -22,9 +22,10 @@
 from gcloud._helpers import _datetime_from_microseconds
 from gcloud._helpers import _microseconds_from_datetime
 from gcloud._helpers import _to_bytes
-from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2
 from gcloud.bigtable._generated import (
-    bigtable_service_messages_pb2 as messages_pb2)
+    bigtable_data_pb2 as data_v1_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_service_messages_pb2 as messages_v1_pb2)
 
 
 _PACK_I64 = struct.Struct('>q').pack
@@ -133,13 +134,13 @@ def _set_cell(self, column_family_id, column, value, timestamp=None,
             # Truncate to millisecond granularity.
             timestamp_micros -= (timestamp_micros % 1000)
 
-        mutation_val = data_pb2.Mutation.SetCell(
+        mutation_val = data_v1_pb2.Mutation.SetCell(
             family_name=column_family_id,
             column_qualifier=column,
             timestamp_micros=timestamp_micros,
             value=value,
         )
-        mutation_pb = data_pb2.Mutation(set_cell=mutation_val)
+        mutation_pb = data_v1_pb2.Mutation(set_cell=mutation_val)
         self._get_mutations(state).append(mutation_pb)
 
     def _delete(self, state=None):
@@ -155,8 +156,8 @@ def _delete(self, state=None):
         :param state: (Optional) The state that is passed along to
                       :meth:`_get_mutations`.
         """
-        mutation_val = data_pb2.Mutation.DeleteFromRow()
-        mutation_pb = data_pb2.Mutation(delete_from_row=mutation_val)
+        mutation_val = data_v1_pb2.Mutation.DeleteFromRow()
+        mutation_pb = data_v1_pb2.Mutation(delete_from_row=mutation_val)
         self._get_mutations(state).append(mutation_pb)
 
     def _delete_cells(self, column_family_id, columns, time_range=None,
@@ -187,10 +188,10 @@ def _delete_cells(self, column_family_id, columns, time_range=None,
         """
         mutations_list = self._get_mutations(state)
         if columns is self.ALL_COLUMNS:
-            mutation_val = data_pb2.Mutation.DeleteFromFamily(
+            mutation_val = data_v1_pb2.Mutation.DeleteFromFamily(
                 family_name=column_family_id,
             )
-            mutation_pb = data_pb2.Mutation(delete_from_family=mutation_val)
+            mutation_pb = data_v1_pb2.Mutation(delete_from_family=mutation_val)
             mutations_list.append(mutation_pb)
         else:
             delete_kwargs = {}
@@ -206,9 +207,9 @@ def _delete_cells(self, column_family_id, columns, time_range=None,
                     family_name=column_family_id,
                     column_qualifier=column,
                 )
-                mutation_val = data_pb2.Mutation.DeleteFromColumn(
-                    **delete_kwargs)
-                mutation_pb = data_pb2.Mutation(
-                    delete_from_column=mutation_val)
+                mutation_val = data_v1_pb2.Mutation.DeleteFromColumn(
+                    **delete_kwargs)
+                mutation_pb = data_v1_pb2.Mutation(
+                    delete_from_column=mutation_val)
                 to_append.append(mutation_pb)
 
@@ -388,7 +389,7 @@ def commit(self):
         if num_mutations > MAX_MUTATIONS:
             raise ValueError('%d total mutations exceed the maximum allowable '
                              '%d.' % (num_mutations, MAX_MUTATIONS))
-        request_pb = messages_pb2.MutateRowRequest(
+        request_pb = messages_v1_pb2.MutateRowRequest(
             table_name=self._table.name,
             row_key=self._row_key,
             mutations=mutations_list,
@@ -503,14 +504,14 @@ def commit(self):
                              'mutations and %d false mutations.'
                              % (MAX_MUTATIONS, num_true_mutations,
                                 num_false_mutations))
-        request_pb = messages_pb2.CheckAndMutateRowRequest(
+        request_pb = messages_v1_pb2.CheckAndMutateRowRequest(
             table_name=self._table.name,
             row_key=self._row_key,
             predicate_filter=self._filter.to_pb(),
             true_mutations=true_mutations,
             false_mutations=false_mutations,
         )
-        # We expect a `.messages_pb2.CheckAndMutateRowResponse`
+        # We expect a `.messages_v1_pb2.CheckAndMutateRowResponse`
         client = self._table._cluster._client
         resp = client._data_stub.CheckAndMutateRow(
             request_pb, client.timeout_seconds)
@@ -700,9 +701,10 @@ def append_cell_value(self, column_family_id, column, value):
         """
         column = _to_bytes(column)
         value = _to_bytes(value)
-        rule_pb = data_pb2.ReadModifyWriteRule(family_name=column_family_id,
-                                               column_qualifier=column,
-                                               append_value=value)
+        rule_pb = data_v1_pb2.ReadModifyWriteRule(
+            family_name=column_family_id,
+            column_qualifier=column,
+            append_value=value)
         self._rule_pb_list.append(rule_pb)
 
     def increment_cell_value(self, column_family_id, column, int_value):
@@ -736,9 +738,10 @@ def increment_cell_value(self, column_family_id, column, int_value):
         will fail.
         """
         column = _to_bytes(column)
-        rule_pb = data_pb2.ReadModifyWriteRule(family_name=column_family_id,
-                                               column_qualifier=column,
-                                               increment_amount=int_value)
+        rule_pb = data_v1_pb2.ReadModifyWriteRule(
+            family_name=column_family_id,
+            column_qualifier=column,
+            increment_amount=int_value)
         self._rule_pb_list.append(rule_pb)
 
     def commit(self):
@@ -791,12 +794,12 @@ def commit(self):
         if num_mutations > MAX_MUTATIONS:
             raise ValueError('%d total append mutations exceed the maximum '
                              'allowable %d.' % (num_mutations, MAX_MUTATIONS))
-        request_pb = messages_pb2.ReadModifyWriteRowRequest(
+        request_pb = messages_v1_pb2.ReadModifyWriteRowRequest(
             table_name=self._table.name,
             row_key=self._row_key,
             rules=self._rule_pb_list,
         )
-        # We expect a `.data_pb2.Row`
+        # We expect a `.data_v1_pb2.Row`
         client = self._table._cluster._client
         row_response = client._data_stub.ReadModifyWriteRow(
             request_pb, client.timeout_seconds)
@@ -811,7 +814,7 @@ def commit(self):
 def _parse_rmw_row_response(row_response):
     """Parses the response to a ``ReadModifyWriteRow`` request.
 
-    :type row_response: :class:`.data_pb2.Row`
+    :type row_response: :class:`.data_v1_pb2.Row`
     :param row_response: The response row (with only modified cells) from a
                          ``ReadModifyWriteRow`` request.
diff --git a/gcloud/bigtable/row_filters.py b/gcloud/bigtable/row_filters.py
index b7a1388b3a09..2b11a06bfdd9 100644
--- a/gcloud/bigtable/row_filters.py
+++ b/gcloud/bigtable/row_filters.py
@@ -17,7 +17,8 @@
 from gcloud._helpers import _microseconds_from_datetime
 from gcloud._helpers import _to_bytes
-from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2
+from gcloud.bigtable._generated import (
+    bigtable_data_pb2 as data_v1_pb2)
 
 
 class RowFilter(object):
@@ -65,10 +66,10 @@ class SinkFilter(_BoolFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(sink=self.flag)
+        return data_v1_pb2.RowFilter(sink=self.flag)
 
 
 class PassAllFilter(_BoolFilter):
@@ -83,10 +84,10 @@ class PassAllFilter(_BoolFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(pass_all_filter=self.flag)
+        return data_v1_pb2.RowFilter(pass_all_filter=self.flag)
 
 
 class BlockAllFilter(_BoolFilter):
@@ -100,10 +101,10 @@ class BlockAllFilter(_BoolFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(block_all_filter=self.flag)
+        return data_v1_pb2.RowFilter(block_all_filter=self.flag)
 
 
 class _RegexFilter(RowFilter):
@@ -153,10 +154,10 @@ class RowKeyRegexFilter(_RegexFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(row_key_regex_filter=self.regex)
+        return data_v1_pb2.RowFilter(row_key_regex_filter=self.regex)
 
 
 class RowSampleFilter(RowFilter):
@@ -178,10 +179,10 @@ def __eq__(self, other):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(row_sample_filter=self.sample)
+        return data_v1_pb2.RowFilter(row_sample_filter=self.sample)
 
 
 class FamilyNameRegexFilter(_RegexFilter):
@@ -202,10 +203,10 @@ class FamilyNameRegexFilter(_RegexFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(family_name_regex_filter=self.regex)
+        return data_v1_pb2.RowFilter(family_name_regex_filter=self.regex)
 
 
 class ColumnQualifierRegexFilter(_RegexFilter):
@@ -232,10 +233,10 @@ class ColumnQualifierRegexFilter(_RegexFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(column_qualifier_regex_filter=self.regex)
+        return data_v1_pb2.RowFilter(column_qualifier_regex_filter=self.regex)
 
 
 class TimestampRange(object):
@@ -266,7 +267,7 @@ def __ne__(self, other):
     def to_pb(self):
         """Converts the :class:`TimestampRange` to a protobuf.
 
-        :rtype: :class:`.data_pb2.TimestampRange`
+        :rtype: :class:`.data_v1_pb2.TimestampRange`
         :returns: The converted current object.
         """
         timestamp_range_kwargs = {}
@@ -276,7 +277,7 @@ def to_pb(self):
         if self.end is not None:
             timestamp_range_kwargs['end_timestamp_micros'] = (
                 _microseconds_from_datetime(self.end))
-        return data_pb2.TimestampRange(**timestamp_range_kwargs)
+        return data_v1_pb2.TimestampRange(**timestamp_range_kwargs)
 
 
 class TimestampRangeFilter(RowFilter):
@@ -300,10 +301,11 @@ def to_pb(self):
         First converts the ``range_`` on the current object to a protobuf and
         then uses it in the ``timestamp_range_filter`` field.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(timestamp_range_filter=self.range_.to_pb())
+        return data_v1_pb2.RowFilter(
+            timestamp_range_filter=self.range_.to_pb())
 
 
 class ColumnRangeFilter(RowFilter):
@@ -375,10 +377,10 @@ def __eq__(self, other):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        First converts to a :class:`.data_pb2.ColumnRange` and then uses it
+        First converts to a :class:`.data_v1_pb2.ColumnRange` and then uses it
         in the ``column_range_filter`` field.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
         column_range_kwargs = {'family_name': self.column_family_id}
@@ -395,8 +397,8 @@ def to_pb(self):
                 key = 'end_qualifier_exclusive'
             column_range_kwargs[key] = _to_bytes(self.end_column)
 
-        column_range = data_pb2.ColumnRange(**column_range_kwargs)
-        return data_pb2.RowFilter(column_range_filter=column_range)
+        column_range = data_v1_pb2.ColumnRange(**column_range_kwargs)
+        return data_v1_pb2.RowFilter(column_range_filter=column_range)
 
 
 class ValueRegexFilter(_RegexFilter):
@@ -423,10 +425,10 @@ class ValueRegexFilter(_RegexFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(value_regex_filter=self.regex)
+        return data_v1_pb2.RowFilter(value_regex_filter=self.regex)
 
 
 class ValueRangeFilter(RowFilter):
@@ -492,10 +494,10 @@ def __eq__(self, other):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        First converts to a :class:`.data_pb2.ValueRange` and then uses
+        First converts to a :class:`.data_v1_pb2.ValueRange` and then uses
         it to create a row filter protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
         value_range_kwargs = {}
@@ -512,8 +514,8 @@ def to_pb(self):
                 key = 'end_value_exclusive'
             value_range_kwargs[key] = _to_bytes(self.end_value)
 
-        value_range = data_pb2.ValueRange(**value_range_kwargs)
-        return data_pb2.RowFilter(value_range_filter=value_range)
+        value_range = data_v1_pb2.ValueRange(**value_range_kwargs)
+        return data_v1_pb2.RowFilter(value_range_filter=value_range)
 
 
 class _CellCountFilter(RowFilter):
@@ -545,10 +547,11 @@ class CellsRowOffsetFilter(_CellCountFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(cells_per_row_offset_filter=self.num_cells)
+        return data_v1_pb2.RowFilter(
+            cells_per_row_offset_filter=self.num_cells)
 
 
 class CellsRowLimitFilter(_CellCountFilter):
@@ -561,10 +564,10 @@ class CellsRowLimitFilter(_CellCountFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(cells_per_row_limit_filter=self.num_cells)
+        return data_v1_pb2.RowFilter(cells_per_row_limit_filter=self.num_cells)
 
 
 class CellsColumnLimitFilter(_CellCountFilter):
@@ -579,10 +582,11 @@ class CellsColumnLimitFilter(_CellCountFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(cells_per_column_limit_filter=self.num_cells)
+        return data_v1_pb2.RowFilter(
+            cells_per_column_limit_filter=self.num_cells)
 
 
 class StripValueTransformerFilter(_BoolFilter):
@@ -597,10 +601,10 @@ class StripValueTransformerFilter(_BoolFilter):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(strip_value_transformer=self.flag)
+        return data_v1_pb2.RowFilter(strip_value_transformer=self.flag)
 
 
 class ApplyLabelFilter(RowFilter):
@@ -633,10 +637,10 @@ def __eq__(self, other):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        return data_pb2.RowFilter(apply_label_transformer=self.label)
+        return data_v1_pb2.RowFilter(apply_label_transformer=self.label)
 
 
 class _FilterCombination(RowFilter):
@@ -675,12 +679,12 @@ class RowFilterChain(_FilterCombination):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        chain = data_pb2.RowFilter.Chain(
-            filters=[row_filter.to_pb() for row_filter in self.filters])
-        return data_pb2.RowFilter(chain=chain)
+        chain = data_v1_pb2.RowFilter.Chain(
+            filters=[row_filter.to_pb() for row_filter in self.filters])
+        return data_v1_pb2.RowFilter(chain=chain)
 
 
 class RowFilterUnion(_FilterCombination):
@@ -699,12 +703,12 @@ class RowFilterUnion(_FilterCombination):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
-        interleave = data_pb2.RowFilter.Interleave(
-            filters=[row_filter.to_pb() for row_filter in self.filters])
-        return data_pb2.RowFilter(interleave=interleave)
+        interleave = data_v1_pb2.RowFilter.Interleave(
+            filters=[row_filter.to_pb() for row_filter in self.filters])
+        return data_v1_pb2.RowFilter(interleave=interleave)
 
 
 class ConditionalRowFilter(RowFilter):
@@ -752,7 +756,7 @@ def __eq__(self, other):
     def to_pb(self):
         """Converts the row filter to a protobuf.
 
-        :rtype: :class:`.data_pb2.RowFilter`
+        :rtype: :class:`.data_v1_pb2.RowFilter`
         :returns: The converted current object.
         """
         condition_kwargs = {'predicate_filter': self.base_filter.to_pb()}
@@ -760,5 +764,5 @@ def to_pb(self):
             condition_kwargs['true_filter'] = self.true_filter.to_pb()
         if self.false_filter is not None:
             condition_kwargs['false_filter'] = self.false_filter.to_pb()
-        condition = data_pb2.RowFilter.Condition(**condition_kwargs)
-        return data_pb2.RowFilter(condition=condition)
+        condition = data_v1_pb2.RowFilter.Condition(**condition_kwargs)
+        return data_v1_pb2.RowFilter(condition=condition)
diff --git a/gcloud/bigtable/table.py b/gcloud/bigtable/table.py
index c619b7145d71..155b5123c67f 100644
--- a/gcloud/bigtable/table.py
+++ b/gcloud/bigtable/table.py
@@ -16,11 +16,12 @@
 
 from gcloud._helpers import _to_bytes
-from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2
 from gcloud.bigtable._generated import (
-    bigtable_table_service_messages_pb2 as messages_pb2)
+    bigtable_data_pb2 as data_v1_pb2)
 from gcloud.bigtable._generated import (
-    bigtable_service_messages_pb2 as data_messages_pb2)
+    bigtable_table_service_messages_pb2 as messages_v1_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_service_messages_pb2 as data_messages_v1_pb2)
 from gcloud.bigtable.column_family import _gc_rule_from_pb
 from gcloud.bigtable.column_family import ColumnFamily
 from gcloud.bigtable.row import AppendRow
@@ -167,7 +168,7 @@ def create(self, initial_split_keys=None):
             created, spanning the key ranges: ``[, s1)``, ``[s1, s2)``,
             ``[s2, )``.
         """
-        request_pb = messages_pb2.CreateTableRequest(
+        request_pb = messages_v1_pb2.CreateTableRequest(
             initial_split_keys=initial_split_keys or [],
             name=self._cluster.name,
             table_id=self.table_id,
@@ -178,7 +179,7 @@ def create(self, initial_split_keys=None):
 
     def delete(self):
         """Delete this table."""
-        request_pb = messages_pb2.DeleteTableRequest(name=self.name)
+        request_pb = messages_v1_pb2.DeleteTableRequest(name=self.name)
         client = self._cluster._client
         # We expect a `google.protobuf.empty_pb2.Empty`
         client._table_stub.DeleteTable(request_pb, client.timeout_seconds)
@@ -194,7 +195,7 @@ def list_column_families(self):
                  family name from the response does not agree with the
                  computed name from the column family ID.
         """
-        request_pb = messages_pb2.GetTableRequest(name=self.name)
+        request_pb = messages_v1_pb2.GetTableRequest(name=self.name)
         client = self._cluster._client
         # We expect a `._generated.bigtable_table_data_pb2.Table`
         table_pb = client._table_stub.GetTable(request_pb,
@@ -233,7 +234,7 @@ def read_row(self, row_key, filter_=None):
         client = self._cluster._client
         response_iterator = client._data_stub.ReadRows(request_pb,
                                                        client.timeout_seconds)
-        # We expect an iterator of `data_messages_pb2.ReadRowsResponse`
+        # We expect an iterator of `data_messages_v1_pb2.ReadRowsResponse`
         result = PartialRowData(row_key)
         for read_rows_response in response_iterator:
             result.update_from_read_rows(read_rows_response)
@@ -296,7 +297,7 @@ def read_rows(self, start_key=None, end_key=None,
         client = self._cluster._client
         response_iterator = client._data_stub.ReadRows(request_pb,
                                                        client.timeout_seconds)
-        # We expect an iterator of `data_messages_pb2.ReadRowsResponse`
+        # We expect an iterator of `data_messages_v1_pb2.ReadRowsResponse`
         return PartialRowsData(response_iterator)
 
     def sample_row_keys(self):
@@ -330,7 +331,7 @@ def sample_row_keys(self):
             or by casting to a :class:`list` and can be cancelled by calling
             ``cancel()``.
         """
-        request_pb = data_messages_pb2.SampleRowKeysRequest(
+        request_pb = data_messages_v1_pb2.SampleRowKeysRequest(
             table_name=self.name)
         client = self._cluster._client
         response_iterator = client._data_stub.SampleRowKeys(
@@ -383,7 +384,7 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None,
                       more than N rows. However, only N ``commit_row`` chunks
                       will be sent.
 
-    :rtype: :class:`data_messages_pb2.ReadRowsRequest`
+    :rtype: :class:`data_messages_v1_pb2.ReadRowsRequest`
    :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs.
    :raises: :class:`ValueError <exceptions.ValueError>` if both ``row_key``
             and one of ``start_key`` and ``end_key`` are set
@@ -401,7 +402,7 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None,
             range_kwargs['start_key'] = _to_bytes(start_key)
         if end_key is not None:
             range_kwargs['end_key'] = _to_bytes(end_key)
-        row_range = data_pb2.RowRange(**range_kwargs)
+        row_range = data_v1_pb2.RowRange(**range_kwargs)
         request_kwargs['row_range'] = row_range
     if filter_ is not None:
         request_kwargs['filter'] = filter_.to_pb()
@@ -410,4 +411,4 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None,
     if limit is not None:
         request_kwargs['num_rows_limit'] = limit
 
-    return data_messages_pb2.ReadRowsRequest(**request_kwargs)
+    return data_messages_v1_pb2.ReadRowsRequest(**request_kwargs)
diff --git a/gcloud/bigtable/test_client.py b/gcloud/bigtable/test_client.py
index bb424ad259b1..ccd276fdb750 100644
--- a/gcloud/bigtable/test_client.py
+++ b/gcloud/bigtable/test_client.py
@@ -289,9 +289,9 @@ def test_table_stub_unset_failure(self):
     def test__make_data_stub(self):
         from gcloud._testing import _Monkey
         from gcloud.bigtable import client as MUT
-        from gcloud.bigtable.client import DATA_API_HOST
-        from gcloud.bigtable.client import DATA_API_PORT
-        from gcloud.bigtable.client import DATA_STUB_FACTORY
+        from gcloud.bigtable.client import DATA_API_HOST_V1
+        from gcloud.bigtable.client import DATA_API_PORT_V1
+        from gcloud.bigtable.client import DATA_STUB_FACTORY_V1
 
         credentials = _Credentials()
         project = 'PROJECT'
@@ -311,18 +311,18 @@ def mock_make_stub(*args):
         self.assertEqual(make_stub_args, [
             (
                 client,
-                DATA_STUB_FACTORY,
-                DATA_API_HOST,
-                DATA_API_PORT,
+                DATA_STUB_FACTORY_V1,
+                DATA_API_HOST_V1,
+                DATA_API_PORT_V1,
             ),
         ])
 
     def test__make_cluster_stub(self):
         from gcloud._testing import _Monkey
         from gcloud.bigtable import client as MUT
-        from gcloud.bigtable.client import CLUSTER_ADMIN_HOST
-        from gcloud.bigtable.client import CLUSTER_ADMIN_PORT
-        from gcloud.bigtable.client import CLUSTER_STUB_FACTORY
+        from gcloud.bigtable.client import CLUSTER_ADMIN_HOST_V1
+        from gcloud.bigtable.client import CLUSTER_ADMIN_PORT_V1
+        from gcloud.bigtable.client import CLUSTER_STUB_FACTORY_V1
 
         credentials = _Credentials()
         project = 'PROJECT'
@@ -342,18 +342,18 @@ def mock_make_stub(*args):
         self.assertEqual(make_stub_args, [
             (
                 client,
-                CLUSTER_STUB_FACTORY,
-                CLUSTER_ADMIN_HOST,
-                CLUSTER_ADMIN_PORT,
+                CLUSTER_STUB_FACTORY_V1,
+                CLUSTER_ADMIN_HOST_V1,
+                CLUSTER_ADMIN_PORT_V1,
             ),
         ])
 
     def test__make_operations_stub(self):
         from gcloud._testing import _Monkey
         from gcloud.bigtable import client as MUT
-        from gcloud.bigtable.client import CLUSTER_ADMIN_HOST
-        from gcloud.bigtable.client import CLUSTER_ADMIN_PORT
-        from gcloud.bigtable.client import OPERATIONS_STUB_FACTORY
+        from gcloud.bigtable.client import OPERATIONS_API_HOST_V1
+        from gcloud.bigtable.client import OPERATIONS_API_PORT_V1
+        from gcloud.bigtable.client import OPERATIONS_STUB_FACTORY_V1
 
         credentials = _Credentials()
         project = 'PROJECT'
@@ -373,18 +373,18 @@ def mock_make_stub(*args):
         self.assertEqual(make_stub_args, [
             (
                 client,
-                OPERATIONS_STUB_FACTORY,
-                CLUSTER_ADMIN_HOST,
-                CLUSTER_ADMIN_PORT,
+                OPERATIONS_STUB_FACTORY_V1,
+                OPERATIONS_API_HOST_V1,
+                OPERATIONS_API_PORT_V1,
             ),
         ])
 
     def test__make_table_stub(self):
         from gcloud._testing import _Monkey
         from gcloud.bigtable import client as MUT
-        from gcloud.bigtable.client import TABLE_ADMIN_HOST
-        from gcloud.bigtable.client import TABLE_ADMIN_PORT
-        from gcloud.bigtable.client import TABLE_STUB_FACTORY
+        from gcloud.bigtable.client import TABLE_ADMIN_HOST_V1
+        from gcloud.bigtable.client import TABLE_ADMIN_PORT_V1
+        from gcloud.bigtable.client import TABLE_STUB_FACTORY_V1
 
         credentials = _Credentials()
         project = 'PROJECT'
@@ -404,9 +404,9 @@ def mock_make_stub(*args):
         self.assertEqual(make_stub_args, [
             (
                 client,
-                TABLE_STUB_FACTORY,
-                TABLE_ADMIN_HOST,
-                TABLE_ADMIN_PORT,
+                TABLE_STUB_FACTORY_V1,
+                TABLE_ADMIN_HOST_V1,
+                TABLE_ADMIN_PORT_V1,
             ),
         ])
 
@@ -543,9 +543,9 @@ def test_cluster_factory(self):
 
     def _list_zones_helper(self, zone_status):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
 
         credentials = _Credentials()
         project = 'PROJECT'
         timeout_seconds = 281330
         client = self._makeOne(project=project, credentials=credentials,
                                admin=True, timeout_seconds=timeout_seconds)
 
         # Create request_pb
-        request_pb = messages_pb2.ListZonesRequest(
+        request_pb = messages_v1_pb2.ListZonesRequest(
             name='projects/' + project,
         )
 
         # Create response_pb
         zone1 = 'foo'
         zone2 = 'bar'
-        response_pb = messages_pb2.ListZonesResponse(
+        response_pb = messages_v1_pb2.ListZonesResponse(
             zones=[
-                data_pb2.Zone(display_name=zone1, status=zone_status),
-                data_pb2.Zone(display_name=zone2, status=zone_status),
+                data_v1_pb2.Zone(display_name=zone1, status=zone_status),
+                data_v1_pb2.Zone(display_name=zone2, status=zone_status),
             ],
         )
 
@@ -586,20 +586,20 @@ def _list_zones_helper(self, zone_status):
 
     def test_list_zones(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
-        self._list_zones_helper(data_pb2.Zone.OK)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
+        self._list_zones_helper(data_v1_pb2.Zone.OK)
 
     def test_list_zones_failure(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         with self.assertRaises(ValueError):
-            self._list_zones_helper(data_pb2.Zone.EMERGENCY_MAINENANCE)
+            self._list_zones_helper(data_v1_pb2.Zone.EMERGENCY_MAINENANCE)
 
     def test_list_clusters(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
 
         credentials = _Credentials()
         project = 'PROJECT'
         timeout_seconds = 8004
         client = self._makeOne(project=project, credentials=credentials,
                                admin=True, timeout_seconds=timeout_seconds)
 
         # Create request_pb
-        request_pb = messages_pb2.ListClustersRequest(
+        request_pb = messages_v1_pb2.ListClustersRequest(
             name='projects/' + project,
         )
 
         # Create response_pb
         failed_zone = 'gone'
         zone = 'zone'
         cluster_id1 = 'cluster-id1'
         cluster_id2 = 'cluster-id2'
         cluster_name1 = ('projects/' + project + '/zones/' + zone +
                          '/clusters/' + cluster_id1)
         cluster_name2 = ('projects/' + project + '/zones/' + zone +
                          '/clusters/' + cluster_id2)
-        response_pb = messages_pb2.ListClustersResponse(
+        response_pb = messages_v1_pb2.ListClustersResponse(
             failed_zones=[
-                data_pb2.Zone(display_name=failed_zone),
+                data_v1_pb2.Zone(display_name=failed_zone),
             ],
             clusters=[
-                data_pb2.Cluster(
+                data_v1_pb2.Cluster(
                     name=cluster_name1,
                     display_name=cluster_name1,
                     serve_nodes=3,
                 ),
-                data_pb2.Cluster(
+                data_v1_pb2.Cluster(
                     name=cluster_name2,
                     display_name=cluster_name2,
                     serve_nodes=3,
diff --git a/gcloud/bigtable/test_cluster.py b/gcloud/bigtable/test_cluster.py
index 427a4ec9126b..294f9a0d0f55 100644
--- a/gcloud/bigtable/test_cluster.py
+++ b/gcloud/bigtable/test_cluster.py
@@ -209,12 +209,12 @@ def test_table_factory(self):
 
     def test__update_from_pb_success(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES
 
         display_name = 'display_name'
         serve_nodes = 8
-        cluster_pb = data_pb2.Cluster(
+        cluster_pb = data_v1_pb2.Cluster(
             display_name=display_name,
             serve_nodes=serve_nodes,
         )
@@ -228,10 +228,10 @@ def test__update_from_pb_success(self):
 
     def test__update_from_pb_no_display_name(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES
 
-        cluster_pb = data_pb2.Cluster(serve_nodes=331)
+        cluster_pb = data_v1_pb2.Cluster(serve_nodes=331)
         cluster = self._makeOne(None, None, None)
         self.assertEqual(cluster.display_name, None)
         self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES)
@@ -242,10 +242,10 @@ def test__update_from_pb_no_display_name(self):
 
     def test__update_from_pb_no_serve_nodes(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES
 
-        cluster_pb = data_pb2.Cluster(display_name='name')
+        cluster_pb = data_v1_pb2.Cluster(display_name='name')
         cluster = self._makeOne(None, None, None)
         self.assertEqual(cluster.display_name, None)
         self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES)
@@ -256,7 +256,7 @@ def test__update_from_pb_no_serve_nodes(self):
 
     def test_from_pb_success(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
 
         project = 'PROJECT'
         zone = 'zone'
@@ -265,7 +265,7 @@ def test_from_pb_success(self):
         cluster_name = ('projects/' + project + '/zones/' + zone +
                         '/clusters/' + cluster_id)
 
-        cluster_pb = data_pb2.Cluster(
+        cluster_pb = data_v1_pb2.Cluster(
             name=cluster_name,
             display_name=cluster_id,
             serve_nodes=331,
@@ -280,10 +280,10 @@ def test_from_pb_success(self):
 
     def test_from_pb_bad_cluster_name(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
 
         cluster_name = 'INCORRECT_FORMAT'
-        cluster_pb = data_pb2.Cluster(name=cluster_name)
+        cluster_pb = data_v1_pb2.Cluster(name=cluster_name)
 
         klass = self._getTargetClass()
         with self.assertRaises(ValueError):
@@ -291,7 +291,7 @@ def test_from_pb_bad_cluster_name(self):
 
     def test_from_pb_project_mistmatch(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
 
         project = 'PROJECT'
         zone = 'zone'
@@ -303,7 +303,7 @@ def test_from_pb_project_mistmatch(self):
         cluster_name = ('projects/' + project + '/zones/' + zone +
                         '/clusters/' + cluster_id)
 
-        cluster_pb = data_pb2.Cluster(name=cluster_name)
+        cluster_pb = data_v1_pb2.Cluster(name=cluster_name)
 
         klass = self._getTargetClass()
         with self.assertRaises(ValueError):
@@ -349,9 +349,9 @@ def test___ne__(self):
 
     def test_reload(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
         from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES
 
@@ -366,12 +366,12 @@ def test_reload(self):
         # Create request_pb
         cluster_name = ('projects/' + project + '/zones/' + zone +
                         '/clusters/' + cluster_id)
-        request_pb = messages_pb2.GetClusterRequest(name=cluster_name)
+        request_pb = messages_v1_pb2.GetClusterRequest(name=cluster_name)
 
         # Create response_pb
         serve_nodes = 31
         display_name = u'hey-hi-hello'
-        response_pb = data_pb2.Cluster(
+        response_pb = data_v1_pb2.Cluster(
             display_name=display_name,
             serve_nodes=serve_nodes,
         )
@@ -403,7 +403,7 @@ def test_create(self):
         from google.longrunning import operations_pb2
         from gcloud._testing import _Monkey
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
         from gcloud.bigtable import cluster as MUT
 
@@ -425,7 +425,7 @@ def test_create(self):
         op_name = ('operations/projects/%s/zones/%s/clusters/%s/'
                    'operations/%d' % (project, zone, cluster_id, op_id))
         current_op = operations_pb2.Operation(name=op_name)
-        response_pb = data_pb2.Cluster(current_operation=current_op)
+        response_pb = data_v1_pb2.Cluster(current_operation=current_op)
 
         # Patch the stub used by the API method.
         client._cluster_stub = stub = _FakeStub(response_pb)
@@ -465,7 +465,7 @@ def test_update(self):
         from google.longrunning import operations_pb2
         from gcloud._testing import _Monkey
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
         from gcloud.bigtable import cluster as MUT
 
@@ -484,7 +484,7 @@ def test_update(self):
         # Create request_pb
         cluster_name = ('projects/' + project + '/zones/' + zone +
                         '/clusters/' + cluster_id)
-        request_pb = data_pb2.Cluster(
+        request_pb = data_v1_pb2.Cluster(
             name=cluster_name,
             display_name=display_name,
             serve_nodes=serve_nodes,
@@ -492,7 +492,7 @@ def test_update(self):
 
         # Create response_pb
         current_op = operations_pb2.Operation()
-        response_pb = data_pb2.Cluster(current_operation=current_op)
+        response_pb = data_v1_pb2.Cluster(current_operation=current_op)
 
         # Patch the stub used by the API method.
         client._cluster_stub = stub = _FakeStub(response_pb)
@@ -525,7 +525,7 @@ def mock_process_operation(operation_pb):
     def test_delete(self):
         from google.protobuf import empty_pb2
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
 
         project = 'PROJECT'
@@ -539,7 +539,7 @@ def test_delete(self):
         # Create request_pb
         cluster_name = ('projects/' + project + '/zones/' + zone +
                         '/clusters/' + cluster_id)
-        request_pb = messages_pb2.DeleteClusterRequest(name=cluster_name)
+        request_pb = messages_v1_pb2.DeleteClusterRequest(name=cluster_name)
 
         # Create response_pb
         response_pb = empty_pb2.Empty()
@@ -564,7 +564,7 @@ def test_undelete(self):
         from google.longrunning import operations_pb2
         from gcloud._testing import _Monkey
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
         from gcloud.bigtable import cluster as MUT
 
@@ -579,7 +579,7 @@ def test_undelete(self):
         # Create request_pb
         cluster_name = ('projects/' + project + '/zones/' + zone +
                         '/clusters/' + cluster_id)
-        request_pb = messages_pb2.UndeleteClusterRequest(name=cluster_name)
+        request_pb = messages_v1_pb2.UndeleteClusterRequest(name=cluster_name)
 
         # Create response_pb
         response_pb = operations_pb2.Operation()
@@ -616,7 +616,7 @@ def _list_tables_helper(self, table_id, table_name=None):
         from gcloud.bigtable._generated import (
             bigtable_table_data_pb2 as table_data_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_table_service_messages_pb2 as table_messages_pb2)
+            bigtable_table_service_messages_pb2 as table_messages_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
 
         project = 'PROJECT'
@@ -630,11 +630,12 @@ def _list_tables_helper(self, table_id, table_name=None):
         # Create request_
         cluster_name = ('projects/' + project + '/zones/' + zone +
                         '/clusters/' + cluster_id)
-        request_pb = table_messages_pb2.ListTablesRequest(name=cluster_name)
+        request_pb = table_messages_v1_pb2.ListTablesRequest(
+            name=cluster_name)
 
         # Create response_pb
         table_name = table_name or (cluster_name + '/tables/' + table_id)
-        response_pb = table_messages_pb2.ListTablesResponse(
+        response_pb = table_messages_v1_pb2.ListTablesResponse(
             tables=[
                 table_data_pb2.Table(name=table_name),
             ],
@@ -686,9 +687,9 @@ def _callFUT(self, cluster):
 
     def test_it(self):
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable.cluster import Cluster
 
         project = 'PROJECT'
@@ -702,11 +703,11 @@ def test_it(self):
                            display_name=display_name, serve_nodes=serve_nodes)
         request_pb = self._callFUT(cluster)
         self.assertTrue(isinstance(request_pb,
-                                   messages_pb2.CreateClusterRequest))
+                                   messages_v1_pb2.CreateClusterRequest))
         self.assertEqual(request_pb.cluster_id, cluster_id)
         self.assertEqual(request_pb.name,
                          'projects/' + project + '/zones/' + zone)
-        self.assertTrue(isinstance(request_pb.cluster, data_pb2.Cluster))
+        self.assertTrue(isinstance(request_pb.cluster, data_v1_pb2.Cluster))
         self.assertEqual(request_pb.cluster.display_name, display_name)
         self.assertEqual(request_pb.cluster.serve_nodes, serve_nodes)
 
@@ -720,13 +721,14 @@ def _callFUT(self, any_val, expected_type=None):
     def test_with_known_type_url(self):
         from google.protobuf import any_pb2
         from gcloud._testing import _Monkey
-        from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2
+        from gcloud.bigtable._generated import (
+            bigtable_data_pb2 as data_v1_pb2)
         from gcloud.bigtable import cluster as MUT
 
-        type_url = 'type.googleapis.com/' + data_pb2._CELL.full_name
-        fake_type_url_map = {type_url: data_pb2.Cell}
+        type_url = 'type.googleapis.com/' + data_v1_pb2._CELL.full_name
+        fake_type_url_map = {type_url: data_v1_pb2.Cell}
 
-        cell = data_pb2.Cell(
+        cell = data_v1_pb2.Cell(
             timestamp_micros=0,
             value=b'foobar',
         )
@@ -743,19 +745,19 @@ def test_with_create_cluster_metadata(self):
         from google.protobuf import any_pb2
         from google.protobuf.timestamp_pb2 import Timestamp
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
 
         type_url = ('type.googleapis.com/' +
-                    messages_pb2._CREATECLUSTERMETADATA.full_name)
-        metadata = messages_pb2.CreateClusterMetadata(
+                    messages_v1_pb2._CREATECLUSTERMETADATA.full_name)
+        metadata = messages_v1_pb2.CreateClusterMetadata(
             request_time=Timestamp(seconds=1, nanos=1234),
             finish_time=Timestamp(seconds=10, nanos=891011),
-            original_request=messages_pb2.CreateClusterRequest(
+            original_request=messages_v1_pb2.CreateClusterRequest(
                 name='foo',
                 cluster_id='bar',
-                cluster=data_pb2.Cluster(
+                cluster=data_v1_pb2.Cluster(
                     display_name='quux',
                     serve_nodes=1337,
                 ),
@@ -773,17 +775,17 @@ def test_with_update_cluster_metadata(self):
         from google.protobuf import any_pb2
         from google.protobuf.timestamp_pb2 import Timestamp
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
 
         type_url = ('type.googleapis.com/' +
-                    messages_pb2._UPDATECLUSTERMETADATA.full_name)
-        metadata = messages_pb2.UpdateClusterMetadata(
+                    messages_v1_pb2._UPDATECLUSTERMETADATA.full_name)
+        metadata = messages_v1_pb2.UpdateClusterMetadata(
             request_time=Timestamp(seconds=1, nanos=1234),
             finish_time=Timestamp(seconds=10, nanos=891011),
             cancel_time=Timestamp(seconds=100, nanos=76543),
-            original_request=data_pb2.Cluster(
+            original_request=data_v1_pb2.Cluster(
                 display_name='the-end',
                 serve_nodes=42,
             ),
@@ -800,13 +802,11 @@ def test_with_undelete_cluster_metadata(self):
         from google.protobuf import any_pb2
         from google.protobuf.timestamp_pb2 import Timestamp
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
-        from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
 
         type_url = ('type.googleapis.com/' +
-                    messages_pb2._UNDELETECLUSTERMETADATA.full_name)
-        metadata = messages_pb2.UndeleteClusterMetadata(
+                    messages_v1_pb2._UNDELETECLUSTERMETADATA.full_name)
+        metadata = messages_v1_pb2.UndeleteClusterMetadata(
             request_time=Timestamp(seconds=1, nanos=1234),
             finish_time=Timestamp(seconds=10, nanos=891011),
         )
@@ -853,7 +853,7 @@ def test_it(self):
         from google.longrunning import operations_pb2
         from gcloud._testing import _Monkey
         from gcloud.bigtable._generated import (
-            bigtable_cluster_service_messages_pb2 as messages_pb2)
+            bigtable_cluster_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable import cluster as MUT
 
         project = 'PROJECT'
@@ -867,7 +867,7 @@ def test_it(self):
         current_op = operations_pb2.Operation(name=operation_name)
 
         # Create mocks.
-        request_metadata = messages_pb2.CreateClusterMetadata()
+        request_metadata = messages_v1_pb2.CreateClusterMetadata()
         parse_pb_any_called = []
 
         def mock_parse_pb_any_to_native(any_val, expected_type=None):
@@ -897,10 +897,10 @@ def mock_pb_timestamp_to_datetime(timestamp):
     def test_op_name_parsing_failure(self):
         from google.longrunning import operations_pb2
         from gcloud.bigtable._generated import (
-            bigtable_cluster_data_pb2 as data_pb2)
+            bigtable_cluster_data_pb2 as data_v1_pb2)
 
         current_op = operations_pb2.Operation(name='invalid')
-        cluster = data_pb2.Cluster(current_operation=current_op)
+        cluster = data_v1_pb2.Cluster(current_operation=current_op)
         with self.assertRaises(ValueError):
             self._callFUT(cluster)
diff --git a/gcloud/bigtable/test_column_family.py b/gcloud/bigtable/test_column_family.py
index 139a959e0a7b..00384a3df0df 100644
--- a/gcloud/bigtable/test_column_family.py
+++ b/gcloud/bigtable/test_column_family.py
@@ -108,12 +108,12 @@ def test___ne__same_value(self):
 
     def test_to_pb(self):
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
 
         max_num_versions = 1337
         gc_rule = self._makeOne(max_num_versions=max_num_versions)
         pb_val = gc_rule.to_pb()
-        self.assertEqual(pb_val,
-                         data_pb2.GcRule(max_num_versions=max_num_versions))
+        expected = data_v1_pb2.GcRule(max_num_versions=max_num_versions)
+        self.assertEqual(pb_val, expected)
 
 
 class TestMaxAgeGCRule(unittest2.TestCase):
@@ -148,13 +148,13 @@ def test_to_pb(self):
         import datetime
         from google.protobuf import duration_pb2
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
 
         max_age = datetime.timedelta(seconds=1)
         duration = duration_pb2.Duration(seconds=1)
         gc_rule = self._makeOne(max_age=max_age)
         pb_val = gc_rule.to_pb()
-        self.assertEqual(pb_val, data_pb2.GcRule(max_age=duration))
+        self.assertEqual(pb_val, data_v1_pb2.GcRule(max_age=duration))
 
 
 class TestGCRuleUnion(unittest2.TestCase):
@@ -194,21 +194,22 @@ def test_to_pb(self):
         import datetime
         from google.protobuf import duration_pb2
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
         from gcloud.bigtable.column_family import MaxAgeGCRule
         from gcloud.bigtable.column_family import MaxVersionsGCRule
 
         max_num_versions = 42
         rule1 = MaxVersionsGCRule(max_num_versions)
-        pb_rule1 = data_pb2.GcRule(max_num_versions=max_num_versions)
+        pb_rule1 = data_v1_pb2.GcRule(max_num_versions=max_num_versions)
 
         max_age = datetime.timedelta(seconds=1)
         rule2 = MaxAgeGCRule(max_age)
-        pb_rule2 = data_pb2.GcRule(max_age=duration_pb2.Duration(seconds=1))
+        pb_rule2 = data_v1_pb2.GcRule(
+            max_age=duration_pb2.Duration(seconds=1))
 
         rule3 = self._makeOne(rules=[rule1, rule2])
-        pb_rule3 = data_pb2.GcRule(
-            union=data_pb2.GcRule.Union(rules=[pb_rule1, pb_rule2]))
+        pb_rule3 = data_v1_pb2.GcRule(
+            union=data_v1_pb2.GcRule.Union(rules=[pb_rule1, pb_rule2]))
 
         gc_rule_pb = rule3.to_pb()
         self.assertEqual(gc_rule_pb, pb_rule3)
@@ -217,29 +218,30 @@ def test_to_pb_nested(self):
         import datetime
         from google.protobuf import duration_pb2
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
         from gcloud.bigtable.column_family import MaxAgeGCRule
         from gcloud.bigtable.column_family import MaxVersionsGCRule
 
         max_num_versions1 = 42
         rule1 = MaxVersionsGCRule(max_num_versions1)
-        pb_rule1 = data_pb2.GcRule(max_num_versions=max_num_versions1)
+        pb_rule1 = data_v1_pb2.GcRule(max_num_versions=max_num_versions1)
 
         max_age = datetime.timedelta(seconds=1)
         rule2 = MaxAgeGCRule(max_age)
-        pb_rule2 = data_pb2.GcRule(max_age=duration_pb2.Duration(seconds=1))
+        pb_rule2 = data_v1_pb2.GcRule(
+            max_age=duration_pb2.Duration(seconds=1))
 
         rule3 = self._makeOne(rules=[rule1, rule2])
-        pb_rule3 = data_pb2.GcRule(
-            union=data_pb2.GcRule.Union(rules=[pb_rule1, pb_rule2]))
+        pb_rule3 = data_v1_pb2.GcRule(
+            union=data_v1_pb2.GcRule.Union(rules=[pb_rule1, pb_rule2]))
 
         max_num_versions2 = 1337
         rule4 = MaxVersionsGCRule(max_num_versions2)
-        pb_rule4 = data_pb2.GcRule(max_num_versions=max_num_versions2)
+        pb_rule4 = data_v1_pb2.GcRule(max_num_versions=max_num_versions2)
 
         rule5 = self._makeOne(rules=[rule3, rule4])
-        pb_rule5 = data_pb2.GcRule(
-            union=data_pb2.GcRule.Union(rules=[pb_rule3, pb_rule4]))
+        pb_rule5 = data_v1_pb2.GcRule(
+            union=data_v1_pb2.GcRule.Union(rules=[pb_rule3, pb_rule4]))
 
         gc_rule_pb = rule5.to_pb()
         self.assertEqual(gc_rule_pb, pb_rule5)
@@ -282,21 +284,22 @@ def test_to_pb(self):
         import datetime
         from google.protobuf import duration_pb2
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
         from gcloud.bigtable.column_family import MaxAgeGCRule
         from gcloud.bigtable.column_family import MaxVersionsGCRule
 
         max_num_versions = 42
         rule1 = MaxVersionsGCRule(max_num_versions)
-        pb_rule1 = data_pb2.GcRule(max_num_versions=max_num_versions)
+        pb_rule1 = data_v1_pb2.GcRule(max_num_versions=max_num_versions)
 
         max_age = datetime.timedelta(seconds=1)
         rule2 = MaxAgeGCRule(max_age)
-        pb_rule2 = data_pb2.GcRule(max_age=duration_pb2.Duration(seconds=1))
+        pb_rule2 = data_v1_pb2.GcRule(
+            max_age=duration_pb2.Duration(seconds=1))
 
         rule3 = self._makeOne(rules=[rule1, rule2])
-        pb_rule3 = data_pb2.GcRule(
-            intersection=data_pb2.GcRule.Intersection(
+        pb_rule3 = data_v1_pb2.GcRule(
+            intersection=data_v1_pb2.GcRule.Intersection(
                 rules=[pb_rule1, pb_rule2]))
 
         gc_rule_pb = rule3.to_pb()
@@ -306,30 +309,31 @@ def test_to_pb_nested(self):
         import datetime
         from google.protobuf import duration_pb2
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
         from gcloud.bigtable.column_family import MaxAgeGCRule
         from gcloud.bigtable.column_family import MaxVersionsGCRule
 
         max_num_versions1 = 42
         rule1 = MaxVersionsGCRule(max_num_versions1)
-        pb_rule1 = data_pb2.GcRule(max_num_versions=max_num_versions1)
+        pb_rule1 = data_v1_pb2.GcRule(max_num_versions=max_num_versions1)
 
         max_age = datetime.timedelta(seconds=1)
         rule2 = MaxAgeGCRule(max_age)
-        pb_rule2 = data_pb2.GcRule(max_age=duration_pb2.Duration(seconds=1))
+        pb_rule2 = data_v1_pb2.GcRule(
+            max_age=duration_pb2.Duration(seconds=1))
 
         rule3 = self._makeOne(rules=[rule1, rule2])
-        pb_rule3 = data_pb2.GcRule(
-            intersection=data_pb2.GcRule.Intersection(
+        pb_rule3 = data_v1_pb2.GcRule(
+            intersection=data_v1_pb2.GcRule.Intersection(
                 rules=[pb_rule1, pb_rule2]))
 
         max_num_versions2 = 1337
         rule4 = MaxVersionsGCRule(max_num_versions2)
-        pb_rule4 = data_pb2.GcRule(max_num_versions=max_num_versions2)
+        pb_rule4 = data_v1_pb2.GcRule(max_num_versions=max_num_versions2)
 
         rule5 = self._makeOne(rules=[rule3, rule4])
-        pb_rule5 = data_pb2.GcRule(
-            intersection=data_pb2.GcRule.Intersection(
+        pb_rule5 = data_v1_pb2.GcRule(
+            intersection=data_v1_pb2.GcRule.Intersection(
                 rules=[pb_rule3, pb_rule4]))
 
         gc_rule_pb = rule5.to_pb()
@@ -349,7 +353,8 @@ def test_constructor(self):
         column_family_id = u'column-family-id'
         table = object()
         gc_rule = object()
-        column_family = self._makeOne(column_family_id, table, gc_rule=gc_rule)
+        column_family = self._makeOne(
+            column_family_id, table, gc_rule=gc_rule)
 
         self.assertEqual(column_family.column_family_id, column_family_id)
         self.assertTrue(column_family._table is table)
@@ -397,9 +402,9 @@ def test___ne__(self):
 
     def _create_test_helper(self, gc_rule=None):
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._generated import (
-            bigtable_table_service_messages_pb2 as messages_pb2)
+            bigtable_table_service_messages_pb2 as messages_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
 
         project_id = 'project-id'
@@ -413,21 +418,23 @@ def _create_test_helper(self, gc_rule=None):
 
         client = _Client(timeout_seconds=timeout_seconds)
         table = _Table(table_name, client=client)
-        column_family = self._makeOne(column_family_id, table, gc_rule=gc_rule)
+        column_family = self._makeOne(
+            column_family_id, table, gc_rule=gc_rule)
 
         # Create request_pb
         if gc_rule is None:
-            column_family_pb = data_pb2.ColumnFamily()
+            column_family_pb = data_v1_pb2.ColumnFamily()
         else:
-            column_family_pb = data_pb2.ColumnFamily(gc_rule=gc_rule.to_pb())
-        request_pb = messages_pb2.CreateColumnFamilyRequest(
+            column_family_pb = data_v1_pb2.ColumnFamily(
+                gc_rule=gc_rule.to_pb())
+        request_pb = messages_v1_pb2.CreateColumnFamilyRequest(
             name=table_name,
             column_family_id=column_family_id,
             column_family=column_family_pb,
         )
 
         # Create response_pb
-        response_pb = data_pb2.ColumnFamily()
+        response_pb = data_v1_pb2.ColumnFamily()
 
         # Patch the stub used by the API method.
         client._table_stub = stub = _FakeStub(response_pb)
@@ -456,7 +463,7 @@ def test_create_with_gc_rule(self):
 
     def _update_test_helper(self, gc_rule=None):
         from gcloud.bigtable._generated import (
-            bigtable_table_data_pb2 as data_pb2)
+            bigtable_table_data_pb2 as data_v1_pb2)
         from gcloud.bigtable._testing import _FakeStub
 
         project_id = 'project-id'
@@ -467,23 +474,25 @@ def _update_test_helper(self, gc_rule=None):
         timeout_seconds = 28
         table_name = ('projects/' + project_id + '/zones/' + zone +
                       '/clusters/' + cluster_id + '/tables/' + table_id)
-        column_family_name = table_name + '/columnFamilies/' + column_family_id
+        column_family_name = (
+            table_name + '/columnFamilies/' + column_family_id)
 
         client = _Client(timeout_seconds=timeout_seconds)
         table = _Table(table_name, client=client)
-        column_family = self._makeOne(column_family_id, table, gc_rule=gc_rule)
+        column_family = self._makeOne(
+            column_family_id, table, gc_rule=gc_rule)
 
         # Create request_pb
         if gc_rule is None:
-            request_pb = data_pb2.ColumnFamily(name=column_family_name)
+            request_pb = data_v1_pb2.ColumnFamily(name=column_family_name)
         else:
-            request_pb = data_pb2.ColumnFamily(
+            request_pb = data_v1_pb2.ColumnFamily(
                 name=column_family_name,
                 gc_rule=gc_rule.to_pb(),
             )
 
         # Create response_pb
-        response_pb = data_pb2.ColumnFamily()
+        response_pb = data_v1_pb2.ColumnFamily()
 
         # Patch the stub used by the API method.
client._table_stub = stub = _FakeStub(response_pb) @@ -513,7 +522,7 @@ def test_update_with_gc_rule(self): def test_delete(self): from google.protobuf import empty_pb2 from gcloud.bigtable._generated import ( - bigtable_table_service_messages_pb2 as messages_pb2) + bigtable_table_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub project_id = 'project-id' @@ -524,14 +533,15 @@ def test_delete(self): timeout_seconds = 7 table_name = ('projects/' + project_id + '/zones/' + zone + '/clusters/' + cluster_id + '/tables/' + table_id) - column_family_name = table_name + '/columnFamilies/' + column_family_id + column_family_name = ( + table_name + '/columnFamilies/' + column_family_id) client = _Client(timeout_seconds=timeout_seconds) table = _Table(table_name, client=client) column_family = self._makeOne(column_family_id, table) # Create request_pb - request_pb = messages_pb2.DeleteColumnFamilyRequest( + request_pb = messages_v1_pb2.DeleteColumnFamilyRequest( name=column_family_name) # Create response_pb @@ -563,9 +573,9 @@ def _callFUT(self, *args, **kwargs): def test_empty(self): from gcloud.bigtable._generated import ( - bigtable_table_data_pb2 as data_pb2) + bigtable_table_data_pb2 as data_v1_pb2) - gc_rule_pb = data_pb2.GcRule() + gc_rule_pb = data_v1_pb2.GcRule() self.assertEqual(self._callFUT(gc_rule_pb), None) def test_max_num_versions(self): diff --git a/gcloud/bigtable/test_row.py b/gcloud/bigtable/test_row.py index 9e6da708e6b6..e2336d7520f3 100644 --- a/gcloud/bigtable/test_row.py +++ b/gcloud/bigtable/test_row.py @@ -75,7 +75,8 @@ def _set_cell_helper(self, column=None, column_bytes=None, timestamp_micros=-1): import six import struct - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) row_key = b'row_key' column_family_id = u'column_family_id' @@ -89,8 +90,8 @@ def _set_cell_helper(self, column=None, column_bytes=None, if isinstance(value, six.integer_types): value = struct.pack('>q', value) - expected_pb = data_pb2.Mutation( - set_cell=data_pb2.Mutation.SetCell( + expected_pb = data_v1_pb2.Mutation( + set_cell=data_v1_pb2.Mutation.SetCell( family_name=column_family_id, column_qualifier=column_bytes or column, timestamp_micros=timestamp_micros, @@ -134,15 +135,16 @@ def test_set_cell_with_non_null_timestamp(self): timestamp_micros=millis_granularity) def test_delete(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) row_key = b'row_key' row = self._makeOne(row_key, object()) self.assertEqual(row._pb_mutations, []) row.delete() - expected_pb = data_pb2.Mutation( - delete_from_row=data_pb2.Mutation.DeleteFromRow(), + expected_pb = data_v1_pb2.Mutation( + delete_from_row=data_v1_pb2.Mutation.DeleteFromRow(), ) self.assertEqual(row._pb_mutations, [expected_pb]) @@ -193,7 +195,8 @@ def test_delete_cells_non_iterable(self): row.delete_cells(column_family_id, columns) def test_delete_cells_all_columns(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) row_key = b'row_key' column_family_id = u'column_family_id' @@ -204,8 +207,8 @@ def test_delete_cells_all_columns(self): self.assertEqual(row._pb_mutations, []) row.delete_cells(column_family_id, klass.ALL_COLUMNS) - expected_pb = data_pb2.Mutation( - 
delete_from_family=data_pb2.Mutation.DeleteFromFamily( + expected_pb = data_v1_pb2.Mutation( + delete_from_family=data_v1_pb2.Mutation.DeleteFromFamily( family_name=column_family_id, ), ) @@ -223,7 +226,8 @@ def test_delete_cells_no_columns(self): self.assertEqual(row._pb_mutations, []) def _delete_cells_helper(self, time_range=None): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) row_key = b'row_key' column = b'column' @@ -235,8 +239,8 @@ def _delete_cells_helper(self, time_range=None): self.assertEqual(row._pb_mutations, []) row.delete_cells(column_family_id, columns, time_range=time_range) - expected_pb = data_pb2.Mutation( - delete_from_column=data_pb2.Mutation.DeleteFromColumn( + expected_pb = data_v1_pb2.Mutation( + delete_from_column=data_v1_pb2.Mutation.DeleteFromColumn( family_name=column_family_id, column_qualifier=column, ), @@ -275,7 +279,8 @@ def test_delete_cells_with_bad_column(self): self.assertEqual(row._pb_mutations, []) def test_delete_cells_with_string_columns(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) row_key = b'row_key' column_family_id = u'column_family_id' @@ -290,14 +295,14 @@ def test_delete_cells_with_string_columns(self): self.assertEqual(row._pb_mutations, []) row.delete_cells(column_family_id, columns) - expected_pb1 = data_pb2.Mutation( - delete_from_column=data_pb2.Mutation.DeleteFromColumn( + expected_pb1 = data_v1_pb2.Mutation( + delete_from_column=data_v1_pb2.Mutation.DeleteFromColumn( family_name=column_family_id, column_qualifier=column1_bytes, ), ) - expected_pb2 = data_pb2.Mutation( - delete_from_column=data_pb2.Mutation.DeleteFromColumn( + expected_pb2 = data_v1_pb2.Mutation( + delete_from_column=data_v1_pb2.Mutation.DeleteFromColumn( family_name=column_family_id, column_qualifier=column2_bytes, ), @@ -306,9 +311,10 @@ def test_delete_cells_with_string_columns(self): def test_commit(self): from google.protobuf import empty_pb2 - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub row_key = b'row_key' @@ -322,15 +328,15 @@ def test_commit(self): # Create request_pb value = b'bytes-value' - mutation = data_pb2.Mutation( - set_cell=data_pb2.Mutation.SetCell( + mutation = data_v1_pb2.Mutation( + set_cell=data_v1_pb2.Mutation.SetCell( family_name=column_family_id, column_qualifier=column, timestamp_micros=-1, # Default value. 
value=value, ), ) - request_pb = messages_pb2.MutateRowRequest( + request_pb = messages_v1_pb2.MutateRowRequest( table_name=table_name, row_key=row_key, mutations=[mutation], @@ -421,9 +427,10 @@ def test__get_mutations(self): self.assertTrue(false_mutations is row._get_mutations(None)) def test_commit(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub from gcloud.bigtable.row_filters import RowSampleFilter @@ -442,29 +449,29 @@ def test_commit(self): # Create request_pb value1 = b'bytes-value' - mutation1 = data_pb2.Mutation( - set_cell=data_pb2.Mutation.SetCell( + mutation1 = data_v1_pb2.Mutation( + set_cell=data_v1_pb2.Mutation.SetCell( family_name=column_family_id1, column_qualifier=column1, timestamp_micros=-1, # Default value. value=value1, ), ) - mutation2 = data_pb2.Mutation( - delete_from_row=data_pb2.Mutation.DeleteFromRow(), + mutation2 = data_v1_pb2.Mutation( + delete_from_row=data_v1_pb2.Mutation.DeleteFromRow(), ) - mutation3 = data_pb2.Mutation( - delete_from_column=data_pb2.Mutation.DeleteFromColumn( + mutation3 = data_v1_pb2.Mutation( + delete_from_column=data_v1_pb2.Mutation.DeleteFromColumn( family_name=column_family_id2, column_qualifier=column2, ), ) - mutation4 = data_pb2.Mutation( - delete_from_family=data_pb2.Mutation.DeleteFromFamily( + mutation4 = data_v1_pb2.Mutation( + delete_from_family=data_v1_pb2.Mutation.DeleteFromFamily( family_name=column_family_id3, ), ) - request_pb = messages_pb2.CheckAndMutateRowRequest( + request_pb = messages_v1_pb2.CheckAndMutateRowRequest( table_name=table_name, row_key=row_key, predicate_filter=row_filter.to_pb(), @@ -474,7 +481,7 @@ def test_commit(self): # Create response_pb predicate_matched = True - response_pb = messages_pb2.CheckAndMutateRowResponse( + response_pb = messages_v1_pb2.CheckAndMutateRowResponse( predicate_matched=predicate_matched) # Patch the stub used by the API method. 
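
For context on the conditional mutation flow tested above: a V1 CheckAndMutateRow call carries two mutation lists, and the server applies one or the other depending on whether the predicate filter matches the row. A minimal sketch of building such a request by hand, assuming the V1 generated modules from this tree and the repeated `true_mutations` / `false_mutations` fields from the V1 service messages (the table name is a placeholder):

    from gcloud.bigtable._generated import (
        bigtable_data_pb2 as data_v1_pb2)
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_v1_pb2)
    from gcloud.bigtable.row_filters import RowSampleFilter

    # Applied only when the predicate matches; the repeated
    # `false_mutations` field covers the non-matching case.
    true_mutation = data_v1_pb2.Mutation(
        set_cell=data_v1_pb2.Mutation.SetCell(
            family_name=u'column_family_id',
            column_qualifier=b'column',
            timestamp_micros=-1,  # Default value.
            value=b'bytes-value',
        ),
    )
    request = messages_v1_pb2.CheckAndMutateRowRequest(
        table_name='projects/p/zones/z/clusters/c/tables/t',
        row_key=b'row_key',
        predicate_filter=RowSampleFilter(0.33).to_pb(),
        true_mutations=[true_mutation],
    )
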
@@ -560,7 +567,8 @@ def test_clear(self): self.assertEqual(row._rule_pb_list, []) def test_append_cell_value(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) table = object() row_key = b'row_key' @@ -571,13 +579,14 @@ def test_append_cell_value(self): column_family_id = u'column_family_id' value = b'bytes-val' row.append_cell_value(column_family_id, column, value) - expected_pb = data_pb2.ReadModifyWriteRule( + expected_pb = data_v1_pb2.ReadModifyWriteRule( family_name=column_family_id, column_qualifier=column, append_value=value) self.assertEqual(row._rule_pb_list, [expected_pb]) def test_increment_cell_value(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) table = object() row_key = b'row_key' @@ -588,16 +597,17 @@ def test_increment_cell_value(self): column_family_id = u'column_family_id' int_value = 281330 row.increment_cell_value(column_family_id, column, int_value) - expected_pb = data_pb2.ReadModifyWriteRule( + expected_pb = data_v1_pb2.ReadModifyWriteRule( family_name=column_family_id, column_qualifier=column, increment_amount=int_value) self.assertEqual(row._rule_pb_list, [expected_pb]) def test_commit(self): from gcloud._testing import _Monkey - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub from gcloud.bigtable import row as MUT @@ -613,11 +623,11 @@ def test_commit(self): # Create request_pb value = b'bytes-value' # We will call row.append_cell_value(COLUMN_FAMILY_ID, COLUMN, value). 
- request_pb = messages_pb2.ReadModifyWriteRowRequest( + request_pb = messages_v1_pb2.ReadModifyWriteRowRequest( table_name=table_name, row_key=row_key, rules=[ - data_pb2.ReadModifyWriteRule( + data_v1_pb2.ReadModifyWriteRule( family_name=column_family_id, column_qualifier=column, append_value=value, @@ -693,7 +703,8 @@ def _callFUT(self, row_response): def test_it(self): from gcloud._helpers import _datetime_from_microseconds - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) col_fam1 = u'col-fam-id' col_fam2 = u'col-fam-id2' @@ -723,28 +734,28 @@ def test_it(self): ], }, } - sample_input = data_pb2.Row( + sample_input = data_v1_pb2.Row( families=[ - data_pb2.Family( + data_v1_pb2.Family( name=col_fam1, columns=[ - data_pb2.Column( + data_v1_pb2.Column( qualifier=col_name1, cells=[ - data_pb2.Cell( + data_v1_pb2.Cell( value=cell_val1, timestamp_micros=microseconds, ), - data_pb2.Cell( + data_v1_pb2.Cell( value=cell_val2, timestamp_micros=microseconds, ), ], ), - data_pb2.Column( + data_v1_pb2.Column( qualifier=col_name2, cells=[ - data_pb2.Cell( + data_v1_pb2.Cell( value=cell_val3, timestamp_micros=microseconds, ), @@ -752,13 +763,13 @@ def test_it(self): ), ], ), - data_pb2.Family( + data_v1_pb2.Family( name=col_fam2, columns=[ - data_pb2.Column( + data_v1_pb2.Column( qualifier=col_name3, cells=[ - data_pb2.Cell( + data_v1_pb2.Cell( value=cell_val4, timestamp_micros=microseconds, ), @@ -779,7 +790,8 @@ def _callFUT(self, family_pb): def test_it(self): from gcloud._helpers import _datetime_from_microseconds - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) col_fam1 = u'col-fam-id' col_name1 = b'col-name1' @@ -800,26 +812,26 @@ def test_it(self): ], } expected_output = (col_fam1, expected_dict) - sample_input = data_pb2.Family( + sample_input = data_v1_pb2.Family( name=col_fam1, columns=[ - data_pb2.Column( + data_v1_pb2.Column( qualifier=col_name1, cells=[ - data_pb2.Cell( + data_v1_pb2.Cell( value=cell_val1, timestamp_micros=microseconds, ), - data_pb2.Cell( + data_v1_pb2.Cell( value=cell_val2, timestamp_micros=microseconds, ), ], ), - data_pb2.Column( + data_v1_pb2.Column( qualifier=col_name2, cells=[ - data_pb2.Cell( + data_v1_pb2.Cell( value=cell_val3, timestamp_micros=microseconds, ), diff --git a/gcloud/bigtable/test_row_data.py b/gcloud/bigtable/test_row_data.py index 56b1c15f0655..ce901fb30dce 100644 --- a/gcloud/bigtable/test_row_data.py +++ b/gcloud/bigtable/test_row_data.py @@ -28,20 +28,20 @@ def _makeOne(self, *args, **kwargs): def _from_pb_test_helper(self, labels=None): import datetime from gcloud._helpers import _EPOCH - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) timestamp_micros = 18738724000 # Make sure millis granularity timestamp = _EPOCH + datetime.timedelta(microseconds=timestamp_micros) value = b'value-bytes' if labels is None: - cell_pb = data_pb2.Cell(value=value, - timestamp_micros=timestamp_micros) + cell_pb = data_v1_pb2.Cell( + value=value, timestamp_micros=timestamp_micros) cell_expected = self._makeOne(value, timestamp) else: - cell_pb = data_pb2.Cell(value=value, - timestamp_micros=timestamp_micros, - labels=labels) + cell_pb = data_v1_pb2.Cell( + value=value, timestamp_micros=timestamp_micros, labels=labels) cell_expected = self._makeOne(value, timestamp, 
labels=labels) klass = self._getTargetClass() @@ -209,10 +209,10 @@ def test_clear(self): def test__handle_commit_row(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) partial_row_data = self._makeOne(None) - chunk = messages_pb2.ReadRowsResponse.Chunk(commit_row=True) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(commit_row=True) index = last_chunk_index = 1 self.assertFalse(partial_row_data.committed) @@ -221,33 +221,34 @@ def test__handle_commit_row(self): def test__handle_commit_row_false(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) partial_row_data = self._makeOne(None) - chunk = messages_pb2.ReadRowsResponse.Chunk(commit_row=False) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(commit_row=False) with self.assertRaises(ValueError): partial_row_data._handle_commit_row(chunk, None, None) def test__handle_commit_row_not_last_chunk(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) partial_row_data = self._makeOne(None) - chunk = messages_pb2.ReadRowsResponse.Chunk(commit_row=True) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(commit_row=True) with self.assertRaises(ValueError): index = 0 last_chunk_index = 1 self.assertNotEqual(index, last_chunk_index) - partial_row_data._handle_commit_row(chunk, index, last_chunk_index) + partial_row_data._handle_commit_row( + chunk, index, last_chunk_index) def test__handle_reset_row(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) partial_row_data = self._makeOne(None) - chunk = messages_pb2.ReadRowsResponse.Chunk(reset_row=True) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(reset_row=True) # Modify the PartialRowData object so we can check it's been cleared. 
partial_row_data._cells = {1: 2} @@ -258,33 +259,35 @@ def test__handle_reset_row(self): def test__handle_reset_row_failure(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) partial_row_data = self._makeOne(None) - chunk = messages_pb2.ReadRowsResponse.Chunk(reset_row=False) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(reset_row=False) with self.assertRaises(ValueError): partial_row_data._handle_reset_row(chunk) def test__handle_row_contents(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable.row_data import Cell partial_row_data = self._makeOne(None) - cell1_pb = data_pb2.Cell(timestamp_micros=1, value=b'val1') - cell2_pb = data_pb2.Cell(timestamp_micros=200, value=b'val2') - cell3_pb = data_pb2.Cell(timestamp_micros=300000, value=b'val3') + cell1_pb = data_v1_pb2.Cell(timestamp_micros=1, value=b'val1') + cell2_pb = data_v1_pb2.Cell(timestamp_micros=200, value=b'val2') + cell3_pb = data_v1_pb2.Cell(timestamp_micros=300000, value=b'val3') col1 = b'col1' col2 = b'col2' columns = [ - data_pb2.Column(qualifier=col1, cells=[cell1_pb, cell2_pb]), - data_pb2.Column(qualifier=col2, cells=[cell3_pb]), + data_v1_pb2.Column(qualifier=col1, cells=[cell1_pb, cell2_pb]), + data_v1_pb2.Column(qualifier=col2, cells=[cell3_pb]), ] family_name = u'name' - row_contents = data_pb2.Family(name=family_name, columns=columns) - chunk = messages_pb2.ReadRowsResponse.Chunk(row_contents=row_contents) + row_contents = data_v1_pb2.Family(name=family_name, columns=columns) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk( + row_contents=row_contents) self.assertEqual(partial_row_data.cells, {}) partial_row_data._handle_row_contents(chunk) @@ -297,31 +300,34 @@ def test__handle_row_contents(self): self.assertEqual(partial_row_data.cells, expected_cells) def test_update_from_read_rows(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) row_key = b'row-key' partial_row_data = self._makeOne(row_key) # Set-up chunk1, some data that will be reset by chunk2. ignored_family_name = u'ignore-name' - row_contents = data_pb2.Family(name=ignored_family_name) - chunk1 = messages_pb2.ReadRowsResponse.Chunk(row_contents=row_contents) + row_contents = data_v1_pb2.Family(name=ignored_family_name) + chunk1 = messages_v1_pb2.ReadRowsResponse.Chunk( + row_contents=row_contents) # Set-up chunk2, a reset row. - chunk2 = messages_pb2.ReadRowsResponse.Chunk(reset_row=True) + chunk2 = messages_v1_pb2.ReadRowsResponse.Chunk(reset_row=True) # Set-up chunk3, a column family with no columns. family_name = u'name' - row_contents = data_pb2.Family(name=family_name) - chunk3 = messages_pb2.ReadRowsResponse.Chunk(row_contents=row_contents) + row_contents = data_v1_pb2.Family(name=family_name) + chunk3 = messages_v1_pb2.ReadRowsResponse.Chunk( + row_contents=row_contents) # Set-up chunk4, a commit row. 
- chunk4 = messages_pb2.ReadRowsResponse.Chunk(commit_row=True) + chunk4 = messages_v1_pb2.ReadRowsResponse.Chunk(commit_row=True) # Prepare request and make sure PartialRowData is empty before. - read_rows_response_pb = messages_pb2.ReadRowsResponse( + read_rows_response_pb = messages_v1_pb2.ReadRowsResponse( row_key=row_key, chunks=[chunk1, chunk2, chunk3, chunk4]) self.assertEqual(partial_row_data.cells, {}) self.assertFalse(partial_row_data.committed) @@ -346,7 +352,7 @@ def test_update_from_read_rows_while_committed(self): def test_update_from_read_rows_row_key_disagree(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) row_key1 = b'row-key1' row_key2 = b'row-key2' @@ -354,7 +360,8 @@ def test_update_from_read_rows_row_key_disagree(self): self.assertFalse(partial_row_data._chunks_encountered) self.assertNotEqual(row_key1, row_key2) - read_rows_response_pb = messages_pb2.ReadRowsResponse(row_key=row_key2) + read_rows_response_pb = messages_v1_pb2.ReadRowsResponse( + row_key=row_key2) with self.assertRaises(ValueError): partial_row_data.update_from_read_rows(read_rows_response_pb) @@ -362,14 +369,14 @@ def test_update_from_read_rows_row_key_disagree(self): def test_update_from_read_rows_empty_chunk(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) row_key = b'row-key' partial_row_data = self._makeOne(row_key) self.assertFalse(partial_row_data._chunks_encountered) - chunk = messages_pb2.ReadRowsResponse.Chunk() - read_rows_response_pb = messages_pb2.ReadRowsResponse( + chunk = messages_v1_pb2.ReadRowsResponse.Chunk() + read_rows_response_pb = messages_v1_pb2.ReadRowsResponse( row_key=row_key, chunks=[chunk]) # This makes it an "empty" chunk. 
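
A note on the chunk protocol these helpers parse: each V1 ReadRowsResponse streams fragments of a single row, and every Chunk sets exactly one of three fields. A standalone sketch, assuming only the V1 generated modules already imported above (key and family name are placeholders):

    from gcloud.bigtable._generated import (
        bigtable_data_pb2 as data_v1_pb2)
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_v1_pb2)

    # row_contents adds cells, reset_row discards anything buffered so far,
    # and commit_row (which must be the final chunk) completes the row.
    contents = messages_v1_pb2.ReadRowsResponse.Chunk(
        row_contents=data_v1_pb2.Family(name=u'name'))
    reset = messages_v1_pb2.ReadRowsResponse.Chunk(reset_row=True)
    commit = messages_v1_pb2.ReadRowsResponse.Chunk(commit_row=True)
    response_pb = messages_v1_pb2.ReadRowsResponse(
        row_key=b'row-key', chunks=[contents, reset, commit])

PartialRowData.update_from_read_rows consumes such a response chunk by chunk, which is what the tests above drive one handler at a time.
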
@@ -451,11 +458,11 @@ def test_cancel(self): def test_consume_next(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable.row_data import PartialRowData row_key = b'row-key' - value_pb = messages_pb2.ReadRowsResponse(row_key=row_key) + value_pb = messages_v1_pb2.ReadRowsResponse(row_key=row_key) response_iterator = _MockCancellableIterator(value_pb) partial_rows_data = self._makeOne(response_iterator) self.assertEqual(partial_rows_data.rows, {}) @@ -465,13 +472,13 @@ def test_consume_next(self): def test_consume_next_row_exists(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable.row_data import PartialRowData row_key = b'row-key' - chunk = messages_pb2.ReadRowsResponse.Chunk(commit_row=True) - value_pb = messages_pb2.ReadRowsResponse(row_key=row_key, - chunks=[chunk]) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(commit_row=True) + value_pb = messages_v1_pb2.ReadRowsResponse( + row_key=row_key, chunks=[chunk]) response_iterator = _MockCancellableIterator(value_pb) partial_rows_data = self._makeOne(response_iterator) existing_values = PartialRowData(row_key) @@ -495,7 +502,8 @@ def test_consume_all(self): partial_rows_data = klass(response_iterator) self.assertEqual(partial_rows_data._consumed, []) partial_rows_data.consume_all() - self.assertEqual(partial_rows_data._consumed, [value1, value2, value3]) + self.assertEqual( + partial_rows_data._consumed, [value1, value2, value3]) def test_consume_all_with_max_loops(self): klass = self._getDoNothingClass() @@ -507,7 +515,8 @@ def test_consume_all_with_max_loops(self): partial_rows_data.consume_all(max_loops=1) self.assertEqual(partial_rows_data._consumed, [value1]) # Make sure the iterator still has the remaining values. 
- self.assertEqual(list(response_iterator.iter_values), [value2, value3]) + self.assertEqual( + list(response_iterator.iter_values), [value2, value3]) class _MockCancellableIterator(object): diff --git a/gcloud/bigtable/test_row_filters.py b/gcloud/bigtable/test_row_filters.py index aed90574683f..768ffb79bd32 100644 --- a/gcloud/bigtable/test_row_filters.py +++ b/gcloud/bigtable/test_row_filters.py @@ -60,12 +60,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) flag = True row_filter = self._makeOne(flag) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(sink=flag) + expected_pb = data_v1_pb2.RowFilter(sink=flag) self.assertEqual(pb_val, expected_pb) @@ -79,12 +80,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) flag = True row_filter = self._makeOne(flag) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(pass_all_filter=flag) + expected_pb = data_v1_pb2.RowFilter(pass_all_filter=flag) self.assertEqual(pb_val, expected_pb) @@ -98,12 +100,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) flag = True row_filter = self._makeOne(flag) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(block_all_filter=flag) + expected_pb = data_v1_pb2.RowFilter(block_all_filter=flag) self.assertEqual(pb_val, expected_pb) @@ -156,12 +159,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) regex = b'row-key-regex' row_filter = self._makeOne(regex) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(row_key_regex_filter=regex) + expected_pb = data_v1_pb2.RowFilter(row_key_regex_filter=regex) self.assertEqual(pb_val, expected_pb) @@ -192,12 +196,13 @@ def test___eq__same_value(self): self.assertEqual(row_filter1, row_filter2) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) sample = 0.25 row_filter = self._makeOne(sample) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(row_sample_filter=sample) + expected_pb = data_v1_pb2.RowFilter(row_sample_filter=sample) self.assertEqual(pb_val, expected_pb) @@ -211,12 +216,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) regex = u'family-regex' row_filter = self._makeOne(regex) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(family_name_regex_filter=regex) + expected_pb = data_v1_pb2.RowFilter(family_name_regex_filter=regex) self.assertEqual(pb_val, expected_pb) @@ -230,12 +236,14 @@ def _makeOne(self, *args, **kwargs): return 
self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) regex = b'column-regex' row_filter = self._makeOne(regex) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(column_qualifier_regex_filter=regex) + expected_pb = data_v1_pb2.RowFilter( + column_qualifier_regex_filter=regex) self.assertEqual(pb_val, expected_pb) @@ -280,7 +288,8 @@ def test___ne__same_value(self): def _to_pb_helper(self, start_micros=None, end_micros=None): import datetime from gcloud._helpers import _EPOCH - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) pb_kwargs = {} @@ -294,7 +303,7 @@ def _to_pb_helper(self, start_micros=None, end_micros=None): pb_kwargs['end_timestamp_micros'] = end_micros time_range = self._makeOne(start=start, end=end) - expected_pb = data_pb2.TimestampRange(**pb_kwargs) + expected_pb = data_v1_pb2.TimestampRange(**pb_kwargs) self.assertEqual(time_range.to_pb(), expected_pb) def test_to_pb(self): @@ -342,14 +351,15 @@ def test___eq__same_value(self): self.assertEqual(row_filter1, row_filter2) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import TimestampRange range_ = TimestampRange() row_filter = self._makeOne(range_) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter( - timestamp_range_filter=data_pb2.TimestampRange()) + expected_pb = data_v1_pb2.RowFilter( + timestamp_range_filter=data_v1_pb2.TimestampRange()) self.assertEqual(pb_val, expected_pb) @@ -377,10 +387,12 @@ def test_constructor_explicit(self): end_column = object() inclusive_start = object() inclusive_end = object() - row_filter = self._makeOne(column_family_id, start_column=start_column, - end_column=end_column, - inclusive_start=inclusive_start, - inclusive_end=inclusive_end) + row_filter = self._makeOne( + column_family_id, + start_column=start_column, + end_column=end_column, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end) self.assertTrue(row_filter.column_family_id is column_family_id) self.assertTrue(row_filter.start_column is start_column) self.assertTrue(row_filter.end_column is end_column) @@ -422,66 +434,71 @@ def test___eq__type_differ(self): self.assertNotEqual(row_filter1, row_filter2) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) column_family_id = u'column-family-id' row_filter = self._makeOne(column_family_id) - col_range_pb = data_pb2.ColumnRange(family_name=column_family_id) - expected_pb = data_pb2.RowFilter(column_range_filter=col_range_pb) + col_range_pb = data_v1_pb2.ColumnRange(family_name=column_family_id) + expected_pb = data_v1_pb2.RowFilter(column_range_filter=col_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_inclusive_start(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) column_family_id = u'column-family-id' column = b'column' row_filter = self._makeOne(column_family_id, start_column=column) - col_range_pb = data_pb2.ColumnRange( + col_range_pb = data_v1_pb2.ColumnRange( 
family_name=column_family_id, start_qualifier_inclusive=column, ) - expected_pb = data_pb2.RowFilter(column_range_filter=col_range_pb) + expected_pb = data_v1_pb2.RowFilter(column_range_filter=col_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_exclusive_start(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) column_family_id = u'column-family-id' column = b'column' row_filter = self._makeOne(column_family_id, start_column=column, inclusive_start=False) - col_range_pb = data_pb2.ColumnRange( + col_range_pb = data_v1_pb2.ColumnRange( family_name=column_family_id, start_qualifier_exclusive=column, ) - expected_pb = data_pb2.RowFilter(column_range_filter=col_range_pb) + expected_pb = data_v1_pb2.RowFilter(column_range_filter=col_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_inclusive_end(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) column_family_id = u'column-family-id' column = b'column' row_filter = self._makeOne(column_family_id, end_column=column) - col_range_pb = data_pb2.ColumnRange( + col_range_pb = data_v1_pb2.ColumnRange( family_name=column_family_id, end_qualifier_inclusive=column, ) - expected_pb = data_pb2.RowFilter(column_range_filter=col_range_pb) + expected_pb = data_v1_pb2.RowFilter(column_range_filter=col_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_exclusive_end(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) column_family_id = u'column-family-id' column = b'column' row_filter = self._makeOne(column_family_id, end_column=column, inclusive_end=False) - col_range_pb = data_pb2.ColumnRange( + col_range_pb = data_v1_pb2.ColumnRange( family_name=column_family_id, end_qualifier_exclusive=column, ) - expected_pb = data_pb2.RowFilter(column_range_filter=col_range_pb) + expected_pb = data_v1_pb2.RowFilter(column_range_filter=col_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) @@ -495,12 +512,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) regex = b'value-regex' row_filter = self._makeOne(regex) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(value_regex_filter=regex) + expected_pb = data_v1_pb2.RowFilter(value_regex_filter=regex) self.assertEqual(pb_val, expected_pb) @@ -561,47 +579,52 @@ def test___eq__type_differ(self): self.assertNotEqual(row_filter1, row_filter2) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) row_filter = self._makeOne() - expected_pb = data_pb2.RowFilter( - value_range_filter=data_pb2.ValueRange()) + expected_pb = data_v1_pb2.RowFilter( + value_range_filter=data_v1_pb2.ValueRange()) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_inclusive_start(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) value = b'some-value' row_filter = self._makeOne(start_value=value) - val_range_pb 
= data_pb2.ValueRange(start_value_inclusive=value) - expected_pb = data_pb2.RowFilter(value_range_filter=val_range_pb) + val_range_pb = data_v1_pb2.ValueRange(start_value_inclusive=value) + expected_pb = data_v1_pb2.RowFilter(value_range_filter=val_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_exclusive_start(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) value = b'some-value' row_filter = self._makeOne(start_value=value, inclusive_start=False) - val_range_pb = data_pb2.ValueRange(start_value_exclusive=value) - expected_pb = data_pb2.RowFilter(value_range_filter=val_range_pb) + val_range_pb = data_v1_pb2.ValueRange(start_value_exclusive=value) + expected_pb = data_v1_pb2.RowFilter(value_range_filter=val_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_inclusive_end(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) value = b'some-value' row_filter = self._makeOne(end_value=value) - val_range_pb = data_pb2.ValueRange(end_value_inclusive=value) - expected_pb = data_pb2.RowFilter(value_range_filter=val_range_pb) + val_range_pb = data_v1_pb2.ValueRange(end_value_inclusive=value) + expected_pb = data_v1_pb2.RowFilter(value_range_filter=val_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) def test_to_pb_exclusive_end(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) value = b'some-value' row_filter = self._makeOne(end_value=value, inclusive_end=False) - val_range_pb = data_pb2.ValueRange(end_value_exclusive=value) - expected_pb = data_pb2.RowFilter(value_range_filter=val_range_pb) + val_range_pb = data_v1_pb2.ValueRange(end_value_exclusive=value) + expected_pb = data_v1_pb2.RowFilter(value_range_filter=val_range_pb) self.assertEqual(row_filter.to_pb(), expected_pb) @@ -649,12 +672,14 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) num_cells = 76 row_filter = self._makeOne(num_cells) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(cells_per_row_offset_filter=num_cells) + expected_pb = data_v1_pb2.RowFilter( + cells_per_row_offset_filter=num_cells) self.assertEqual(pb_val, expected_pb) @@ -668,12 +693,14 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) num_cells = 189 row_filter = self._makeOne(num_cells) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(cells_per_row_limit_filter=num_cells) + expected_pb = data_v1_pb2.RowFilter( + cells_per_row_limit_filter=num_cells) self.assertEqual(pb_val, expected_pb) @@ -687,12 +714,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) num_cells = 10 row_filter = self._makeOne(num_cells) pb_val = row_filter.to_pb() - expected_pb = 
data_pb2.RowFilter( + expected_pb = data_v1_pb2.RowFilter( cells_per_column_limit_filter=num_cells) self.assertEqual(pb_val, expected_pb) @@ -707,12 +735,13 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) flag = True row_filter = self._makeOne(flag) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(strip_value_transformer=flag) + expected_pb = data_v1_pb2.RowFilter(strip_value_transformer=flag) self.assertEqual(pb_val, expected_pb) @@ -743,12 +772,13 @@ def test___eq__same_value(self): self.assertEqual(row_filter1, row_filter2) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) label = u'label' row_filter = self._makeOne(label) pb_val = row_filter.to_pb() - expected_pb = data_pb2.RowFilter(apply_label_transformer=label) + expected_pb = data_v1_pb2.RowFilter(apply_label_transformer=label) self.assertEqual(pb_val, expected_pb) @@ -793,7 +823,8 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import RowSampleFilter from gcloud.bigtable.row_filters import StripValueTransformerFilter @@ -806,15 +837,16 @@ def test_to_pb(self): row_filter3 = self._makeOne(filters=[row_filter1, row_filter2]) filter_pb = row_filter3.to_pb() - expected_pb = data_pb2.RowFilter( - chain=data_pb2.RowFilter.Chain( + expected_pb = data_v1_pb2.RowFilter( + chain=data_v1_pb2.RowFilter.Chain( filters=[row_filter1_pb, row_filter2_pb], ), ) self.assertEqual(filter_pb, expected_pb) def test_to_pb_nested(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import CellsRowLimitFilter from gcloud.bigtable.row_filters import RowSampleFilter from gcloud.bigtable.row_filters import StripValueTransformerFilter @@ -831,8 +863,8 @@ def test_to_pb_nested(self): row_filter5 = self._makeOne(filters=[row_filter3, row_filter4]) filter_pb = row_filter5.to_pb() - expected_pb = data_pb2.RowFilter( - chain=data_pb2.RowFilter.Chain( + expected_pb = data_v1_pb2.RowFilter( + chain=data_v1_pb2.RowFilter.Chain( filters=[row_filter3_pb, row_filter4_pb], ), ) @@ -849,7 +881,8 @@ def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import RowSampleFilter from gcloud.bigtable.row_filters import StripValueTransformerFilter @@ -862,15 +895,16 @@ def test_to_pb(self): row_filter3 = self._makeOne(filters=[row_filter1, row_filter2]) filter_pb = row_filter3.to_pb() - expected_pb = data_pb2.RowFilter( - interleave=data_pb2.RowFilter.Interleave( + expected_pb = data_v1_pb2.RowFilter( + interleave=data_v1_pb2.RowFilter.Interleave( filters=[row_filter1_pb, row_filter2_pb], ), ) self.assertEqual(filter_pb, expected_pb) def test_to_pb_nested(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + 
from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import CellsRowLimitFilter from gcloud.bigtable.row_filters import RowSampleFilter from gcloud.bigtable.row_filters import StripValueTransformerFilter @@ -887,8 +921,8 @@ def test_to_pb_nested(self): row_filter5 = self._makeOne(filters=[row_filter3, row_filter4]) filter_pb = row_filter5.to_pb() - expected_pb = data_pb2.RowFilter( - interleave=data_pb2.RowFilter.Interleave( + expected_pb = data_v1_pb2.RowFilter( + interleave=data_v1_pb2.RowFilter.Interleave( filters=[row_filter3_pb, row_filter4_pb], ), ) @@ -938,7 +972,8 @@ def test___eq__type_differ(self): self.assertNotEqual(cond_filter1, cond_filter2) def test_to_pb(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import CellsRowOffsetFilter from gcloud.bigtable.row_filters import RowSampleFilter from gcloud.bigtable.row_filters import StripValueTransformerFilter @@ -956,8 +991,8 @@ def test_to_pb(self): false_filter=row_filter3) filter_pb = row_filter4.to_pb() - expected_pb = data_pb2.RowFilter( - condition=data_pb2.RowFilter.Condition( + expected_pb = data_v1_pb2.RowFilter( + condition=data_v1_pb2.RowFilter.Condition( predicate_filter=row_filter1_pb, true_filter=row_filter2_pb, false_filter=row_filter3_pb, @@ -966,7 +1001,8 @@ def test_to_pb(self): self.assertEqual(filter_pb, expected_pb) def test_to_pb_true_only(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import RowSampleFilter from gcloud.bigtable.row_filters import StripValueTransformerFilter @@ -979,8 +1015,8 @@ def test_to_pb_true_only(self): row_filter3 = self._makeOne(row_filter1, true_filter=row_filter2) filter_pb = row_filter3.to_pb() - expected_pb = data_pb2.RowFilter( - condition=data_pb2.RowFilter.Condition( + expected_pb = data_v1_pb2.RowFilter( + condition=data_v1_pb2.RowFilter.Condition( predicate_filter=row_filter1_pb, true_filter=row_filter2_pb, ), @@ -988,7 +1024,8 @@ def test_to_pb_true_only(self): self.assertEqual(filter_pb, expected_pb) def test_to_pb_false_only(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 + from gcloud.bigtable._generated import ( + bigtable_data_pb2 as data_v1_pb2) from gcloud.bigtable.row_filters import RowSampleFilter from gcloud.bigtable.row_filters import StripValueTransformerFilter @@ -1001,8 +1038,8 @@ def test_to_pb_false_only(self): row_filter3 = self._makeOne(row_filter1, false_filter=row_filter2) filter_pb = row_filter3.to_pb() - expected_pb = data_pb2.RowFilter( - condition=data_pb2.RowFilter.Condition( + expected_pb = data_v1_pb2.RowFilter( + condition=data_v1_pb2.RowFilter.Condition( predicate_filter=row_filter1_pb, false_filter=row_filter2_pb, ), diff --git a/gcloud/bigtable/test_table.py b/gcloud/bigtable/test_table.py index 09d5baba225d..0f015777aadf 100644 --- a/gcloud/bigtable/test_table.py +++ b/gcloud/bigtable/test_table.py @@ -126,9 +126,9 @@ def test___ne__(self): def _create_test_helper(self, initial_split_keys): from gcloud.bigtable._generated import ( - bigtable_table_data_pb2 as data_pb2) + bigtable_table_data_pb2 as data_v1_pb2) from gcloud.bigtable._generated import ( - bigtable_table_service_messages_pb2 as messages_pb2) + bigtable_table_service_messages_pb2 as messages_v1_pb2) from 
gcloud.bigtable._testing import _FakeStub project_id = 'project-id' @@ -144,14 +144,14 @@ def _create_test_helper(self, initial_split_keys): table = self._makeOne(table_id, cluster) # Create request_pb - request_pb = messages_pb2.CreateTableRequest( + request_pb = messages_v1_pb2.CreateTableRequest( initial_split_keys=initial_split_keys, name=cluster_name, table_id=table_id, ) # Create response_pb - response_pb = data_pb2.Table() + response_pb = data_v1_pb2.Table() # Patch the stub used by the API method. client._table_stub = stub = _FakeStub(response_pb) @@ -178,9 +178,9 @@ def test_create_with_split_keys(self): def _list_column_families_helper(self, column_family_name=None): from gcloud.bigtable._generated import ( - bigtable_table_data_pb2 as data_pb2) + bigtable_table_data_pb2 as data_v1_pb2) from gcloud.bigtable._generated import ( - bigtable_table_service_messages_pb2 as messages_pb2) + bigtable_table_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub project_id = 'project-id' @@ -197,15 +197,15 @@ def _list_column_families_helper(self, column_family_name=None): # Create request_pb table_name = cluster_name + '/tables/' + table_id - request_pb = messages_pb2.GetTableRequest(name=table_name) + request_pb = messages_v1_pb2.GetTableRequest(name=table_name) # Create response_pb column_family_id = 'foo' if column_family_name is None: column_family_name = (table_name + '/columnFamilies/' + column_family_id) - column_family = data_pb2.ColumnFamily(name=column_family_name) - response_pb = data_pb2.Table( + column_family = data_v1_pb2.ColumnFamily(name=column_family_name) + response_pb = data_v1_pb2.Table( column_families={column_family_id: column_family}, ) @@ -238,7 +238,7 @@ def test_list_column_families_failure(self): def test_delete(self): from google.protobuf import empty_pb2 from gcloud.bigtable._generated import ( - bigtable_table_service_messages_pb2 as messages_pb2) + bigtable_table_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub project_id = 'project-id' @@ -255,7 +255,7 @@ def test_delete(self): # Create request_pb table_name = cluster_name + '/tables/' + table_id - request_pb = messages_pb2.DeleteTableRequest(name=table_name) + request_pb = messages_v1_pb2.DeleteTableRequest(name=table_name) # Create response_pb response_pb = empty_pb2.Empty() @@ -278,7 +278,7 @@ def test_delete(self): def _read_row_helper(self, chunks): from gcloud._testing import _Monkey from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub from gcloud.bigtable.row_data import PartialRowData from gcloud.bigtable import table as MUT @@ -304,8 +304,8 @@ def mock_create_row_request(table_name, row_key, filter_): # Create response_iterator row_key = b'row-key' - response_pb = messages_pb2.ReadRowsResponse(row_key=row_key, - chunks=chunks) + response_pb = messages_v1_pb2.ReadRowsResponse( + row_key=row_key, chunks=chunks) response_iterator = [response_pb] # Patch the stub used by the API method. 
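
The read-path tests below all bottom out in the same V1 request message, which addresses rows in one of two ways. A minimal sketch, assuming the V1 generated modules used throughout this file (the conflict test below shows the helper rejects supplying both forms at once):

    from gcloud.bigtable._generated import (
        bigtable_data_pb2 as data_v1_pb2)
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_v1_pb2)

    # Point lookup: address a single row directly by key.
    point_request = messages_v1_pb2.ReadRowsRequest(
        table_name='table_name', row_key=b'row-key')

    # Scan: a key range via row_range instead of row_key.
    scan_request = messages_v1_pb2.ReadRowsRequest(
        table_name='table_name',
        row_range=data_v1_pb2.RowRange(
            start_key=b'start_key', end_key=b'end_key'),
    )
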
@@ -334,9 +334,9 @@ def mock_create_row_request(table_name, row_key, filter_): def test_read_row(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) - chunk = messages_pb2.ReadRowsResponse.Chunk(commit_row=True) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(commit_row=True) chunks = [chunk] self._read_row_helper(chunks) @@ -346,10 +346,10 @@ def test_read_empty_row(self): def test_read_row_still_partial(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) # There is never a "commit row". - chunk = messages_pb2.ReadRowsResponse.Chunk(reset_row=True) + chunk = messages_v1_pb2.ReadRowsResponse.Chunk(reset_row=True) chunks = [chunk] with self.assertRaises(ValueError): self._read_row_helper(chunks) @@ -416,7 +416,7 @@ def mock_create_row_request(table_name, **kwargs): def test_sample_row_keys(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable._testing import _FakeStub project_id = 'project-id' @@ -433,7 +433,8 @@ def test_sample_row_keys(self): # Create request_pb table_name = cluster_name + '/tables/' + table_id - request_pb = messages_pb2.SampleRowKeysRequest(table_name=table_name) + request_pb = messages_v1_pb2.SampleRowKeysRequest( + table_name=table_name) # Create response_iterator response_iterator = object() # Just passed to a mock. @@ -466,11 +467,12 @@ def _callFUT(self, table_name, row_key=None, start_key=None, end_key=None, def test_table_name_only(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) table_name = 'table_name' result = self._callFUT(table_name) - expected_result = messages_pb2.ReadRowsRequest(table_name=table_name) + expected_result = messages_v1_pb2.ReadRowsRequest( + table_name=table_name) self.assertEqual(result, expected_result) def test_row_key_row_range_conflict(self): @@ -479,70 +481,74 @@ def test_row_key_row_range_conflict(self): def test_row_key(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) table_name = 'table_name' row_key = b'row_key' result = self._callFUT(table_name, row_key=row_key) - expected_result = messages_pb2.ReadRowsRequest( + expected_result = messages_v1_pb2.ReadRowsRequest( table_name=table_name, row_key=row_key, ) self.assertEqual(result, expected_result) def test_row_range_start_key(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) table_name = 'table_name' start_key = b'start_key' result = self._callFUT(table_name, start_key=start_key) - expected_result = messages_pb2.ReadRowsRequest( + expected_result = messages_v1_pb2.ReadRowsRequest( table_name=table_name, - row_range=data_pb2.RowRange(start_key=start_key), + row_range=data_v1_pb2.RowRange(start_key=start_key), ) self.assertEqual(result, expected_result) def test_row_range_end_key(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as 
messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) table_name = 'table_name' end_key = b'end_key' result = self._callFUT(table_name, end_key=end_key) - expected_result = messages_pb2.ReadRowsRequest( + expected_result = messages_v1_pb2.ReadRowsRequest( table_name=table_name, - row_range=data_pb2.RowRange(end_key=end_key), + row_range=data_v1_pb2.RowRange(end_key=end_key), ) self.assertEqual(result, expected_result) def test_row_range_both_keys(self): - from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_data_pb2 as data_v1_pb2) + from gcloud.bigtable._generated import ( + bigtable_service_messages_pb2 as messages_v1_pb2) table_name = 'table_name' start_key = b'start_key' end_key = b'end_key' result = self._callFUT(table_name, start_key=start_key, end_key=end_key) - expected_result = messages_pb2.ReadRowsRequest( + expected_result = messages_v1_pb2.ReadRowsRequest( table_name=table_name, - row_range=data_pb2.RowRange(start_key=start_key, end_key=end_key), + row_range=data_v1_pb2.RowRange( + start_key=start_key, end_key=end_key), ) self.assertEqual(result, expected_result) def test_with_filter(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) from gcloud.bigtable.row_filters import RowSampleFilter table_name = 'table_name' row_filter = RowSampleFilter(0.33) result = self._callFUT(table_name, filter_=row_filter) - expected_result = messages_pb2.ReadRowsRequest( + expected_result = messages_v1_pb2.ReadRowsRequest( table_name=table_name, filter=row_filter.to_pb(), ) @@ -550,13 +556,13 @@ def test_with_filter(self): def test_with_allow_row_interleaving(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) table_name = 'table_name' allow_row_interleaving = True result = self._callFUT(table_name, allow_row_interleaving=allow_row_interleaving) - expected_result = messages_pb2.ReadRowsRequest( + expected_result = messages_v1_pb2.ReadRowsRequest( table_name=table_name, allow_row_interleaving=allow_row_interleaving, ) @@ -564,12 +570,12 @@ def test_with_allow_row_interleaving(self): def test_with_limit(self): from gcloud.bigtable._generated import ( - bigtable_service_messages_pb2 as messages_pb2) + bigtable_service_messages_pb2 as messages_v1_pb2) table_name = 'table_name' limit = 1337 result = self._callFUT(table_name, limit=limit) - expected_result = messages_pb2.ReadRowsRequest( + expected_result = messages_v1_pb2.ReadRowsRequest( table_name=table_name, num_rows_limit=limit, )
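
Finally, the optional scan parameters exercised in the last few tests map onto pb fields whose names differ from the helper's keyword arguments (`filter_` vs. `filter`, `limit` vs. `num_rows_limit`). A combined sketch, assuming the fields compose on one request the same way they are set individually in the tests above:

    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_v1_pb2)
    from gcloud.bigtable.row_filters import RowSampleFilter

    request = messages_v1_pb2.ReadRowsRequest(
        table_name='table_name',
        filter=RowSampleFilter(0.33).to_pb(),  # `filter_` keyword
        allow_row_interleaving=True,
        num_rows_limit=1337,  # `limit` keyword
    )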