From 09ece55795159bdc2ba15e9a6bf05fcf4746caae Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 11:53:10 -0500 Subject: [PATCH 1/5] #485: drop Query dep. on Dataset object. Instead, 'dataset_id' and 'connection' are passed in, or derived from the implicit environ. Fixes #485. --- gcloud/datastore/query.py | 84 ++++--- gcloud/datastore/test_query.py | 439 ++++++++++++++++----------------- 2 files changed, 266 insertions(+), 257 deletions(-) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index a0920aebe5be..a5f9f8d696b4 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -19,7 +19,6 @@ from gcloud.datastore import _implicit_environ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import helpers -from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key @@ -32,8 +31,9 @@ class Query(_implicit_environ._DatastoreBase): :type kind: string. :param kind: The kind to query. - :type dataset: :class:`gcloud.datastore.dataset.Dataset`. - :param dataset: The dataset to query. + :type dataset_id: str + :param dataset_id: The ID of the dataset to query. If not passed, + uses the implicit default. :type namespace: string or None. :param namespace: The namespace to which to restrict results. @@ -54,6 +54,9 @@ class Query(_implicit_environ._DatastoreBase): :type group_by: sequence_of_string. :param group_by: field names used to group query results. + + :raises: ValueError if ``dataset_id`` is not passed and no implicit + default is set. 
""" OPERATORS = { @@ -66,15 +69,22 @@ class Query(_implicit_environ._DatastoreBase): """Mapping of operator strings and their protobuf equivalents.""" def __init__(self, + dataset_id=None, kind=None, - dataset=None, namespace=None, ancestor=None, filters=(), projection=(), order=(), group_by=()): - super(Query, self).__init__(dataset=dataset) + + if dataset_id is None: + dataset_id = _implicit_environ.DATASET_ID + + if dataset_id is None: + raise ValueError("No dataset ID supplied, and no default set.") + + self._dataset_id = dataset_id self._kind = kind self._namespace = namespace self._ancestor = ancestor @@ -84,26 +94,12 @@ def __init__(self, self._group_by = list(group_by) @property - def dataset(self): - """Get the dataset for this Query. - - The dataset against which the Query will be run. - - :rtype: :class:`gcloud.datastore.dataset.Dataset` or None, - :returns: the current dataset. - """ - return self._dataset - - @dataset.setter - def dataset(self, value): - """Set the dataset for the query + def dataset_id(self): + """Get the dataset ID for this Query. - :type value: class:`gcloud.datastore.dataset.Dataset` - :param value: the new dataset + :rtype: str """ - if not isinstance(value, Dataset): - raise ValueError("Dataset must be a Dataset") - self._dataset = value + return self._dataset_id @property def namespace(self): @@ -294,34 +290,49 @@ def group_by(self, value): value = [value] self._group_by[:] = value - def fetch(self, limit=0, offset=0, start_cursor=None, end_cursor=None): + def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, + connection=None): """Execute the Query; return an iterator for the matching entities. 
For example:: - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id') - >>> query = dataset.query('Person').filter('name', '=', 'Sally') + >>> from gcloud.datastore.query import Query + >>> query = Query('dataset-id', 'Person') + >>> query.add_filter('name', '=', 'Sally') >>> list(query.fetch()) [, , ...] >>> list(query.fetch(1)) [] - :type limit: integer + :type limit: integer or None :param limit: An optional limit passed through to the iterator. - :type limit: offset - :param limit: An optional offset passed through to the iterator. + :type offset: integer + :param offset: An optional offset passed through to the iterator. - :type start_cursor: offset + :type start_cursor: bytes :param start_cursor: An optional cursor passed through to the iterator. - :type end_cursor: offset + :type end_cursor: bytes :param end_cursor: An optional cursor passed through to the iterator. + :type connection: :class:`gcloud.datastore.connection.Connection` + :param connection: An optional connection passed through to the iterator. + If not supplied, uses the implicit default. + + :rtype: :class:`Iterator` + :raises: ValueError if ``connection`` is not passed and no implicit + default has been set. 
""" - return Iterator(self, limit, offset, start_cursor, end_cursor) + if connection is None: + connection = _implicit_environ.CONNECTION + + if connection is None: + raise ValueError("No connection passed, and no default set") + + return Iterator( + self, connection, limit, offset, start_cursor, end_cursor) class Iterator(object): @@ -334,9 +345,10 @@ class Iterator(object): datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT, ) - def __init__(self, query, limit=None, offset=0, + def __init__(self, query, connection, limit=None, offset=0, start_cursor=None, end_cursor=None): self._query = query + self._connection = connection self._limit = limit self._offset = offset self._start_cursor = start_cursor @@ -366,9 +378,9 @@ def next_page(self): pb.offset = self._offset - query_results = self._query.dataset.connection().run_query( + query_results = self._connection.run_query( query_pb=pb, - dataset_id=self._query.dataset.id(), + dataset_id=self._query.dataset_id, namespace=self._query.namespace, ) # NOTE: `query_results` contains an extra value that we don't use, diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index 08fb628ab24d..67ab27e44bc7 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -17,15 +17,6 @@ class TestQuery(unittest2.TestCase): - def setUp(self): - from gcloud.datastore import _implicit_environ - self._replaced_dataset = _implicit_environ.DATASET - _implicit_environ.DATASET = None - - def tearDown(self): - from gcloud.datastore import _implicit_environ - _implicit_environ.DATASET = self._replaced_dataset - def _getTargetClass(self): from gcloud.datastore.query import Query return Query @@ -33,9 +24,17 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def test_ctor_defaults(self): - query = self._getTargetClass()() - self.assertEqual(query.dataset, None) + def test_ctor_defaults_wo_implicit_dataset_id(self): + _DATASET = 'DATASET' + 
self.assertRaises(ValueError, self._makeOne) + + def test_ctor_defaults_w_implicit_dataset_id(self): + from gcloud._testing import _Monkey + from gcloud.datastore import _implicit_environ + _DATASET = 'DATASET' + with _Monkey(_implicit_environ, DATASET_ID=_DATASET): + query = self._makeOne() + self.assertEqual(query.dataset_id, _DATASET) self.assertEqual(query.kind, None) self.assertEqual(query.namespace, None) self.assertEqual(query.ancestor, None) @@ -45,20 +44,18 @@ def test_ctor_defaults(self): self.assertEqual(query.group_by, []) def test_ctor_explicit(self): - from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key _DATASET = 'DATASET' _KIND = 'KIND' _NAMESPACE = 'NAMESPACE' - dataset = Dataset(_DATASET) ancestor = Key('ANCESTOR', 123, dataset_id=_DATASET) FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)] PROJECTION = ['foo', 'bar', 'baz'] ORDER = ['foo', 'bar'] GROUP_BY = ['foo'] query = self._makeOne( + dataset_id=_DATASET, kind=_KIND, - dataset=dataset, namespace=_NAMESPACE, ancestor=ancestor, filters=FILTERS, @@ -66,7 +63,7 @@ def test_ctor_explicit(self): order=ORDER, group_by=GROUP_BY, ) - self.assertTrue(query.dataset is dataset) + self.assertEqual(query.dataset_id, _DATASET) self.assertEqual(query.kind, _KIND) self.assertEqual(query.namespace, _NAMESPACE) self.assertEqual(query.ancestor.path, ancestor.path) @@ -75,26 +72,9 @@ def test_ctor_explicit(self): self.assertEqual(query.order, ORDER) self.assertEqual(query.group_by, GROUP_BY) - def test_dataset_setter_w_non_dataset(self): - query = self._makeOne() - - def _assign(val): - query.dataset = val - - self.assertRaises(ValueError, _assign, object()) - - def test_dataset_setter(self): - from gcloud.datastore.dataset import Dataset - _DATASET = 'DATASET' - _KIND = 'KIND' - dataset = Dataset(_DATASET) - query = self._makeOne(_KIND) - query.dataset = dataset - self.assertTrue(query.dataset is dataset) - self.assertEqual(query.kind, _KIND) - def 
test_namespace_setter_w_non_string(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) def _assign(val): query.namespace = val @@ -102,17 +82,16 @@ def _assign(val): self.assertRaises(ValueError, _assign, object()) def test_namespace_setter(self): - from gcloud.datastore.dataset import Dataset _DATASET = 'DATASET' _NAMESPACE = 'NAMESPACE' - dataset = Dataset(_DATASET) - query = self._makeOne(dataset=dataset) + query = self._makeOne(_DATASET) query.namespace = _NAMESPACE - self.assertTrue(query.dataset is dataset) + self.assertEqual(query.dataset_id, _DATASET) self.assertEqual(query.namespace, _NAMESPACE) def test_kind_setter_w_non_string(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) def _assign(val): query.kind = val @@ -120,29 +99,26 @@ def _assign(val): self.assertRaises(TypeError, _assign, object()) def test_kind_setter_wo_existing(self): - from gcloud.datastore.dataset import Dataset _DATASET = 'DATASET' _KIND = 'KIND' - dataset = Dataset(_DATASET) - query = self._makeOne(dataset=dataset) + query = self._makeOne(_DATASET) query.kind = _KIND - self.assertTrue(query.dataset is dataset) + self.assertEqual(query.dataset_id, _DATASET) self.assertEqual(query.kind, _KIND) def test_kind_setter_w_existing(self): - from gcloud.datastore.dataset import Dataset _DATASET = 'DATASET' _KIND_BEFORE = 'KIND_BEFORE' _KIND_AFTER = 'KIND_AFTER' - dataset = Dataset(_DATASET) - query = self._makeOne(_KIND_BEFORE, dataset) + query = self._makeOne(_DATASET, _KIND_BEFORE) self.assertEqual(query.kind, _KIND_BEFORE) query.kind = _KIND_AFTER - self.assertTrue(query.dataset is dataset) + self.assertEqual(query.dataset_id, _DATASET) self.assertEqual(query.kind, _KIND_AFTER) def test_ancestor_setter_w_non_key(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) def _assign(val): query.ancestor = val @@ -152,32 +128,37 @@ def _assign(val): def 
test_ancestor_setter_w_key(self): from gcloud.datastore.key import Key + _DATASET = 'DATASET' _NAME = u'NAME' key = Key('KIND', 123, dataset_id='DATASET') - query = self._makeOne() + query = self._makeOne(_DATASET) query.add_filter('name', '=', _NAME) query.ancestor = key self.assertEqual(query.ancestor.path, key.path) def test_ancestor_deleter_w_key(self): from gcloud.datastore.key import Key + _DATASET = 'DATASET' key = Key('KIND', 123, dataset_id='DATASET') - query = self._makeOne(ancestor=key) + query = self._makeOne(_DATASET, ancestor=key) del query.ancestor self.assertTrue(query.ancestor is None) def test_add_filter_setter_w_unknown_operator(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) self.assertRaises(ValueError, query.add_filter, 'firstname', '~~', 'John') def test_add_filter_w_known_operator(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) query.add_filter('firstname', '=', u'John') self.assertEqual(query.filters, [('firstname', '=', u'John')]) def test_add_filter_w_all_operators(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) query.add_filter('leq_prop', '<=', u'val1') query.add_filter('geq_prop', '>=', u'val2') query.add_filter('lt_prop', '<', u'val3') @@ -192,7 +173,8 @@ def test_add_filter_w_all_operators(self): def test_add_filter_w_known_operator_and_entity(self): from gcloud.datastore.entity import Entity - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) other = Entity() other['firstname'] = u'John' other['lastname'] = u'Smith' @@ -200,224 +182,161 @@ def test_add_filter_w_known_operator_and_entity(self): self.assertEqual(query.filters, [('other', '=', other)]) def test_add_filter_w_whitespace_property_name(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) PROPERTY_NAME = ' property with lots of space ' query.add_filter(PROPERTY_NAME, '=', u'John') 
self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')]) def test_add_filter___key__valid_key(self): from gcloud.datastore.key import Key - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) key = Key('Foo', dataset_id='DATASET') query.add_filter('__key__', '=', key) self.assertEqual(query.filters, [('__key__', '=', key)]) def test_filter___key__invalid_operator(self): from gcloud.datastore.key import Key + _DATASET = 'DATASET' key = Key('Foo', dataset_id='DATASET') - query = self._makeOne() + query = self._makeOne(_DATASET) self.assertRaises(ValueError, query.add_filter, '__key__', '<', key) def test_filter___key__invalid_value(self): - query = self._makeOne() + _DATASET = 'DATASET' + query = self._makeOne(_DATASET) self.assertRaises(ValueError, query.add_filter, '__key__', '=', None) def test_projection_setter_empty(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.projection = [] self.assertEqual(query.projection, []) def test_projection_setter_string(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.projection = 'field1' self.assertEqual(query.projection, ['field1']) def test_projection_setter_non_empty(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.projection = ['field1', 'field2'] self.assertEqual(query.projection, ['field1', 'field2']) def test_projection_setter_multiple_calls(self): + _DATASET = 'DATASET' _KIND = 'KIND' _PROJECTION1 = ['field1', 'field2'] _PROJECTION2 = ['field3'] - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.projection = _PROJECTION1 self.assertEqual(query.projection, _PROJECTION1) query.projection = _PROJECTION2 self.assertEqual(query.projection, _PROJECTION2) def test_keys_only(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = 
self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.keys_only() self.assertEqual(query.projection, ['__key__']) def test_order_setter_empty(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND, order=['foo', '-bar']) + query = self._makeOne(_DATASET, _KIND, order=['foo', '-bar']) query.order = [] self.assertEqual(query.order, []) def test_order_setter_string(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.order = 'field' self.assertEqual(query.order, ['field']) def test_order_setter_single_item_list_desc(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.order = ['-field'] self.assertEqual(query.order, ['-field']) def test_order_setter_multiple(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.order = ['foo', '-bar'] self.assertEqual(query.order, ['foo', '-bar']) def test_group_by_setter_empty(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND, group_by=['foo', 'bar']) + query = self._makeOne(_DATASET, _KIND, group_by=['foo', 'bar']) query.group_by = [] self.assertEqual(query.group_by, []) def test_group_by_setter_string(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.group_by = 'field1' self.assertEqual(query.group_by, ['field1']) def test_group_by_setter_non_empty(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.group_by = ['field1', 'field2'] self.assertEqual(query.group_by, ['field1', 'field2']) def test_group_by_multiple_calls(self): + _DATASET = 'DATASET' _KIND = 'KIND' _GROUP_BY1 = ['field1', 'field2'] _GROUP_BY2 = ['field3'] - query = self._makeOne(_KIND) + query = self._makeOne(_DATASET, _KIND) query.group_by = _GROUP_BY1 
self.assertEqual(query.group_by, _GROUP_BY1) query.group_by = _GROUP_BY2 self.assertEqual(query.group_by, _GROUP_BY2) - def test_fetch_defaults(self): + def test_fetch_defaults_wo_implicit_connection(self): + _DATASET = 'DATASET' + _KIND = 'KIND' + query = self._makeOne(_DATASET, _KIND) + self.assertRaises(ValueError, query.fetch) + + def test_fetch_defaults_w_implicit_connection(self): + from gcloud._testing import _Monkey + from gcloud.datastore import _implicit_environ + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) - iterator = query.fetch() + connection = _Connection() + query = self._makeOne(_DATASET, _KIND) + with _Monkey(_implicit_environ, CONNECTION=connection): + iterator = query.fetch() self.assertTrue(iterator._query is query) - self.assertEqual(iterator._limit, 0) + self.assertEqual(iterator._limit, None) self.assertEqual(iterator._offset, 0) def test_fetch_explicit(self): + _DATASET = 'DATASET' _KIND = 'KIND' - query = self._makeOne(_KIND) - iterator = query.fetch(limit=7, offset=8) + connection = _Connection() + query = self._makeOne(_DATASET, _KIND) + iterator = query.fetch(limit=7, offset=8, connection=connection) self.assertTrue(iterator._query is query) self.assertEqual(iterator._limit, 7) self.assertEqual(iterator._offset, 8) -class Test__pb_from_query(unittest2.TestCase): - - def _callFUT(self, query): - from gcloud.datastore.query import _pb_from_query - return _pb_from_query(query) - - def test_empty(self): - from gcloud.datastore import datastore_v1_pb2 as datastore_pb - pb = self._callFUT(_Query()) - self.assertEqual(list(pb.projection), []) - self.assertEqual(list(pb.kind), []) - self.assertEqual(list(pb.order), []) - self.assertEqual(list(pb.group_by), []) - self.assertEqual(pb.filter.property_filter.property.name, '') - cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND) - self.assertEqual(list(cfilter.filter), []) - self.assertEqual(pb.start_cursor, b'') - 
self.assertEqual(pb.end_cursor, b'') - self.assertEqual(pb.limit, 0) - self.assertEqual(pb.offset, 0) - - def test_projection(self): - pb = self._callFUT(_Query(projection=['a', 'b', 'c'])) - self.assertEqual([item.property.name for item in pb.projection], - ['a', 'b', 'c']) - - def test_kind(self): - pb = self._callFUT(_Query(kind='KIND')) - self.assertEqual([item.name for item in pb.kind], ['KIND']) - - def test_ancestor(self): - from gcloud.datastore import datastore_v1_pb2 as datastore_pb - from gcloud.datastore.key import Key - from gcloud.datastore.helpers import _prepare_key_for_request - ancestor = Key('Ancestor', 123, dataset_id='DATASET') - pb = self._callFUT(_Query(ancestor=ancestor)) - cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND) - self.assertEqual(len(cfilter.filter), 1) - pfilter = cfilter.filter[0].property_filter - self.assertEqual(pfilter.property.name, '__key__') - ancestor_pb = _prepare_key_for_request(ancestor.to_protobuf()) - self.assertEqual(pfilter.value.key_value, ancestor_pb) - - def test_filter(self): - from gcloud.datastore import datastore_v1_pb2 as datastore_pb - query = _Query(filters=[('name', '=', u'John')]) - query.OPERATORS = { - '=': datastore_pb.PropertyFilter.EQUAL, - } - pb = self._callFUT(query) - cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND) - self.assertEqual(len(cfilter.filter), 1) - pfilter = cfilter.filter[0].property_filter - self.assertEqual(pfilter.property.name, 'name') - self.assertEqual(pfilter.value.string_value, u'John') - - def test_filter_key(self): - from gcloud.datastore import datastore_v1_pb2 as datastore_pb - from gcloud.datastore.key import Key - from gcloud.datastore.helpers import _prepare_key_for_request - key = Key('Kind', 123, dataset_id='DATASET') - query = _Query(filters=[('__key__', '=', key)]) - query.OPERATORS = { - '=': datastore_pb.PropertyFilter.EQUAL, - } - pb = 
self._callFUT(query) - cfilter = pb.filter.composite_filter - self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND) - self.assertEqual(len(cfilter.filter), 1) - pfilter = cfilter.filter[0].property_filter - self.assertEqual(pfilter.property.name, '__key__') - key_pb = _prepare_key_for_request(key.to_protobuf()) - self.assertEqual(pfilter.value.key_value, key_pb) - - def test_order(self): - from gcloud.datastore import datastore_v1_pb2 as datastore_pb - pb = self._callFUT(_Query(order=['a', '-b', 'c'])) - self.assertEqual([item.property.name for item in pb.order], - ['a', 'b', 'c']) - self.assertEqual([item.direction for item in pb.order], - [datastore_pb.PropertyOrder.ASCENDING, - datastore_pb.PropertyOrder.DESCENDING, - datastore_pb.PropertyOrder.ASCENDING]) - - def test_group_by(self): - pb = self._callFUT(_Query(group_by=['a', 'b', 'c'])) - self.assertEqual([item.name for item in pb.group_by], - ['a', 'b', 'c']) - - class TestIterator(unittest2.TestCase): _DATASET = 'DATASET' _NAMESPACE = 'NAMESPACE' @@ -426,15 +345,6 @@ class TestIterator(unittest2.TestCase): _START = b'\x00' _END = b'\xFF' - def setUp(self): - from gcloud.datastore import _implicit_environ - self._replaced_dataset = _implicit_environ.DATASET - _implicit_environ.DATASET = None - - def tearDown(self): - from gcloud.datastore import _implicit_environ - _implicit_environ.DATASET = self._replaced_dataset - def _getTargetClass(self): from gcloud.datastore.query import Iterator return Iterator @@ -442,37 +352,34 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def _makeDataset(self): - connection = _Connection() - dataset = _Dataset(self._DATASET, connection) - return dataset, connection - - def _addQueryResults(self, dataset, cursor=_END, more=False): + def _addQueryResults(self, connection, cursor=_END, more=False): from gcloud.datastore import datastore_v1_pb2 as datastore_pb MORE = datastore_pb.QueryResultBatch.NOT_FINISHED 
NO_MORE = datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT _ID = 123 entity_pb = datastore_pb.Entity() - entity_pb.key.partition_id.dataset_id = dataset.id() + entity_pb.key.partition_id.dataset_id = self._DATASET path_element = entity_pb.key.path_element.add() path_element.kind = self._KIND path_element.id = _ID prop = entity_pb.property.add() prop.name = 'foo' prop.value.string_value = u'Foo' - dataset.connection()._results.append( + connection._results.append( ([entity_pb], cursor, MORE if more else NO_MORE)) def test_ctor_defaults(self): + connection = _Connection() query = object() - iterator = self._makeOne(query) + iterator = self._makeOne(query, connection) self.assertTrue(iterator._query is query) self.assertEqual(iterator._limit, None) self.assertEqual(iterator._offset, 0) def test_ctor_explicit(self): - query = object() - iterator = self._makeOne(query, 13, 29) + connection = _Connection() + query = _Query() + iterator = self._makeOne(query, connection, 13, 29) self.assertTrue(iterator._query is query) self.assertEqual(iterator._limit, 13) self.assertEqual(iterator._offset, 29) @@ -480,11 +387,10 @@ def test_ctor_explicit(self): def test_next_page_no_cursors_no_more(self): from base64 import b64encode from gcloud.datastore.query import _pb_from_query - self._KIND = 'KIND' - dataset, connection = self._makeDataset() - query = _Query(self._KIND, dataset, self._NAMESPACE) - self._addQueryResults(dataset) - iterator = self._makeOne(query) + connection = _Connection() + query = _Query(self._DATASET, self._KIND, self._NAMESPACE) + self._addQueryResults(connection) + iterator = self._makeOne(query, connection) entities, more_results, cursor = iterator.next_page() self.assertEqual(cursor, b64encode(self._END)) @@ -506,11 +412,10 @@ def test_next_page_no_cursors_no_more(self): def test_next_page_no_cursors_no_more_w_offset_and_limit(self): from base64 import b64encode from gcloud.datastore.query import _pb_from_query - self._KIND = 'KIND' - dataset, 
connection = self._makeDataset() - query = _Query(self._KIND, dataset, self._NAMESPACE) - self._addQueryResults(dataset) - iterator = self._makeOne(query, 13, 29) + connection = _Connection() + query = _Query(self._DATASET, self._KIND, self._NAMESPACE) + self._addQueryResults(connection) + iterator = self._makeOne(query, connection, 13, 29) entities, more_results, cursor = iterator.next_page() self.assertEqual(cursor, b64encode(self._END)) @@ -534,10 +439,10 @@ def test_next_page_w_cursors_w_more(self): from base64 import b64decode from base64 import b64encode from gcloud.datastore.query import _pb_from_query - dataset, connection = self._makeDataset() - query = _Query(self._KIND, dataset, self._NAMESPACE) - self._addQueryResults(dataset, cursor=self._END, more=True) - iterator = self._makeOne(query) + connection = _Connection() + query = _Query(self._DATASET, self._KIND, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + iterator = self._makeOne(query, connection) iterator._start_cursor = self._START iterator._end_cursor = self._END entities, more_results, cursor = iterator.next_page() @@ -563,21 +468,20 @@ def test_next_page_w_cursors_w_more(self): self.assertEqual(connection._called_with, [EXPECTED]) def test_next_page_w_cursors_w_bogus_more(self): - dataset, connection = self._makeDataset() - query = _Query(self._KIND, dataset, self._NAMESPACE) - self._addQueryResults(dataset, cursor=self._END, more=True) + connection = _Connection() + query = _Query(self._DATASET, self._KIND, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) epb, cursor, _ = connection._results.pop() connection._results.append((epb, cursor, 4)) # invalid enum - iterator = self._makeOne(query) + iterator = self._makeOne(query, connection) self.assertRaises(ValueError, iterator.next_page) def test___iter___no_more(self): from gcloud.datastore.query import _pb_from_query - self._KIND = 'KIND' - dataset, connection = 
self._makeDataset() - query = _Query(self._KIND, dataset, self._NAMESPACE) - self._addQueryResults(dataset) - iterator = self._makeOne(query) + connection = _Connection() + query = _Query(self._DATASET, self._KIND, self._NAMESPACE) + self._addQueryResults(connection) + iterator = self._makeOne(query, connection) entities = list(iterator) self.assertFalse(iterator._more_results) @@ -596,11 +500,11 @@ def test___iter___no_more(self): def test___iter___w_more(self): from gcloud.datastore.query import _pb_from_query - dataset, connection = self._makeDataset() - query = _Query(self._KIND, dataset, self._NAMESPACE) - self._addQueryResults(dataset, cursor=self._END, more=True) - self._addQueryResults(dataset) - iterator = self._makeOne(query) + connection = _Connection() + query = _Query(self._DATASET, self._KIND, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + self._addQueryResults(connection) + iterator = self._makeOne(query, connection) entities = list(iterator) self.assertFalse(iterator._more_results) @@ -630,19 +534,112 @@ def test___iter___w_more(self): self.assertEqual(connection._called_with[1], EXPECTED2) +class Test__pb_from_query(unittest2.TestCase): + + def _callFUT(self, query): + from gcloud.datastore.query import _pb_from_query + return _pb_from_query(query) + + def test_empty(self): + from gcloud.datastore import datastore_v1_pb2 as datastore_pb + pb = self._callFUT(_Query()) + self.assertEqual(list(pb.projection), []) + self.assertEqual(list(pb.kind), []) + self.assertEqual(list(pb.order), []) + self.assertEqual(list(pb.group_by), []) + self.assertEqual(pb.filter.property_filter.property.name, '') + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND) + self.assertEqual(list(cfilter.filter), []) + self.assertEqual(pb.start_cursor, b'') + self.assertEqual(pb.end_cursor, b'') + self.assertEqual(pb.limit, 0) + self.assertEqual(pb.offset, 0) + + def 
test_projection(self): + pb = self._callFUT(_Query(projection=['a', 'b', 'c'])) + self.assertEqual([item.property.name for item in pb.projection], + ['a', 'b', 'c']) + + def test_kind(self): + pb = self._callFUT(_Query(kind='KIND')) + self.assertEqual([item.name for item in pb.kind], ['KIND']) + + def test_ancestor(self): + from gcloud.datastore import datastore_v1_pb2 as datastore_pb + from gcloud.datastore.key import Key + from gcloud.datastore.helpers import _prepare_key_for_request + ancestor = Key('Ancestor', 123, dataset_id='DATASET') + pb = self._callFUT(_Query(ancestor=ancestor)) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND) + self.assertEqual(len(cfilter.filter), 1) + pfilter = cfilter.filter[0].property_filter + self.assertEqual(pfilter.property.name, '__key__') + ancestor_pb = _prepare_key_for_request(ancestor.to_protobuf()) + self.assertEqual(pfilter.value.key_value, ancestor_pb) + + def test_filter(self): + from gcloud.datastore import datastore_v1_pb2 as datastore_pb + query = _Query(filters=[('name', '=', u'John')]) + query.OPERATORS = { + '=': datastore_pb.PropertyFilter.EQUAL, + } + pb = self._callFUT(query) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND) + self.assertEqual(len(cfilter.filter), 1) + pfilter = cfilter.filter[0].property_filter + self.assertEqual(pfilter.property.name, 'name') + self.assertEqual(pfilter.value.string_value, u'John') + + def test_filter_key(self): + from gcloud.datastore import datastore_v1_pb2 as datastore_pb + from gcloud.datastore.key import Key + from gcloud.datastore.helpers import _prepare_key_for_request + key = Key('Kind', 123, dataset_id='DATASET') + query = _Query(filters=[('__key__', '=', key)]) + query.OPERATORS = { + '=': datastore_pb.PropertyFilter.EQUAL, + } + pb = self._callFUT(query) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.operator, 
datastore_pb.CompositeFilter.AND) + self.assertEqual(len(cfilter.filter), 1) + pfilter = cfilter.filter[0].property_filter + self.assertEqual(pfilter.property.name, '__key__') + key_pb = _prepare_key_for_request(key.to_protobuf()) + self.assertEqual(pfilter.value.key_value, key_pb) + + def test_order(self): + from gcloud.datastore import datastore_v1_pb2 as datastore_pb + pb = self._callFUT(_Query(order=['a', '-b', 'c'])) + self.assertEqual([item.property.name for item in pb.order], + ['a', 'b', 'c']) + self.assertEqual([item.direction for item in pb.order], + [datastore_pb.PropertyOrder.ASCENDING, + datastore_pb.PropertyOrder.DESCENDING, + datastore_pb.PropertyOrder.ASCENDING]) + + def test_group_by(self): + pb = self._callFUT(_Query(group_by=['a', 'b', 'c'])) + self.assertEqual([item.name for item in pb.group_by], + ['a', 'b', 'c']) + + class _Query(object): def __init__(self, + dataset_id=None, kind=None, - dataset=None, namespace=None, ancestor=None, filters=(), projection=(), order=(), group_by=()): + self.dataset_id = dataset_id self.kind = kind - self.dataset = dataset self.namespace = namespace self.ancestor = ancestor self.filters = filters From 3babce93db0d8422e5184416260c655b615b8f15 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 12:24:00 -0500 Subject: [PATCH 2/5] Remove dead fixture. 
--- gcloud/datastore/test_query.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index 67ab27e44bc7..c739d80f7fef 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -648,19 +648,6 @@ def __init__(self, self.group_by = group_by -class _Dataset(object): - - def __init__(self, id, connection): - self._id = id - self._connection = connection - - def id(self): - return self._id - - def connection(self): - return self._connection - - class _Connection(object): _called_with = None From 6b1b2a86b929691513f95dea1b2b6fb307a34977 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 12:27:54 -0500 Subject: [PATCH 3/5] Remove no-longer-needed base. --- gcloud/datastore/query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index a5f9f8d696b4..b19d44e8066f 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -22,7 +22,7 @@ from gcloud.datastore.key import Key -class Query(_implicit_environ._DatastoreBase): +class Query(object): """A Query against the Cloud Datastore. This class serves as an abstraction for creating a query over data From 7a04aac0ade51e8063f80bbd74f73797185024a3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 12:28:06 -0500 Subject: [PATCH 4/5] Remove unused variables. 
--- gcloud/datastore/test_query.py | 1 - 1 file changed, 1 deletion(-) diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index c739d80f7fef..d843fa7ee014 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -25,7 +25,6 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_ctor_defaults_wo_implicit_dataset_id(self): - _DATASET = 'DATASET' self.assertRaises(ValueError, self._makeOne) def test_ctor_defaults_w_implicit_dataset_id(self): From cec3a39a48d3bea37c2d61fe1eb2feae9b9d2bb3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 12:28:45 -0500 Subject: [PATCH 5/5] Partway to fixing 'clear_datastore'. Note that this script likely needs more fixups. --- regression/clear_datastore.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/regression/clear_datastore.py b/regression/clear_datastore.py index 7dc72fa5df27..bda94522ecba 100644 --- a/regression/clear_datastore.py +++ b/regression/clear_datastore.py @@ -35,7 +35,8 @@ def fetch_keys(dataset, kind, fetch_max=FETCH_MAX, query=None, cursor=None): if query is None: - query = Query(kind=kind, dataset=dataset, projection=['__key__']) + query = Query( + dataset_id=dataset.id(), kind=kind, projection=['__key__']) iterator = query.fetch(limit=fetch_max, start_cursor=cursor)