477: Prepare to remove dataset #499

Closed
wants to merge 9 commits
47 changes: 29 additions & 18 deletions gcloud/datastore/__init__.py
@@ -61,7 +61,7 @@
_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'


def set_default_dataset(dataset_id=None):
def set_default_dataset_id(dataset_id=None):
"""Set default dataset ID either explicitly or implicitly as fall-back.

In the implicit case, currently only supports the environment variable but will
@@ -79,7 +79,6 @@ def set_default_dataset(dataset_id=None):

if dataset_id is not None:
_implicit_environ.DATASET_ID = dataset_id
_implicit_environ.DATASET = get_dataset(dataset_id)


def set_default_connection(connection=None):
@@ -138,28 +137,40 @@ def get_dataset(dataset_id):
return Dataset(dataset_id, connection=connection)


def _require_dataset():
"""Convenience method to ensure DATASET is set.
def _require_dataset_id(dataset_id=None):
"""Infer a dataset ID from the environment, if not passed explicitly.

:type dataset_id: :class:`str`.
:param dataset_id: Optional.

:rtype: string
:returns: A dataset ID based on the current environment.
:raises: :class:`EnvironmentError` if DATASET is not set.
:raises: :class:`EnvironmentError` if ``dataset_id`` is None,
and cannot be inferred from the environment.
"""
if _implicit_environ.DATASET is None:
raise EnvironmentError('Dataset could not be inferred.')
return _implicit_environ.DATASET
if dataset_id is None:
if _implicit_environ.DATASET_ID is None:
raise EnvironmentError('Dataset ID could not be inferred.')
dataset_id = _implicit_environ.DATASET_ID
return dataset_id


def _require_connection():
"""Convenience method to ensure CONNECTION is set.
def _require_connection(connection=None):
"""Infer a connection from the environment, if not passed explicitly.

:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Optional.

:rtype: :class:`gcloud.datastore.connection.Connection`
:returns: A connection based on the current environment.
:raises: :class:`EnvironmentError` if CONNECTION is not set.
:raises: :class:`EnvironmentError` if ``connection`` is None, and
cannot be inferred from the environment.
"""
if _implicit_environ.CONNECTION is None:
raise EnvironmentError('Connection could not be inferred.')
return _implicit_environ.CONNECTION
if connection is None:
if _implicit_environ.CONNECTION is None:
raise EnvironmentError('Connection could not be inferred.')
connection = _implicit_environ.CONNECTION
return connection


def get_entities(keys, missing=None, deferred=None,
@@ -188,8 +199,8 @@ def get_entities(keys, missing=None, deferred=None,
:rtype: list of :class:`gcloud.datastore.entity.Entity`
:returns: The requested entities.
"""
connection = connection or _require_connection()
dataset_id = dataset_id or _require_dataset().id()
connection = _require_connection(connection)
dataset_id = _require_dataset_id(dataset_id)

entity_pbs = connection.lookup(
dataset_id=dataset_id,
@@ -233,8 +244,8 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None):
:returns: The (complete) keys allocated with `incomplete_key` as root.
:raises: `ValueError` if `incomplete_key` is not a partial key.
"""
connection = connection or _require_connection()
dataset_id = dataset_id or _require_dataset().id()
connection = _require_connection(connection)
dataset_id = _require_dataset_id(dataset_id)

if not incomplete_key.is_partial:
raise ValueError(('Key is not partial.', incomplete_key))
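Taken together, the public helpers in this module can now resolve both ``connection`` and ``dataset_id`` implicitly. A hedged sketch of the resulting calling convention (return-value reprs abbreviated, not taken from the PR):

>>> from gcloud import datastore
>>> from gcloud.datastore.key import Key
>>> datastore.set_default_connection()    # infer a connection from the environment
>>> datastore.set_default_dataset_id('my-dataset-id')
>>> key = Key('MyKind', 1234)             # dataset ID picked up from the default
>>> datastore.get_entities([key])         # connection / dataset_id inferred too
[<Entity object>]
>>> datastore.allocate_ids(Key('MyKind'), 2)
[<Key object>, <Key object>]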
34 changes: 16 additions & 18 deletions gcloud/datastore/connection.py
@@ -163,17 +163,13 @@ def lookup(self, dataset_id, key_pbs,
(:class:`gcloud.datastore.datastore_v1_pb2.Key` and
:class:`gcloud.datastore.datastore_v1_pb2.Entity`) and is used
under the hood for methods like
:func:`gcloud.datastore.dataset.Dataset.get_entity`:
:func:`gcloud.datastore.key.Key.get`:

>>> from gcloud import datastore
>>> from gcloud.datastore.key import Key
>>> connection = datastore.get_connection()
>>> dataset = connection.dataset('dataset-id')
>>> key = Key(dataset=dataset).kind('MyKind').id(1234)

Using the :class:`gcloud.datastore.dataset.Dataset` helper:

>>> dataset.get_entity(key)
>>> key = Key('MyKind', 1234, dataset_id='dataset-id')
>>> key.get()
<Entity object>

Using the ``connection`` class directly:
@@ -182,7 +178,7 @@ def lookup(self, dataset_id, key_pbs,
<Entity protobuf>
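A hedged sketch of the direct call, based on the ``lookup(dataset_id, key_pbs)`` signature documented below (single-key form, output shape as shown above):

>>> connection.lookup('dataset-id', key.to_protobuf())
<Entity protobuf>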

:type dataset_id: string
:param dataset_id: The dataset to look up the keys.
:param dataset_id: The ID of the dataset to look up the keys.

:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
(or a single Key)
@@ -262,12 +258,12 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False):
uses this method to fetch data:

>>> from gcloud import datastore
>>> from gcloud.datastore.query import Query
>>> connection = datastore.get_connection()
>>> dataset = connection.dataset('dataset-id')
>>> query = dataset.query().kind('MyKind').filter(
... 'property', '=', 'val')
>>> query = Query(kind='MyKind', dataset_id='dataset-id')
>>> query.add_filter('property', '=', 'val')

Using the `fetch`` method...
Using the query's ``fetch_page`` method...

>>> entities, cursor, more_results = query.fetch_page()
>>> entities
@@ -319,7 +315,7 @@ def begin_transaction(self, dataset_id, serializable=False):
Maps the ``DatastoreService.BeginTransaction`` protobuf RPC.

:type dataset_id: string
:param dataset_id: The dataset over which to execute the transaction.
:param dataset_id: The ID of the dataset to which the transaction applies.
"""

if self.transaction():
@@ -346,7 +342,7 @@ def commit(self, dataset_id, mutation_pb):
Maps the ``DatastoreService.Commit`` protobuf RPC.

:type dataset_id: string
:param dataset_id: The dataset in which to perform the changes.
:param dataset_id: The ID of the dataset to which the transaction applies.

:type mutation_pb: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`.
:param mutation_pb: The protobuf for the mutations being saved.
@@ -376,7 +372,8 @@ def rollback(self, dataset_id):
if the connection isn't currently in a transaction.

:type dataset_id: string
:param dataset_id: The dataset to which the transaction belongs.
:param dataset_id: The ID of the dataset to which the transaction
belongs.
"""
if not self.transaction() or not self.transaction().id:
raise ValueError('No transaction to rollback.')
@@ -393,7 +390,8 @@ def allocate_ids(self, dataset_id, key_pbs):
Maps the ``DatastoreService.AllocateIds`` protobuf RPC.

:type dataset_id: string
:param dataset_id: The dataset to which the transaction belongs.
:param dataset_id: The ID of the dataset to which the transaction
belongs.

:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pbs: The keys for which the backend should allocate IDs.
@@ -418,7 +416,7 @@ def save_entity(self, dataset_id, key_pb, properties,
not passed in 'properties' will no longer be set for the entity.

:type dataset_id: string
:param dataset_id: The dataset in which to save the entity.
:param dataset_id: The ID of the dataset in which to save the entity.

:type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pb: The complete or partial key for the entity.
@@ -490,7 +488,7 @@ def delete_entities(self, dataset_id, key_pbs):
:func:`gcloud.datastore.entity.Entity.delete` method.

:type dataset_id: string
:param dataset_id: The dataset from which to delete the keys.
:param dataset_id: The ID of the dataset from which to delete the keys.

:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pbs: The keys to delete from the datastore.
5 changes: 2 additions & 3 deletions gcloud/datastore/entity.py
@@ -40,10 +40,9 @@ class Entity(dict):
This means you could take an existing entity and change the key
to duplicate the object.

Use :func:`gcloud.datastore.dataset.Dataset.get_entity`
to retrieve an existing entity.
Use :func:`gcloud.datastore.key.Key.get` to retrieve an existing entity.

>>> dataset.get_entity(key)
>>> key.get()
<Entity[{'kind': 'EntityKind', id: 1234}] {'property': 'value'}>

You can then set values on the entity just like you would on any
45 changes: 26 additions & 19 deletions gcloud/datastore/key.py
@@ -66,28 +66,13 @@ def __init__(self, *path_args, **kwargs):
keyword argument.
"""
self._flat_path = path_args
self._parent = kwargs.get('parent')
parent = self._parent = kwargs.get('parent')
self._namespace = kwargs.get('namespace')
self._dataset_id = kwargs.get('dataset_id')
dataset_id = kwargs.get('dataset_id')
self._dataset_id = _validate_dataset_id(dataset_id, parent)
# _flat_path, _parent, _namespace and _dataset_id must be set before
# _combine_args() is called.
self._path = self._combine_args()
self._validate_dataset_id()

def _validate_dataset_id(self):
"""Ensures the dataset ID is set.

If unset, attempts to imply the ID from the environment.

:raises: `ValueError` if there is no `dataset_id` and none
can be implied.
"""
if self._dataset_id is None:
if _implicit_environ.DATASET is not None:
# This assumes DATASET.id() is not None.
self._dataset_id = _implicit_environ.DATASET.id()
else:
raise ValueError('A Key must have a dataset ID set.')

@staticmethod
def _parse_path(path_args):
@@ -345,7 +330,7 @@ def dataset_id(self):
"""Dataset ID getter.

:rtype: :class:`str`
:returns: The key's dataset.
:returns: The key's dataset ID.
"""
return self._dataset_id

@@ -384,3 +369,25 @@ def parent(self):

def __repr__(self):
return '<Key%s, dataset=%s>' % (self.path, self.dataset_id)


def _validate_dataset_id(dataset_id, parent):
    """Ensure the dataset ID is set appropriately.

    If ``parent`` is passed, skip the test (it will be checked / fixed up
    later).

    If ``dataset_id`` is unset, attempt to infer the ID from the environment.

    :raises: `ValueError` if ``dataset_id`` is None and none can be inferred.
    """
    if parent is None:
        if dataset_id is None:
            if _implicit_environ.DATASET_ID is None:
                raise ValueError("A Key must have a dataset ID set.")
            dataset_id = _implicit_environ.DATASET_ID

    return dataset_id
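A doctest-style walk-through of the three branches above (module paths as in this diff; assigning ``_implicit_environ.DATASET_ID`` directly stands in for calling ``set_default_dataset_id``):

>>> from gcloud.datastore import _implicit_environ
>>> from gcloud.datastore.key import Key, _validate_dataset_id
>>> _validate_dataset_id('explicit-id', None)       # explicit ID passes through
'explicit-id'
>>> _implicit_environ.DATASET_ID = 'implied-id'
>>> _validate_dataset_id(None, None)                # inferred from the environment
'implied-id'
>>> parent = Key('Parent', 1, dataset_id='explicit-id')
>>> _validate_dataset_id(None, parent) is None      # check deferred when a parent is given
True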