diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py
index 0f36e7f04214..40358f33ee29 100644
--- a/gcloud/storage/_helpers.py
+++ b/gcloud/storage/_helpers.py
@@ -46,6 +46,7 @@ def __init__(self, name=None):
         :param name: The name of the object.
         """
         self.name = name
+        self._is_future = False
         self._properties = {}
         self._changes = set()
 
@@ -54,10 +55,9 @@ def reload(self):
         # Pass only '?projection=noAcl' here because 'acl' and related
         # are handled via custom endpoints.
         query_params = {'projection': 'noAcl'}
-        self._properties = self.connection.api_request(
+        api_response = self.connection.api_request(
             method='GET', path=self.path, query_params=query_params)
-        # If the api_request succeeded, we reset changes.
-        self._changes = set()
+        self._set_properties(api_response)
 
     def _patch_property(self, name, value):
         """Update field of this object's properties.
@@ -74,8 +74,31 @@ def _patch_property(self, name, value):
         :type value: object
         :param value: The value being updated.
         """
+        self._get_properties()[name] = value
         self._changes.add(name)
-        self._properties[name] = value
+
+    def _set_properties(self, value):
+        """Set the properties for the current object.
+
+        :type value: dict
+        :param value: The properties to be set.
+        """
+        self._properties = value
+        # If the values are reset, the changes must be as well.
+        self._changes = set()
+
+    def _get_properties(self):
+        """Get the properties for the current object.
+
+        :rtype: dict
+        :returns: The properties of the current object.
+        :raises: :class:`ValueError` if the object is designated as a
+                 future.
+        """
+        if self._is_future:
+            raise ValueError(self, ('is a future. It cannot be used '
+                                    'until the request has completed'))
+        return self._properties
 
     def patch(self):
         """Sends all changed properties in a PATCH request.
@@ -84,13 +107,12 @@ def patch(self):
         """
         # Pass '?projection=full' here because 'PATCH' documented not
         # to work properly w/ 'noAcl'.
-        update_properties = dict((key, self._properties[key])
+        update_properties = dict((key, self._get_properties()[key])
                                  for key in self._changes)
-        self._properties = self.connection.api_request(
+        api_response = self.connection.api_request(
             method='PATCH', path=self.path, data=update_properties,
             query_params={'projection': 'full'})
-        # If the api_request succeeded, we reset changes.
-        self._changes = set()
+        self._set_properties(api_response)
 
 
 def _scalar_property(fieldname):
@@ -98,7 +120,7 @@ def _scalar_property(fieldname):
     """
     def _getter(self):
         """Scalar property getter."""
-        return self._properties.get(fieldname)
+        return self._get_properties().get(fieldname)
 
     def _setter(self, value):
         """Scalar property setter."""
diff --git a/gcloud/storage/api.py b/gcloud/storage/api.py
index ae3243a6ed52..2ac270102870 100644
--- a/gcloud/storage/api.py
+++ b/gcloud/storage/api.py
@@ -227,7 +227,7 @@ def get_items_from_response(self, response):
         for item in response.get('items', []):
             name = item.get('name')
             bucket = Bucket(name, connection=self.connection)
-            bucket._properties = item
+            bucket._set_properties(item)
             yield bucket
diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py
index ff5aef4f9f4b..f249c0f25183 100644
--- a/gcloud/storage/blob.py
+++ b/gcloud/storage/blob.py
@@ -250,7 +250,7 @@ def download_to_filename(self, filename):
 
         mtime = time.mktime(
             datetime.datetime.strptime(
-                self._properties['updated'],
+                self._get_properties()['updated'],
                 '%Y-%m-%dT%H:%M:%S.%fz').timetuple()
         )
         os.utime(file_obj.name, (mtime, mtime))
@@ -304,7 +304,8 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         :type num_retries: integer
         :param num_retries: Number of upload retries. Defaults to 6.
         """
-        content_type = (content_type or self._properties.get('contentType') or
+        content_type = (content_type or
+                        self._get_properties().get('contentType') or
                         'application/octet-stream')
 
         # Rewind the file if desired.
@@ -358,7 +359,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         if not isinstance(response_content,
                           six.string_types):  # pragma: NO COVER Python3
             response_content = response_content.decode('utf-8')
-        self._properties = json.loads(response_content)
+        self._set_properties(json.loads(response_content))
 
     def upload_from_filename(self, filename, content_type=None):
         """Upload this blob's contents from the content of a named file.
@@ -385,7 +386,8 @@ def upload_from_filename(self, filename, content_type=None):
         :type content_type: string or ``NoneType``
         :param content_type: Optional type of content being uploaded.
         """
-        content_type = content_type or self._properties.get('contentType')
+        content_type = (content_type or
+                        self._get_properties().get('contentType'))
         if content_type is None:
             content_type, _ = mimetypes.guess_type(filename)
 
@@ -500,7 +502,7 @@ def component_count(self):
                   ``None`` if the property is not set locally. This property
                   will not be set on objects not created via ``compose``.
         """
-        component_count = self._properties.get('componentCount')
+        component_count = self._get_properties().get('componentCount')
         if component_count is not None:
             return int(component_count)
 
@@ -514,7 +516,7 @@ def etag(self):
         :rtype: string or ``NoneType``
         :returns: The blob etag or ``None`` if the property is not set locally.
         """
-        return self._properties.get('etag')
+        return self._get_properties().get('etag')
 
     @property
     def generation(self):
@@ -526,7 +528,7 @@ def generation(self):
         :returns: The generation of the blob or ``None`` if the property
                   is not set locally.
         """
-        generation = self._properties.get('generation')
+        generation = self._get_properties().get('generation')
         if generation is not None:
             return int(generation)
 
@@ -540,7 +542,7 @@ def id(self):
         :returns: The ID of the blob or ``None`` if the property is not
                   set locally.
         """
-        return self._properties.get('id')
+        return self._get_properties().get('id')
 
     md5_hash = _scalar_property('md5Hash')
     """MD5 hash for this object.
@@ -563,7 +565,7 @@ def media_link(self):
         :returns: The media link for the blob or ``None`` if the property is
                   not set locally.
         """
-        return self._properties.get('mediaLink')
+        return self._get_properties().get('mediaLink')
 
     @property
     def metadata(self):
@@ -575,7 +577,7 @@ def metadata(self):
         :returns: The metadata associated with the blob or ``None`` if the
                   property is not set locally.
         """
-        return copy.deepcopy(self._properties.get('metadata'))
+        return copy.deepcopy(self._get_properties().get('metadata'))
 
     @metadata.setter
     def metadata(self, value):
@@ -598,7 +600,7 @@ def metageneration(self):
         :returns: The metageneration of the blob or ``None`` if the property
                   is not set locally.
         """
-        metageneration = self._properties.get('metageneration')
+        metageneration = self._get_properties().get('metageneration')
         if metageneration is not None:
             return int(metageneration)
 
@@ -612,7 +614,7 @@ def owner(self):
         :returns: Mapping of owner's role/ID. If the property is not set
                   locally, returns ``None``.
         """
-        return copy.deepcopy(self._properties.get('owner'))
+        return copy.deepcopy(self._get_properties().get('owner'))
 
     @property
     def self_link(self):
@@ -624,7 +626,7 @@ def self_link(self):
         :returns: The self link for the blob or ``None`` if the property is
                   not set locally.
         """
-        return self._properties.get('selfLink')
+        return self._get_properties().get('selfLink')
 
     @property
     def size(self):
@@ -636,7 +638,7 @@ def size(self):
         :returns: The size of the blob or ``None`` if the property is not
                   set locally.
         """
-        size = self._properties.get('size')
+        size = self._get_properties().get('size')
         if size is not None:
             return int(size)
 
@@ -652,7 +654,7 @@ def storage_class(self):
         :returns: If set, one of "STANDARD", "NEARLINE", or
                   "DURABLE_REDUCED_AVAILABILITY", else ``None``.
         """
-        return self._properties.get('storageClass')
+        return self._get_properties().get('storageClass')
 
     @property
     def time_deleted(self):
@@ -665,7 +667,7 @@ def time_deleted(self):
                   set locally. If the blob has not been deleted, this will
                   never be set.
         """
-        return self._properties.get('timeDeleted')
+        return self._get_properties().get('timeDeleted')
 
     @property
     def updated(self):
@@ -677,7 +679,7 @@ def updated(self):
         :returns: RFC3339 valid timestamp, or ``None`` if the property is not
                   set locally.
         """
-        return self._properties.get('updated')
+        return self._get_properties().get('updated')
 
 
 class _UploadConfig(object):
diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index b9fa8f2367dc..295b79ea048d 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -73,7 +73,7 @@ def get_items_from_response(self, response):
         for item in response.get('items', []):
             name = item.get('name')
             blob = Blob(name, bucket=self.bucket)
-            blob._properties = item
+            blob._set_properties(item)
             yield blob
 
@@ -150,9 +150,10 @@ def create(self, project=None):
                                    'from environment.')
 
         query_params = {'project': project}
-        self._properties = self.connection.api_request(
+        api_response = self.connection.api_request(
             method='POST', path='/b', query_params=query_params,
             data={'name': self.name})
+        self._set_properties(api_response)
 
     @property
     def acl(self):
@@ -218,7 +219,7 @@ def get_blob(self, blob_name):
                                                    path=blob.path)
             name = response.get('name')  # Expect this to be blob_name
             blob = Blob(name, bucket=self)
-            blob._properties = response
+            blob._set_properties(response)
             return blob
         except NotFound:
             return None
@@ -406,7 +407,7 @@ def copy_blob(self, blob, destination_bucket, new_name=None):
         new_blob = Blob(bucket=destination_bucket, name=new_name)
         api_path = blob.path + '/copyTo' + new_blob.path
         copy_result = self.connection.api_request(method='POST', path=api_path)
-        new_blob._properties = copy_result
+        new_blob._set_properties(copy_result)
         return new_blob
 
     def upload_file(self, filename, blob_name=None):
@@ -504,7 +505,7 @@ def cors(self):
         :returns: A sequence of mappings describing each CORS policy.
         """
         return [copy.deepcopy(policy)
-                for policy in self._properties.get('cors', ())]
+                for policy in self._get_properties().get('cors', ())]
 
     @cors.setter
     def cors(self, entries):
@@ -529,7 +530,7 @@ def etag(self):
         :returns: The bucket etag or ``None`` if the property is not
                   set locally.
         """
-        return self._properties.get('etag')
+        return self._get_properties().get('etag')
 
     @property
     def id(self):
@@ -541,7 +542,7 @@ def id(self):
         :returns: The ID of the bucket or ``None`` if the property is not
                   set locally.
         """
-        return self._properties.get('id')
+        return self._get_properties().get('id')
 
     @property
     def lifecycle_rules(self):
@@ -553,7 +554,7 @@ def lifecycle_rules(self):
         :rtype: list(dict)
         :returns: A sequence of mappings describing each lifecycle rule.
         """
-        info = self._properties.get('lifecycle', {})
+        info = self._get_properties().get('lifecycle', {})
         return [copy.deepcopy(rule) for rule in info.get('rule', ())]
 
     @lifecycle_rules.setter
@@ -588,7 +589,7 @@ def get_logging(self):
         :returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix``
                   (if logging is enabled), or None (if not).
         """
-        info = self._properties.get('logging')
+        info = self._get_properties().get('logging')
         return copy.deepcopy(info)
 
     def enable_logging(self, bucket_name, object_prefix=''):
@@ -622,7 +623,7 @@ def metageneration(self):
         :returns: The metageneration of the bucket or ``None`` if the property
                   is not set locally.
         """
-        metageneration = self._properties.get('metageneration')
+        metageneration = self._get_properties().get('metageneration')
         if metageneration is not None:
             return int(metageneration)
 
@@ -636,7 +637,7 @@ def owner(self):
         :returns: Mapping of owner's role/ID. If the property is not set
                   locally, returns ``None``.
         """
-        return copy.deepcopy(self._properties.get('owner'))
+        return copy.deepcopy(self._get_properties().get('owner'))
 
     @property
     def project_number(self):
@@ -648,7 +649,7 @@ def project_number(self):
         :returns: The project number that owns the bucket or ``None`` if the
                   property is not set locally.
         """
-        project_number = self._properties.get('projectNumber')
+        project_number = self._get_properties().get('projectNumber')
         if project_number is not None:
             return int(project_number)
 
@@ -662,7 +663,7 @@ def self_link(self):
         :returns: The self link for the bucket or ``None`` if the property is
                   not set locally.
         """
-        return self._properties.get('selfLink')
+        return self._get_properties().get('selfLink')
 
     @property
     def storage_class(self):
@@ -676,7 +677,7 @@ def storage_class(self):
         :returns: If set, one of "STANDARD", "NEARLINE", or
                   "DURABLE_REDUCED_AVAILABILITY", else ``None``.
         """
-        return self._properties.get('storageClass')
+        return self._get_properties().get('storageClass')
 
     @property
     def time_created(self):
@@ -688,7 +689,7 @@ def time_created(self):
         :returns: RFC3339 valid timestamp, or ``None`` if the property is not
                   set locally.
         """
-        return self._properties.get('timeCreated')
+        return self._get_properties().get('timeCreated')
 
     @property
     def versioning_enabled(self):
@@ -700,7 +701,7 @@ def versioning_enabled(self):
         :rtype: boolean
         :returns: True if enabled, else False.
         """
-        versioning = self._properties.get('versioning', {})
+        versioning = self._get_properties().get('versioning', {})
         return versioning.get('enabled', False)
 
     @versioning_enabled.setter
diff --git a/gcloud/storage/iterator.py b/gcloud/storage/iterator.py
index 284e9b5392e4..ef65882988f2 100644
--- a/gcloud/storage/iterator.py
+++ b/gcloud/storage/iterator.py
@@ -26,7 +26,7 @@ def get_items_from_response(self, response):
             items = response.get('items', [])
             for item in items:
                 my_item = MyItemClass(other_arg=True)
-                my_item._properties = item
+                my_item._set_properties(item)
                 yield my_item
 
 You then can use this to get **all** the results from a resource::
diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py
index 8734f1ffb093..56a4b1b38ba4 100644
--- a/gcloud/storage/test__helpers.py
+++ b/gcloud/storage/test__helpers.py
@@ -46,6 +46,11 @@ def test_path_is_abstract(self):
         mixin = self._makeOne()
         self.assertRaises(NotImplementedError, lambda: mixin.path)
 
+    def test_future_fails(self):
+        mixin = self._makeOne()
+        mixin._is_future = True
+        self.assertRaises(ValueError, mixin._get_properties)
+
     def test_reload(self):
         connection = _Connection({'foo': 'Foo'})
         derived = self._derivedClass(connection, '/path')()
@@ -105,6 +110,9 @@ def test_getter(self):
         class Test(object):
             def __init__(self, **kw):
                 self._properties = kw.copy()
+
+            def _get_properties(self):
+                return self._properties
 
         do_re_mi = self._callFUT('solfege')
         test = Test(solfege='Latido')
diff --git a/regression/storage.py b/regression/storage.py
index 027d5b57cdc7..844e9d265dc8 100644
--- a/regression/storage.py
+++ b/regression/storage.py
@@ -115,7 +115,7 @@ class TestStorageWriteFiles(TestStorageFiles):
 
     def test_large_file_write_from_stream(self):
         blob = storage.Blob(bucket=self.bucket, name='LargeFile')
-        self.assertEqual(blob._properties, {})
+        self.assertEqual(blob._get_properties(), {})
 
         file_data = self.FILES['big']
         with open(file_data['path'], 'rb') as file_obj:
@@ -129,7 +129,7 @@ def test_large_file_write_from_stream(self):
 
     def test_small_file_write_from_filename(self):
         blob = storage.Blob(bucket=self.bucket, name='SmallFile')
-        self.assertEqual(blob._properties, {})
+        self.assertEqual(blob._get_properties(), {})
 
         file_data = self.FILES['simple']
         blob.upload_from_filename(file_data['path'])
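
A minimal usage sketch of the accessors this patch introduces, assuming the mixin patched in gcloud/storage/_helpers.py is the _PropertyMixin class and that its ``path`` property is abstract (the ``Example`` subclass and its values below are hypothetical, for illustration only)::

    from gcloud.storage._helpers import _PropertyMixin

    class Example(_PropertyMixin):
        # Hypothetical subclass: ``path`` is abstract on the mixin, so a
        # concrete value is supplied here just to make the sketch runnable.
        @property
        def path(self):
            return '/example'

    obj = Example(name='example')
    obj._set_properties({'etag': 'CAE='})      # also resets obj._changes
    assert obj._get_properties() == {'etag': 'CAE='}

    obj._is_future = True                      # mark as a pending ("future") object
    try:
        obj._get_properties()                  # raises ValueError while the
    except ValueError:                         # request has not yet completed
        pass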