
Implement predefined acl (#4757)
* Storage: Implement predefined acl

* Review Changes

* Review Changes2

* Review Changes3

* use keyword arguments for predefined_acl
chemelnucfin authored Feb 27, 2018
1 parent 00ea201 commit 34cedf3
Showing 5 changed files with 143 additions and 43 deletions.
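Taken together, the five files let a caller apply one of Cloud Storage's predefined ("canned") ACLs in the same request that uploads the object, rather than patching the ACL afterwards. A minimal usage sketch (the bucket and object names are illustrative, not part of the commit):

    from google.cloud import storage

    client = storage.Client()
    bucket = client.bucket('my-bucket')   # hypothetical bucket name
    blob = bucket.blob('logo.png')

    # The canned ACL travels with the upload request itself.
    blob.upload_from_filename('logo.png', predefined_acl='publicRead')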
23 changes: 18 additions & 5 deletions storage/google/cloud/storage/acl.py
@@ -208,6 +208,23 @@ def _ensure_loaded(self):
        if not self.loaded:
            self.reload()

    @classmethod
    def validate_predefined(cls, predefined):
        """Ensure a predefined ACL name is valid, mapping any XML alias
        (e.g. ``public-read``) to its JSON equivalent (``publicRead``).

        :type predefined: str
        :param predefined: name of a predefined ACL

        :rtype: str
        :returns: validated JSON name of the predefined ACL

        :raises: :exc:`ValueError` if ``predefined`` is not a valid
                 predefined ACL name.
        """
        predefined = cls.PREDEFINED_XML_ACLS.get(predefined, predefined)
        if predefined and predefined not in cls.PREDEFINED_JSON_ACLS:
            raise ValueError("Invalid predefined ACL: %s" % (predefined,))
        return predefined

    def reset(self):
        """Remove all entities from the ACL, and clear the ``loaded`` flag."""
        self.entities.clear()
@@ -502,11 +519,7 @@ def save_predefined(self, predefined, client=None):
        :param client: Optional. The client to use. If not passed, falls back
            to the ``client`` stored on the ACL's parent.
        """
-        predefined = self.PREDEFINED_XML_ACLS.get(predefined, predefined)
-
-        if predefined not in self.PREDEFINED_JSON_ACLS:
-            raise ValueError("Invalid predefined ACL: %s" % (predefined,))
-
+        predefined = self.validate_predefined(predefined)
        self._save(None, predefined, client)

    def clear(self, client=None):
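For reference, the new classmethod accepts either the legacy XML-style alias (e.g. 'public-read') or the JSON name ('publicRead'), normalizes to the JSON form, and lets None pass through untouched. A standalone sketch of the same logic, with an abbreviated alias table (the real class defines the full PREDEFINED_XML_ACLS and PREDEFINED_JSON_ACLS collections):

    # Abbreviated tables for illustration only.
    PREDEFINED_JSON_ACLS = frozenset([
        'authenticatedRead', 'private', 'projectPrivate',
        'publicRead', 'publicReadWrite',
    ])
    PREDEFINED_XML_ACLS = {
        'authenticated-read': 'authenticatedRead',
        'public-read': 'publicRead',
        'public-read-write': 'publicReadWrite',
    }

    def validate_predefined(predefined):
        # Map an XML alias to its JSON name; unknown values pass through.
        predefined = PREDEFINED_XML_ACLS.get(predefined, predefined)
        # None means "no predefined ACL"; anything else must be on the list.
        if predefined and predefined not in PREDEFINED_JSON_ACLS:
            raise ValueError("Invalid predefined ACL: %s" % (predefined,))
        return predefined

    validate_predefined('public-read')   # -> 'publicRead'
    validate_predefined(None)            # -> None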
67 changes: 53 additions & 14 deletions storage/google/cloud/storage/blob.py
@@ -55,6 +55,7 @@
from google.cloud.storage._helpers import _PropertyMixin
from google.cloud.storage._helpers import _scalar_property
from google.cloud.storage._signing import generate_signed_url
from google.cloud.storage.acl import ACL
from google.cloud.storage.acl import ObjectACL


@@ -633,7 +634,7 @@ def _get_upload_arguments(self, content_type):
return headers, object_metadata, content_type

    def _do_multipart_upload(self, client, stream, content_type,
-                             size, num_retries):
+                             size, num_retries, predefined_acl):
"""Perform a multipart upload.
Assumes ``chunk_size`` is :data:`None` on the current blob.
@@ -664,6 +665,9 @@ def _do_multipart_upload(self, client, stream, content_type,
:param num_retries: Number of upload retries. (Deprecated: This
argument will be removed in a future release.)
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
:rtype: :class:`~requests.Response`
:returns: The "200 OK" response object returned after the multipart
upload request.
@@ -688,6 +692,8 @@

        if self.user_project is not None:
            name_value_pairs.append(('userProject', self.user_project))
        if predefined_acl is not None:
            name_value_pairs.append(('predefinedAcl', predefined_acl))

        upload_url = _add_query_parameters(base_url, name_value_pairs)
        upload = MultipartUpload(upload_url, headers=headers)
@@ -702,8 +708,9 @@
return response
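At the wire level, the new argument is just one more query parameter on the upload URL. A sketch of the name/value-pair pattern both upload paths follow, using the standard library's urlencode in place of the package's private _add_query_parameters helper (URL and values are illustrative):

    from urllib.parse import urlencode

    base_url = ('https://www.googleapis.com/upload/storage'
                '/v1/b/my-bucket/o?uploadType=multipart')
    name_value_pairs = []

    user_project = None            # stand-ins for the blob's attributes
    predefined_acl = 'publicRead'

    if user_project is not None:
        name_value_pairs.append(('userProject', user_project))
    if predefined_acl is not None:
        name_value_pairs.append(('predefinedAcl', predefined_acl))

    upload_url = '{}&{}'.format(base_url, urlencode(name_value_pairs))
    # -> ...?uploadType=multipart&predefinedAcl=publicRead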

    def _initiate_resumable_upload(self, client, stream, content_type,
-                                   size, num_retries, extra_headers=None,
-                                   chunk_size=None):
+                                   size, num_retries,
+                                   predefined_acl=None,
+                                   extra_headers=None, chunk_size=None):
"""Initiate a resumable upload.
The content type of the upload will be determined in order
@@ -728,6 +735,9 @@ from ``stream``). If not provided, the upload will be
from ``stream``). If not provided, the upload will be
concluded once ``stream`` is exhausted (or :data:`None`).
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
:type num_retries: int
:param num_retries: Number of upload retries. (Deprecated: This
argument will be removed in a future release.)
@@ -766,6 +776,8 @@ def _initiate_resumable_upload(self, client, stream, content_type,

        if self.user_project is not None:
            name_value_pairs.append(('userProject', self.user_project))
        if predefined_acl is not None:
            name_value_pairs.append(('predefinedAcl', predefined_acl))

        upload_url = _add_query_parameters(base_url, name_value_pairs)
        upload = ResumableUpload(upload_url, chunk_size, headers=headers)
@@ -781,7 +793,7 @@ def _initiate_resumable_upload(self, client, stream, content_type,
return upload, transport

    def _do_resumable_upload(self, client, stream, content_type,
-                             size, num_retries):
+                             size, num_retries, predefined_acl):
"""Perform a resumable upload.
Assumes ``chunk_size`` is not :data:`None` on the current blob.
@@ -812,19 +824,24 @@ def _do_resumable_upload(self, client, stream, content_type,
:param num_retries: Number of upload retries. (Deprecated: This
argument will be removed in a future release.)
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
:rtype: :class:`~requests.Response`
:returns: The "200 OK" response object returned after the final chunk
is uploaded.
"""
        upload, transport = self._initiate_resumable_upload(
-            client, stream, content_type, size, num_retries)
+            client, stream, content_type, size, num_retries,
+            predefined_acl=predefined_acl)

        while not upload.finished:
            response = upload.transmit_next_chunk(transport)

        return response

-    def _do_upload(self, client, stream, content_type, size, num_retries):
+    def _do_upload(self, client, stream, content_type,
+                   size, num_retries, predefined_acl):
"""Determine an upload strategy and then perform the upload.
If the current blob has a ``chunk_size`` set, then a resumable upload
@@ -857,22 +874,28 @@ def _do_upload(self, client, stream, content_type, size, num_retries):
:param num_retries: Number of upload retries. (Deprecated: This
argument will be removed in a future release.)
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
:rtype: dict
:returns: The parsed JSON from the "200 OK" response. This will be the
**only** response in the multipart case and it will be the
**final** response in the resumable case.
"""
        if self.chunk_size is None:
            response = self._do_multipart_upload(
-                client, stream, content_type, size, num_retries)
+                client, stream, content_type,
+                size, num_retries, predefined_acl)
        else:
            response = self._do_resumable_upload(
-                client, stream, content_type, size, num_retries)
+                client, stream, content_type, size,
+                num_retries, predefined_acl)

        return response.json()
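From the caller's side, the strategy switch is the blob's chunk_size: leave it unset for a single multipart request, or set it (a multiple of 256 KiB) to take the chunked resumable path; predefined_acl now rides along either way. An illustrative sketch, reusing the hypothetical bucket from the earlier example:

    blob = bucket.blob('big.bin')
    blob.chunk_size = 4 * 256 * 1024   # multiple of 256 KiB selects the resumable path
    blob.upload_from_filename('big.bin', predefined_acl='bucketOwnerRead')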

    def upload_from_file(self, file_obj, rewind=False, size=None,
-                         content_type=None, num_retries=None, client=None):
+                         content_type=None, num_retries=None, client=None,
+                         predefined_acl=None):
"""Upload the contents of this blob from a file-like object.
The content type of the upload will be determined in order
@@ -930,6 +953,9 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
:param client: (Optional) The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
:raises: :class:`~google.cloud.exceptions.GoogleCloudError`
if the upload response returns an error status.
@@ -941,14 +967,18 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
            warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning)

        _maybe_rewind(file_obj, rewind=rewind)
        predefined_acl = ACL.validate_predefined(predefined_acl)

        try:
            created_json = self._do_upload(
-                client, file_obj, content_type, size, num_retries)
+                client, file_obj, content_type,
+                size, num_retries, predefined_acl)
            self._set_properties(created_json)
        except resumable_media.InvalidResponse as exc:
            _raise_from_invalid_response(exc)
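Because validation happens before any bytes are sent (the ACL.validate_predefined call above), a misspelled canned ACL fails fast with ValueError rather than mid-upload. A usage sketch with an open file handle (names are illustrative):

    with open('report.csv', 'rb') as fh:
        blob = bucket.blob('report.csv')
        blob.upload_from_file(fh, content_type='text/csv',
                              predefined_acl='projectPrivate')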

-    def upload_from_filename(self, filename, content_type=None, client=None):
+    def upload_from_filename(self, filename, content_type=None, client=None,
+                             predefined_acl=None):
"""Upload this blob's contents from the content of a named file.
The content type of the upload will be determined in order
@@ -982,16 +1012,20 @@ def upload_from_filename(self, filename, content_type=None, client=None):
:type client: :class:`~google.cloud.storage.client.Client`
:param client: (Optional) The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
"""
content_type = self._get_content_type(content_type, filename=filename)

        with open(filename, 'rb') as file_obj:
            total_bytes = os.fstat(file_obj.fileno()).st_size
            self.upload_from_file(
                file_obj, content_type=content_type, client=client,
-                size=total_bytes)
+                size=total_bytes, predefined_acl=predefined_acl)

-    def upload_from_string(self, data, content_type='text/plain', client=None):
+    def upload_from_string(self, data, content_type='text/plain', client=None,
+                           predefined_acl=None):
"""Upload contents of this blob from the provided string.
.. note::
@@ -1020,12 +1054,16 @@ def upload_from_string(self, data, content_type='text/plain', client=None):
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
:type predefined_acl: str
:param predefined_acl: (Optional) predefined access control list
"""
        data = _to_bytes(data, encoding='utf-8')
        string_buffer = BytesIO(data)
        self.upload_from_file(
            file_obj=string_buffer, size=len(data),
-            content_type=content_type, client=client)
+            content_type=content_type, client=client,
+            predefined_acl=predefined_acl)
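The string helper funnels into upload_from_file as well, so the same keyword works there. Illustrative sketch:

    blob = bucket.blob('notes.txt')
    blob.upload_from_string('hello world', predefined_acl='private')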

    def create_resumable_upload_session(
            self,
@@ -1110,6 +1148,7 @@ def create_resumable_upload_session(
        # matters when **sending** bytes to an upload.
        upload, _ = self._initiate_resumable_upload(
            client, dummy_stream, content_type, size, None,
+            predefined_acl=None,
            extra_headers=extra_headers,
            chunk_size=self._CHUNK_SIZE_MULTIPLE)

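One caveat visible in the last hunk: create_resumable_upload_session pins predefined_acl=None when it initiates the upload, so resumable upload sessions created through that method do not yet expose the canned-ACL option.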
23 changes: 23 additions & 0 deletions storage/tests/system.py
@@ -408,6 +408,29 @@ def test_blob_acl_w_user_project(self):
        acl.save()
        self.assertFalse(acl.has_entity('allUsers'))

    def test_upload_blob_acl(self):
        control = self.bucket.blob('logo')
        control_data = self.FILES['logo']

        blob = self.bucket.blob('SmallFile')
        file_data = self.FILES['simple']

        try:
            control.upload_from_filename(control_data['path'])
            blob.upload_from_filename(file_data['path'],
                                      predefined_acl='publicRead')
        finally:
            self.case_blobs_to_delete.append(blob)
            self.case_blobs_to_delete.append(control)

        control_acl = control.acl
        self.assertNotIn('READER', control_acl.all().get_roles())
        acl = blob.acl
        self.assertIn('READER', acl.all().get_roles())
        acl.all().revoke_read()
        self.assertSequenceEqual(acl.all().get_roles(), set([]))
        self.assertEqual(control_acl.all().get_roles(), acl.all().get_roles())
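The control blob guards against a false positive: if the bucket's default object ACL already granted allUsers the READER role, the publicRead assertion would pass trivially. The assertions lean on the ACL entity model, roughly:

    roles = blob.acl.all().get_roles()   # roles granted to the allUsers entity
    # {'READER'} right after an upload with predefined_acl='publicRead'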

    def test_write_metadata(self):
        filename = self.FILES['logo']['path']
        blob_name = os.path.basename(filename)
8 changes: 8 additions & 0 deletions storage/tests/unit/test_acl.py
@@ -137,6 +137,14 @@ def _get_target_class():
    def _make_one(self, *args, **kw):
        return self._get_target_class()(*args, **kw)

    def test_validate_predefined(self):
        ACL = self._get_target_class()
        self.assertIsNone(ACL.validate_predefined(None))
        self.assertEqual(ACL.validate_predefined('public-read'), 'publicRead')
        self.assertEqual(ACL.validate_predefined('publicRead'), 'publicRead')
        with self.assertRaises(ValueError):
            ACL.validate_predefined('publicread')

    def test_ctor(self):
        acl = self._make_one()
        self.assertEqual(acl.entities, {})