From ec42cab5196065c16f5888cad073b032c5a9817c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 7 Jul 2015 10:33:32 -0700 Subject: [PATCH 1/6] Implementing Client.get_bucket and lookup_bucket. Essentially just copied and pasted from storage.api, with a few tweaks for docstring typos. Only implemented 2 of 3 tests for each method since the third tests were for the case of an implicit connection. --- gcloud/storage/client.py | 52 ++++++++++++++++ gcloud/storage/test_client.py | 111 ++++++++++++++++++++++++++++++++++ 2 files changed, 163 insertions(+) diff --git a/gcloud/storage/client.py b/gcloud/storage/client.py index 7aac6e847b2f..2c36a9612ffa 100644 --- a/gcloud/storage/client.py +++ b/gcloud/storage/client.py @@ -16,6 +16,8 @@ from gcloud.client import JSONClient +from gcloud.exceptions import NotFound +from gcloud.storage.bucket import Bucket from gcloud.storage.connection import Connection @@ -41,3 +43,53 @@ class Client(JSONClient): """ _connection_class = Connection + + def get_bucket(self, bucket_name): + """Get a bucket by name. + + If the bucket isn't found, this will raise a + :class:`gcloud.storage.exceptions.NotFound`. + + For example:: + + >>> try: + >>> bucket = client.get_bucket('my-bucket') + >>> except gcloud.exceptions.NotFound: + >>> print 'Sorry, that bucket does not exist!' + + This implements "storage.buckets.get". + + :type bucket_name: string + :param bucket_name: The name of the bucket to get. + + :rtype: :class:`gcloud.storage.bucket.Bucket` + :returns: The bucket matching the name provided. + :raises: :class:`gcloud.exceptions.NotFound` + """ + bucket = Bucket(bucket_name) + bucket.reload(connection=self.connection) + return bucket + + def lookup_bucket(self, bucket_name): + """Get a bucket by name, returning None if not found. 
+ + You can use this if you would rather check for a None value + than catching an exception:: + + >>> bucket = client.lookup_bucket('doesnt-exist') + >>> print bucket + None + >>> bucket = client.lookup_bucket('my-bucket') + >>> print bucket + + + :type bucket_name: string + :param bucket_name: The name of the bucket to get. + + :rtype: :class:`gcloud.storage.bucket.Bucket` + :returns: The bucket matching the name provided or None if not found. + """ + try: + return self.get_bucket(bucket_name) + except NotFound: + return None diff --git a/gcloud/storage/test_client.py b/gcloud/storage/test_client.py index 5e46410653d2..24982417349f 100644 --- a/gcloud/storage/test_client.py +++ b/gcloud/storage/test_client.py @@ -34,6 +34,103 @@ def test_ctor_connection_type(self): self.assertTrue(isinstance(client.connection, Connection)) self.assertTrue(client.connection.credentials is CREDENTIALS) + def test_get_bucket_miss(self): + from gcloud.exceptions import NotFound + + PROJECT = object() + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + NONESUCH = 'nonesuch' + URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b', + 'nonesuch?projection=noAcl', + ]) + http = client.connection._http = _Http( + {'status': '404', 'content-type': 'application/json'}, + b'{}', + ) + self.assertRaises(NotFound, client.get_bucket, NONESUCH) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + + def test_get_bucket_hit(self): + from gcloud.storage.bucket import Bucket + + PROJECT = object() + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + BLOB_NAME = 'blob-name' + URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b', + '%s?projection=noAcl' % (BLOB_NAME,), + ]) + http = client.connection._http = _Http( + {'status': '200', 'content-type': 
'application/json'}, + '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), + ) + + bucket = client.get_bucket(BLOB_NAME) + self.assertTrue(isinstance(bucket, Bucket)) + self.assertEqual(bucket.name, BLOB_NAME) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + + def test_lookup_bucket_miss(self): + PROJECT = object() + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + NONESUCH = 'nonesuch' + URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b', + 'nonesuch?projection=noAcl', + ]) + http = client.connection._http = _Http( + {'status': '404', 'content-type': 'application/json'}, + b'{}', + ) + bucket = client.lookup_bucket(NONESUCH) + self.assertEqual(bucket, None) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + + def test_lookup_bucket_hit(self): + from gcloud.storage.bucket import Bucket + + PROJECT = object() + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + BLOB_NAME = 'blob-name' + URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b', + '%s?projection=noAcl' % (BLOB_NAME,), + ]) + http = client.connection._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), + ) + + bucket = client.lookup_bucket(BLOB_NAME) + self.assertTrue(isinstance(bucket, Bucket)) + self.assertEqual(bucket.name, BLOB_NAME) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + class _Credentials(object): @@ -46,3 +143,17 @@ def create_scoped_required(): def create_scoped(self, scope): self._scopes = scope return self + + +class _Http(object): + + _called_with = None + + def __init__(self, headers, content): + from httplib2 import Response + 
self._response = Response(headers) + self._content = content + + def request(self, **kw): + self._called_with = kw + return self._response, self._content From 5d3f1d8868ce872c5b2cc67d58fdc4fc9b7e85db Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 7 Jul 2015 10:46:17 -0700 Subject: [PATCH 2/6] Implementing Client.create_bucket in storage. --- gcloud/storage/client.py | 24 ++++++++++++++++++ gcloud/storage/test_client.py | 48 +++++++++++++++++++++++++++++++++++ 2 files changed, 72 insertions(+) diff --git a/gcloud/storage/client.py b/gcloud/storage/client.py index 2c36a9612ffa..f550ed500c5f 100644 --- a/gcloud/storage/client.py +++ b/gcloud/storage/client.py @@ -93,3 +93,27 @@ def lookup_bucket(self, bucket_name): return self.get_bucket(bucket_name) except NotFound: return None + + def create_bucket(self, bucket_name): + """Create a new bucket. + + For example:: + + >>> bucket = client.create_bucket('my-bucket') + >>> print bucket + + + This implements "storage.buckets.insert". + + If the bucket already exists, will raise + :class:`gcloud.exceptions.Conflict`. + + :type bucket_name: string + :param bucket_name: The bucket name to create. + + :rtype: :class:`gcloud.storage.bucket.Bucket` + :returns: The newly created bucket. 
+ """ + bucket = Bucket(bucket_name) + bucket.create(self.project, connection=self.connection) + return bucket diff --git a/gcloud/storage/test_client.py b/gcloud/storage/test_client.py index 24982417349f..7de9631b3220 100644 --- a/gcloud/storage/test_client.py +++ b/gcloud/storage/test_client.py @@ -131,6 +131,54 @@ def test_lookup_bucket_hit(self): self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) + def test_create_bucket_conflict(self): + from gcloud.exceptions import Conflict + + PROJECT = 'PROJECT' + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + BLOB_NAME = 'blob-name' + URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b?project=%s' % (PROJECT,), + ]) + http = client.connection._http = _Http( + {'status': '409', 'content-type': 'application/json'}, + '{"error": {"message": "Conflict"}}', + ) + + self.assertRaises(Conflict, client.create_bucket, BLOB_NAME) + self.assertEqual(http._called_with['method'], 'POST') + self.assertEqual(http._called_with['uri'], URI) + + def test_create_bucket_success(self): + from gcloud.storage.bucket import Bucket + + PROJECT = 'PROJECT' + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + BLOB_NAME = 'blob-name' + URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b?project=%s' % (PROJECT,), + ]) + http = client.connection._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), + ) + + bucket = client.create_bucket(BLOB_NAME) + self.assertTrue(isinstance(bucket, Bucket)) + self.assertEqual(bucket.name, BLOB_NAME) + self.assertEqual(http._called_with['method'], 'POST') + self.assertEqual(http._called_with['uri'], URI) + class _Credentials(object): From 0dfd1a982e43d01c2f0321b1010f46b20a121fee Mon Sep 17 
00:00:00 2001 From: Danny Hermes Date: Tue, 7 Jul 2015 10:59:44 -0700 Subject: [PATCH 3/6] Implementing Client.list_buckets in storage. --- gcloud/storage/client.py | 60 +++++++++++++++++++ gcloud/storage/test_client.py | 110 ++++++++++++++++++++++++++++++++++ 2 files changed, 170 insertions(+) diff --git a/gcloud/storage/client.py b/gcloud/storage/client.py index f550ed500c5f..e3c97b698482 100644 --- a/gcloud/storage/client.py +++ b/gcloud/storage/client.py @@ -17,6 +17,7 @@ from gcloud.client import JSONClient from gcloud.exceptions import NotFound +from gcloud.storage.api import _BucketIterator from gcloud.storage.bucket import Bucket from gcloud.storage.connection import Connection @@ -117,3 +118,62 @@ def create_bucket(self, bucket_name): bucket = Bucket(bucket_name) bucket.create(self.project, connection=self.connection) return bucket + + def list_buckets(self, max_results=None, page_token=None, prefix=None, + projection='noAcl', fields=None): + """Get all buckets in the project associated to the client. + + This will not populate the list of blobs available in each + bucket. + + >>> for bucket in client.list_buckets(): + >>> print bucket + + This implements "storage.buckets.list". + + :type max_results: integer or ``NoneType`` + :param max_results: Optional. Maximum number of buckets to return. + + :type page_token: string or ``NoneType`` + :param page_token: Optional. Opaque marker for the next "page" of + buckets. If not passed, will return the first page + of buckets. + + :type prefix: string or ``NoneType`` + :param prefix: Optional. Filter results to buckets whose names begin + with this prefix. + + :type projection: string or ``NoneType`` + :param projection: If used, must be 'full' or 'noAcl'. Defaults to + 'noAcl'. Specifies the set of properties to return. + + :type fields: string or ``NoneType`` + :param fields: Selector specifying which fields to include in a + partial response. Must be a list of fields. 
For example + to get a partial response with just the next page token + and the ID of each bucket returned: + 'items/id,nextPageToken' + + :rtype: iterable of :class:`gcloud.storage.bucket.Bucket` objects. + :returns: All buckets belonging to this project. + """ + extra_params = {'project': self.project} + + if max_results is not None: + extra_params['maxResults'] = max_results + + if prefix is not None: + extra_params['prefix'] = prefix + + extra_params['projection'] = projection + + if fields is not None: + extra_params['fields'] = fields + + result = _BucketIterator(connection=self.connection, + extra_params=extra_params) + # Page token must be handled specially since the base `Iterator` + # class has it as a reserved property. + if page_token is not None: + result.next_page_token = page_token + return result diff --git a/gcloud/storage/test_client.py b/gcloud/storage/test_client.py index 7de9631b3220..b1c1a50ec052 100644 --- a/gcloud/storage/test_client.py +++ b/gcloud/storage/test_client.py @@ -179,6 +179,116 @@ def test_create_bucket_success(self): self.assertEqual(http._called_with['method'], 'POST') self.assertEqual(http._called_with['uri'], URI) + def test_list_buckets_empty(self): + from six.moves.urllib.parse import parse_qs + from six.moves.urllib.parse import urlparse + + PROJECT = 'PROJECT' + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + EXPECTED_QUERY = { + 'project': [PROJECT], + 'projection': ['noAcl'], + } + http = client.connection._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + buckets = list(client.list_buckets()) + self.assertEqual(len(buckets), 0) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['body'], None) + + BASE_URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b', + ]) + URI = http._called_with['uri'] + self.assertTrue(URI.startswith(BASE_URI)) + 
uri_parts = urlparse(URI) + self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY) + + def test_list_buckets_non_empty(self): + from six.moves.urllib.parse import parse_qs + from six.moves.urllib.parse import urlencode + from six.moves.urllib.parse import urlparse + PROJECT = 'PROJECT' + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + BUCKET_NAME = 'bucket-name' + query_params = urlencode({'project': PROJECT, 'projection': 'noAcl'}) + BASE_URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + ]) + URI = '/'.join([BASE_URI, 'b?%s' % (query_params,)]) + http = client.connection._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + '{{"items": [{{"name": "{0}"}}]}}'.format(BUCKET_NAME) + .encode('utf-8'), + ) + buckets = list(client.list_buckets()) + self.assertEqual(len(buckets), 1) + self.assertEqual(buckets[0].name, BUCKET_NAME) + self.assertEqual(http._called_with['method'], 'GET') + self.assertTrue(http._called_with['uri'].startswith(BASE_URI)) + self.assertEqual(parse_qs(urlparse(http._called_with['uri']).query), + parse_qs(urlparse(URI).query)) + + def test_list_buckets_all_arguments(self): + from six.moves.urllib.parse import parse_qs + from six.moves.urllib.parse import urlparse + + PROJECT = 'foo-bar' + CREDENTIALS = _Credentials() + client = self._makeOne(project=PROJECT, credentials=CREDENTIALS) + + MAX_RESULTS = 10 + PAGE_TOKEN = 'ABCD' + PREFIX = 'subfolder' + PROJECTION = 'full' + FIELDS = 'items/id,nextPageToken' + EXPECTED_QUERY = { + 'project': [PROJECT], + 'maxResults': [str(MAX_RESULTS)], + 'pageToken': [PAGE_TOKEN], + 'prefix': [PREFIX], + 'projection': [PROJECTION], + 'fields': [FIELDS], + } + + http = client.connection._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + '{"items": []}', + ) + iterator = client.list_buckets( + max_results=MAX_RESULTS, + page_token=PAGE_TOKEN, + prefix=PREFIX, + 
projection=PROJECTION, + fields=FIELDS, + ) + buckets = list(iterator) + self.assertEqual(buckets, []) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['body'], None) + + BASE_URI = '/'.join([ + client.connection.API_BASE_URL, + 'storage', + client.connection.API_VERSION, + 'b' + ]) + URI = http._called_with['uri'] + self.assertTrue(URI.startswith(BASE_URI)) + uri_parts = urlparse(URI) + self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY) + class _Credentials(object): From c262e4536c706bbb938df72568d542cb8a209a1a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 7 Jul 2015 11:08:54 -0700 Subject: [PATCH 4/6] Moving _BucketIterator from storage.api to client. --- gcloud/storage/api.py | 34 +--------------------------------- gcloud/storage/client.py | 33 ++++++++++++++++++++++++++++++++- gcloud/storage/test_api.py | 34 ---------------------------------- gcloud/storage/test_client.py | 34 ++++++++++++++++++++++++++++++++++ 4 files changed, 67 insertions(+), 68 deletions(-) diff --git a/gcloud/storage/api.py b/gcloud/storage/api.py index a0eb967a30fe..a4870bb8b09c 100644 --- a/gcloud/storage/api.py +++ b/gcloud/storage/api.py @@ -20,9 +20,9 @@ from gcloud.exceptions import NotFound from gcloud._helpers import get_default_project -from gcloud.iterator import Iterator from gcloud.storage._helpers import _require_connection from gcloud.storage.bucket import Bucket +from gcloud.storage.client import _BucketIterator def lookup_bucket(bucket_name, connection=None): @@ -196,35 +196,3 @@ def create_bucket(bucket_name, project=None, connection=None): bucket = Bucket(bucket_name) bucket.create(project, connection=connection) return bucket - - -class _BucketIterator(Iterator): - """An iterator listing all buckets. - - You shouldn't have to use this directly, but instead should use the - helper methods on :class:`gcloud.storage.connection.Connection` - objects. 
- - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: The connection to use for querying the list of buckets. - - :type extra_params: dict or ``NoneType`` - :param extra_params: Extra query string parameters for the API call. - """ - - def __init__(self, connection, extra_params=None): - connection = _require_connection(connection) - super(_BucketIterator, self).__init__(connection=connection, path='/b', - extra_params=extra_params) - - def get_items_from_response(self, response): - """Factory method which yields :class:`.Bucket` items from a response. - - :type response: dict - :param response: The JSON API response for a page of buckets. - """ - for item in response.get('items', []): - name = item.get('name') - bucket = Bucket(name) - bucket._set_properties(item) - yield bucket diff --git a/gcloud/storage/client.py b/gcloud/storage/client.py index e3c97b698482..ac5ffb4bd227 100644 --- a/gcloud/storage/client.py +++ b/gcloud/storage/client.py @@ -17,7 +17,7 @@ from gcloud.client import JSONClient from gcloud.exceptions import NotFound -from gcloud.storage.api import _BucketIterator +from gcloud.iterator import Iterator from gcloud.storage.bucket import Bucket from gcloud.storage.connection import Connection @@ -177,3 +177,34 @@ def list_buckets(self, max_results=None, page_token=None, prefix=None, if page_token is not None: result.next_page_token = page_token return result + + +class _BucketIterator(Iterator): + """An iterator listing all buckets. + + You shouldn't have to use this directly, but instead should use the + helper methods on :class:`gcloud.storage.client.Client` + objects. + + :type connection: :class:`gcloud.storage.connection.Connection` + :param connection: The connection to use for querying the list of buckets. + + :type extra_params: dict or ``NoneType`` + :param extra_params: Extra query string parameters for the API call. 
+ """ + + def __init__(self, connection, extra_params=None): + super(_BucketIterator, self).__init__(connection=connection, path='/b', + extra_params=extra_params) + + def get_items_from_response(self, response): + """Factory method which yields :class:`.Bucket` items from a response. + + :type response: dict + :param response: The JSON API response for a page of buckets. + """ + for item in response.get('items', []): + name = item.get('name') + bucket = Bucket(name) + bucket._set_properties(item) + yield bucket diff --git a/gcloud/storage/test_api.py b/gcloud/storage/test_api.py index c3b098e243fa..f8f1dcdb5e9a 100644 --- a/gcloud/storage/test_api.py +++ b/gcloud/storage/test_api.py @@ -310,40 +310,6 @@ def test_success_use_default(self): self._create_bucket_success_helper('PROJECT', use_default=True) -class Test__BucketIterator(unittest2.TestCase): - - def _getTargetClass(self): - from gcloud.storage.api import _BucketIterator - return _BucketIterator - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def test_ctor(self): - connection = object() - iterator = self._makeOne(connection) - self.assertEqual(iterator.path, '/b') - self.assertEqual(iterator.page_number, 0) - self.assertEqual(iterator.next_page_token, None) - - def test_get_items_from_response_empty(self): - connection = object() - iterator = self._makeOne(connection) - self.assertEqual(list(iterator.get_items_from_response({})), []) - - def test_get_items_from_response_non_empty(self): - from gcloud.storage.bucket import Bucket - BLOB_NAME = 'blob-name' - response = {'items': [{'name': BLOB_NAME}]} - connection = object() - iterator = self._makeOne(connection) - buckets = list(iterator.get_items_from_response(response)) - self.assertEqual(len(buckets), 1) - bucket = buckets[0] - self.assertTrue(isinstance(bucket, Bucket)) - self.assertEqual(bucket.name, BLOB_NAME) - - class Http(object): _called_with = None diff --git a/gcloud/storage/test_client.py 
b/gcloud/storage/test_client.py index b1c1a50ec052..4404a1202e32 100644 --- a/gcloud/storage/test_client.py +++ b/gcloud/storage/test_client.py @@ -290,6 +290,40 @@ def test_list_buckets_all_arguments(self): self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY) +class Test__BucketIterator(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.storage.client import _BucketIterator + return _BucketIterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = object() + iterator = self._makeOne(connection) + self.assertEqual(iterator.path, '/b') + self.assertEqual(iterator.page_number, 0) + self.assertEqual(iterator.next_page_token, None) + + def test_get_items_from_response_empty(self): + connection = object() + iterator = self._makeOne(connection) + self.assertEqual(list(iterator.get_items_from_response({})), []) + + def test_get_items_from_response_non_empty(self): + from gcloud.storage.bucket import Bucket + BLOB_NAME = 'blob-name' + response = {'items': [{'name': BLOB_NAME}]} + connection = object() + iterator = self._makeOne(connection) + buckets = list(iterator.get_items_from_response(response)) + self.assertEqual(len(buckets), 1) + bucket = buckets[0] + self.assertTrue(isinstance(bucket, Bucket)) + self.assertEqual(bucket.name, BLOB_NAME) + + class _Credentials(object): _scopes = None From 15501000988072931c3cfd52bd132da973d67e78 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 7 Jul 2015 11:12:38 -0700 Subject: [PATCH 5/6] Removing storage.api module. 
--- docs/_components/storage-getting-started.rst | 4 +- gcloud/storage/__init__.py | 4 - gcloud/storage/api.py | 198 ------------ gcloud/storage/test_api.py | 324 ------------------- 4 files changed, 2 insertions(+), 528 deletions(-) delete mode 100644 gcloud/storage/api.py delete mode 100644 gcloud/storage/test_api.py diff --git a/docs/_components/storage-getting-started.rst b/docs/_components/storage-getting-started.rst index ed0bec69ea5d..ccf9be46b0bc 100644 --- a/docs/_components/storage-getting-started.rst +++ b/docs/_components/storage-getting-started.rst @@ -150,10 +150,10 @@ Accessing a bucket ------------------ If you already have a bucket, use -:func:`get_bucket ` to retrieve the +:meth:`get_bucket ` to retrieve the bucket object:: - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> bucket = client.get_bucket('my-bucket') If you want to get all the blobs in the bucket, you can use :func:`list_blobs `:: diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py index 8097e51c9b6c..8a2b0c0d1a73 100644 --- a/gcloud/storage/__init__.py +++ b/gcloud/storage/__init__.py @@ -47,10 +47,6 @@ from gcloud.storage._implicit_environ import get_default_bucket from gcloud.storage._implicit_environ import get_default_connection from gcloud.storage._implicit_environ import set_default_connection -from gcloud.storage.api import create_bucket -from gcloud.storage.api import get_bucket -from gcloud.storage.api import list_buckets -from gcloud.storage.api import lookup_bucket from gcloud.storage.batch import Batch from gcloud.storage.blob import Blob from gcloud.storage.bucket import Bucket diff --git a/gcloud/storage/api.py b/gcloud/storage/api.py deleted file mode 100644 index a4870bb8b09c..000000000000 --- a/gcloud/storage/api.py +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright 2015 Google Inc. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Methods for interacting with Google Cloud Storage. - -Allows interacting with Cloud Storage via user-friendly objects -rather than via Connection. -""" - -from gcloud.exceptions import NotFound -from gcloud._helpers import get_default_project -from gcloud.storage._helpers import _require_connection -from gcloud.storage.bucket import Bucket -from gcloud.storage.client import _BucketIterator - - -def lookup_bucket(bucket_name, connection=None): - """Get a bucket by name, returning None if not found. - - You can use this if you would rather checking for a None value - than catching an exception:: - - >>> from gcloud import storage - >>> bucket = storage.lookup_bucket('doesnt-exist') - >>> print bucket - None - >>> bucket = storage.lookup_bucket('my-bucket') - >>> print bucket - - - :type bucket_name: string - :param bucket_name: The name of the bucket to get. - - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending requests. - If not provided, falls back to default. - - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The bucket matching the name provided or None if not found. 
- """ - try: - return get_bucket(bucket_name, connection=connection) - except NotFound: - return None - - -def list_buckets(project=None, max_results=None, page_token=None, prefix=None, - projection='noAcl', fields=None, connection=None): - """Get all buckets in the project. - - This will not populate the list of blobs available in each - bucket. - - >>> from gcloud import storage - >>> for bucket in storage.list_buckets(): - >>> print bucket - - This implements "storage.buckets.list". - - :type project: string or ``NoneType`` - :param project: Optional. The project to use when listing all buckets. - If not provided, falls back to default. - - :type max_results: integer or ``NoneType`` - :param max_results: Optional. Maximum number of buckets to return. - - :type page_token: string or ``NoneType`` - :param page_token: Optional. Opaque marker for the next "page" of buckets. - If not passed, will return the first page of buckets. - - :type prefix: string or ``NoneType`` - :param prefix: Optional. Filter results to buckets whose names begin with - this prefix. - - :type projection: string or ``NoneType`` - :param projection: If used, must be 'full' or 'noAcl'. Defaults to - 'noAcl'. Specifies the set of properties to return. - - :type fields: string or ``NoneType`` - :param fields: Selector specifying which fields to include in a - partial response. Must be a list of fields. For example - to get a partial response with just the next page token - and the language of each bucket returned: - 'items/id,nextPageToken' - - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending requests. - If not provided, falls back to default. - - :rtype: iterable of :class:`gcloud.storage.bucket.Bucket` objects. - :returns: All buckets belonging to this project. 
- """ - if project is None: - project = get_default_project() - extra_params = {'project': project} - - if max_results is not None: - extra_params['maxResults'] = max_results - - if prefix is not None: - extra_params['prefix'] = prefix - - extra_params['projection'] = projection - - if fields is not None: - extra_params['fields'] = fields - - result = _BucketIterator(connection=connection, - extra_params=extra_params) - # Page token must be handled specially since the base `Iterator` - # class has it as a reserved property. - if page_token is not None: - result.next_page_token = page_token - return result - - -def get_bucket(bucket_name, connection=None): - """Get a bucket by name. - - If the bucket isn't found, this will raise a - :class:`gcloud.storage.exceptions.NotFound`. - - For example:: - - >>> from gcloud import storage - >>> from gcloud.exceptions import NotFound - >>> try: - >>> bucket = storage.get_bucket('my-bucket') - >>> except NotFound: - >>> print 'Sorry, that bucket does not exist!' - - This implements "storage.buckets.get". - - :type bucket_name: string - :param bucket_name: The name of the bucket to get. - - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending requests. - If not provided, falls back to default. - - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The bucket matching the name provided. - :raises: :class:`gcloud.exceptions.NotFound` - """ - connection = _require_connection(connection) - bucket = Bucket(bucket_name) - bucket.reload(connection=connection) - return bucket - - -def create_bucket(bucket_name, project=None, connection=None): - """Create a new bucket. - - For example:: - - >>> from gcloud import storage - >>> bucket = storage.create_bucket('my-bucket') - >>> print bucket - - - This implements "storage.buckets.insert". - - If the bucket already exists, will raise - :class:`gcloud.exceptions.Conflict`. 
- - :type project: string - :param project: Optional. The project to use when creating bucket. - If not provided, falls back to default. - - :type bucket_name: string - :param bucket_name: The bucket name to create. - - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending requests. - If not provided, falls back to default. - - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The newly created bucket. - """ - connection = _require_connection(connection) - bucket = Bucket(bucket_name) - bucket.create(project, connection=connection) - return bucket diff --git a/gcloud/storage/test_api.py b/gcloud/storage/test_api.py deleted file mode 100644 index f8f1dcdb5e9a..000000000000 --- a/gcloud/storage/test_api.py +++ /dev/null @@ -1,324 +0,0 @@ -# Copyright 2015 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest2 - - -class Test_lookup_bucket(unittest2.TestCase): - - def _callFUT(self, bucket_name, connection=None): - from gcloud.storage.api import lookup_bucket - return lookup_bucket(bucket_name, connection=connection) - - def test_miss(self): - from gcloud.storage.connection import Connection - NONESUCH = 'nonesuch' - conn = Connection() - URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'nonesuch?projection=noAcl', - ]) - http = conn._http = Http( - {'status': '404', 'content-type': 'application/json'}, - b'{}', - ) - bucket = self._callFUT(NONESUCH, connection=conn) - self.assertEqual(bucket, None) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - - def _lookup_bucket_hit_helper(self, use_default=False): - from gcloud.storage._testing import _monkey_defaults - from gcloud.storage.bucket import Bucket - from gcloud.storage.connection import Connection - BLOB_NAME = 'blob-name' - conn = Connection() - URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - '%s?projection=noAcl' % (BLOB_NAME,), - ]) - http = conn._http = Http( - {'status': '200', 'content-type': 'application/json'}, - '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), - ) - - if use_default: - with _monkey_defaults(connection=conn): - bucket = self._callFUT(BLOB_NAME) - else: - bucket = self._callFUT(BLOB_NAME, connection=conn) - - self.assertTrue(isinstance(bucket, Bucket)) - self.assertEqual(bucket.name, BLOB_NAME) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - - def test_hit(self): - self._lookup_bucket_hit_helper(use_default=False) - - def test_use_default(self): - self._lookup_bucket_hit_helper(use_default=True) - - -class Test_list_buckets(unittest2.TestCase): - - def _callFUT(self, *args, **kwargs): - from gcloud.storage.api import list_buckets - return list_buckets(*args, **kwargs) - - def 
test_empty(self): - from six.moves.urllib.parse import parse_qs - from six.moves.urllib.parse import urlparse - from gcloud.storage.connection import Connection - PROJECT = 'project' - conn = Connection() - EXPECTED_QUERY = { - 'project': [PROJECT], - 'projection': ['noAcl'], - } - http = conn._http = Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - ) - buckets = list(self._callFUT(project=PROJECT, connection=conn)) - self.assertEqual(len(buckets), 0) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['body'], None) - - BASE_URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - ]) - URI = http._called_with['uri'] - self.assertTrue(URI.startswith(BASE_URI)) - uri_parts = urlparse(URI) - self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY) - - def _list_buckets_non_empty_helper(self, project, use_default=False): - from six.moves.urllib.parse import parse_qs - from six.moves.urllib.parse import urlencode - from six.moves.urllib.parse import urlparse - from gcloud._testing import _monkey_defaults as _base_monkey_defaults - from gcloud.storage._testing import _monkey_defaults - from gcloud.storage.connection import Connection - BUCKET_NAME = 'bucket-name' - conn = Connection() - query_params = urlencode({'project': project, 'projection': 'noAcl'}) - BASE_URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - ]) - URI = '/'.join([BASE_URI, 'b?%s' % (query_params,)]) - http = conn._http = Http( - {'status': '200', 'content-type': 'application/json'}, - '{{"items": [{{"name": "{0}"}}]}}'.format(BUCKET_NAME) - .encode('utf-8'), - ) - - if use_default: - with _base_monkey_defaults(project=project): - with _monkey_defaults(connection=conn): - buckets = list(self._callFUT()) - else: - buckets = list(self._callFUT(project=project, connection=conn)) - - self.assertEqual(len(buckets), 1) - self.assertEqual(buckets[0].name, BUCKET_NAME) - 
self.assertEqual(http._called_with['method'], 'GET') - self.assertTrue(http._called_with['uri'].startswith(BASE_URI)) - self.assertEqual(parse_qs(urlparse(http._called_with['uri']).query), - parse_qs(urlparse(URI).query)) - - def test_non_empty(self): - self._list_buckets_non_empty_helper('PROJECT', use_default=False) - - def test_non_use_default(self): - self._list_buckets_non_empty_helper('PROJECT', use_default=True) - - def test_all_arguments(self): - from six.moves.urllib.parse import parse_qs - from six.moves.urllib.parse import urlparse - from gcloud.storage.connection import Connection - PROJECT = 'foo-bar' - MAX_RESULTS = 10 - PAGE_TOKEN = 'ABCD' - PREFIX = 'subfolder' - PROJECTION = 'full' - FIELDS = 'items/id,nextPageToken' - EXPECTED_QUERY = { - 'project': [PROJECT], - 'maxResults': [str(MAX_RESULTS)], - 'pageToken': [PAGE_TOKEN], - 'prefix': [PREFIX], - 'projection': [PROJECTION], - 'fields': [FIELDS], - } - CONNECTION = Connection() - http = CONNECTION._http = Http( - {'status': '200', 'content-type': 'application/json'}, - '{"items": []}', - ) - iterator = self._callFUT( - project=PROJECT, - max_results=MAX_RESULTS, - page_token=PAGE_TOKEN, - prefix=PREFIX, - projection=PROJECTION, - fields=FIELDS, - connection=CONNECTION, - ) - buckets = list(iterator) - self.assertEqual(buckets, []) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['body'], None) - - BASE_URI = '/'.join([ - CONNECTION.API_BASE_URL, - 'storage', - CONNECTION.API_VERSION, - 'b' - ]) - URI = http._called_with['uri'] - self.assertTrue(URI.startswith(BASE_URI)) - uri_parts = urlparse(URI) - self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY) - - -class Test_get_bucket(unittest2.TestCase): - - def _callFUT(self, bucket_name, connection=None): - from gcloud.storage.api import get_bucket - return get_bucket(bucket_name, connection=connection) - - def test_miss(self): - from gcloud.exceptions import NotFound - from gcloud.storage.connection 
import Connection - NONESUCH = 'nonesuch' - conn = Connection() - URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'nonesuch?projection=noAcl', - ]) - http = conn._http = Http( - {'status': '404', 'content-type': 'application/json'}, - b'{}', - ) - self.assertRaises(NotFound, self._callFUT, NONESUCH, connection=conn) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - - def _get_bucket_hit_helper(self, use_default=False): - from gcloud.storage._testing import _monkey_defaults - from gcloud.storage.bucket import Bucket - from gcloud.storage.connection import Connection - BLOB_NAME = 'blob-name' - conn = Connection() - URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - '%s?projection=noAcl' % (BLOB_NAME,), - ]) - http = conn._http = Http( - {'status': '200', 'content-type': 'application/json'}, - '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), - ) - - if use_default: - with _monkey_defaults(connection=conn): - bucket = self._callFUT(BLOB_NAME) - else: - bucket = self._callFUT(BLOB_NAME, connection=conn) - - self.assertTrue(isinstance(bucket, Bucket)) - self.assertEqual(bucket.name, BLOB_NAME) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - - def test_hit(self): - self._get_bucket_hit_helper(use_default=False) - - def test_hit_use_default(self): - self._get_bucket_hit_helper(use_default=True) - - -class Test_create_bucket(unittest2.TestCase): - - def _callFUT(self, bucket_name, project=None, connection=None): - from gcloud.storage.api import create_bucket - return create_bucket(bucket_name, project=project, - connection=connection) - - def _create_bucket_success_helper(self, project, use_default=False): - from gcloud._testing import _monkey_defaults as _base_monkey_defaults - from gcloud.storage._testing import _monkey_defaults - from gcloud.storage.connection import Connection - from 
gcloud.storage.bucket import Bucket - BLOB_NAME = 'blob-name' - conn = Connection() - URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b?project=%s' % project, - ]) - http = conn._http = Http( - {'status': '200', 'content-type': 'application/json'}, - '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), - ) - - if use_default: - with _base_monkey_defaults(project=project): - with _monkey_defaults(connection=conn): - bucket = self._callFUT(BLOB_NAME) - else: - bucket = self._callFUT(BLOB_NAME, project=project, connection=conn) - - self.assertTrue(isinstance(bucket, Bucket)) - self.assertEqual(bucket.name, BLOB_NAME) - self.assertEqual(http._called_with['method'], 'POST') - self.assertEqual(http._called_with['uri'], URI) - - def test_success(self): - self._create_bucket_success_helper('PROJECT', use_default=False) - - def test_success_use_default(self): - self._create_bucket_success_helper('PROJECT', use_default=True) - - -class Http(object): - - _called_with = None - - def __init__(self, headers, content): - from httplib2 import Response - self._response = Response(headers) - self._content = content - - def request(self, **kw): - self._called_with = kw - return self._response, self._content From 1b1fb11c0eff8d470933d934ad62bdac6514c9af Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 7 Jul 2015 12:19:10 -0700 Subject: [PATCH 6/6] Updating docs, demo and system tests after storage.api deletion. 
--- README.rst | 3 ++- docs/_components/storage-getting-started.rst | 6 ++--- docs/_components/storage-quickstart.rst | 4 +-- docs/index.rst | 3 ++- gcloud/storage/__init__.py | 4 ++- gcloud/storage/acl.py | 4 +-- gcloud/storage/bucket.py | 28 ++++++++++---------- gcloud/storage/demo/__init__.py | 13 +-------- gcloud/storage/demo/demo.py | 10 +++---- system_tests/storage.py | 13 ++++----- 10 files changed, 41 insertions(+), 47 deletions(-) diff --git a/README.rst b/README.rst index 2496fa291794..a8a4abf5cba7 100644 --- a/README.rst +++ b/README.rst @@ -101,7 +101,8 @@ how to create a bucket. .. code:: python from gcloud import storage - bucket = storage.get_bucket('bucket-id-here') + client = storage.Client() + bucket = client.get_bucket('bucket-id-here') # Then do other things... blob = bucket.get_blob('/remote/path/to/file.txt') print blob.download_as_string() diff --git a/docs/_components/storage-getting-started.rst b/docs/_components/storage-getting-started.rst index ccf9be46b0bc..eb86bf6a49b6 100644 --- a/docs/_components/storage-getting-started.rst +++ b/docs/_components/storage-getting-started.rst @@ -56,7 +56,7 @@ bucket. Let's create a bucket: - >>> bucket = storage.create_bucket('test', project_name, connection=connection) + >>> bucket = client.create_bucket('test') Traceback (most recent call last): File "", line 1, in File "gcloud/storage/connection.py", line 340, in create_bucket @@ -184,8 +184,8 @@ If you have a full bucket, you can delete it this way:: Listing available buckets ------------------------- - >>> for bucket in storage.list_buckets(connection): - ... print bucket.name + >>> for bucket in client.list_buckets(): + ... 
print bucket.name Managing access control ----------------------- diff --git a/docs/_components/storage-quickstart.rst b/docs/_components/storage-quickstart.rst index 64c300778031..5d5137092dfb 100644 --- a/docs/_components/storage-quickstart.rst +++ b/docs/_components/storage-quickstart.rst @@ -56,9 +56,9 @@ Once you have the connection, you can create buckets and blobs:: >>> from gcloud import storage - >>> storage.list_buckets(connection) + >>> client.list_buckets() [, ...] - >>> bucket = storage.create_bucket('my-new-bucket', connection=connection) + >>> bucket = client.create_bucket('my-new-bucket') >>> print bucket >>> blob = storage.Blob('my-test-file.txt', bucket=bucket) diff --git a/docs/index.rst b/docs/index.rst index 963d2fe1cbce..2d418866a740 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -50,6 +50,7 @@ Cloud Storage .. code-block:: python from gcloud import storage - bucket = storage.get_bucket('') + client = storage.Client() + bucket = client.get_bucket('') blob = storage.Blob('my-test-file.txt', bucket=bucket) blob = blob.upload_contents_from_string('this is test content!') diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py index 8a2b0c0d1a73..8fa382865df2 100644 --- a/gcloud/storage/__init__.py +++ b/gcloud/storage/__init__.py @@ -17,7 +17,8 @@ You'll typically use these to get started with the API: >>> from gcloud import storage ->>> bucket = storage.get_bucket('bucket-id-here') +>>> client = storage.Client() +>>> bucket = client.get_bucket('bucket-id-here') >>> # Then do other things... 
>>> blob = bucket.get_blob('/remote/path/to/file.txt') >>> print blob.download_as_string() @@ -50,6 +51,7 @@ from gcloud.storage.batch import Batch from gcloud.storage.blob import Blob from gcloud.storage.bucket import Bucket +from gcloud.storage.client import Client from gcloud.storage.connection import SCOPE from gcloud.storage.connection import Connection diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py index 45725d12afdc..673aa05d744d 100644 --- a/gcloud/storage/acl.py +++ b/gcloud/storage/acl.py @@ -19,8 +19,8 @@ :func:`gcloud.storage.bucket.Bucket.acl`:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket(bucket_name, connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket(bucket_name) >>> acl = bucket.acl Adding and removing permissions can be done with the following methods diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 02d66d9561a8..a20cbe408d5c 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -194,8 +194,8 @@ def get_blob(self, blob_name, connection=None): This will return None if the blob doesn't exist:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> print bucket.get_blob('/path/to/blob.txt') >>> print bucket.get_blob('/does-not-exist.txt') @@ -356,8 +356,8 @@ def delete_blob(self, blob_name, connection=None): >>> from gcloud.exceptions import NotFound >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> print bucket.list_blobs() [] >>> bucket.delete_blob('my-file.txt') @@ -463,8 +463,8 @@ def upload_file(self, filename, blob_name=None, connection=None): For 
example:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt') >>> print bucket.list_blobs() [] @@ -473,8 +473,8 @@ def upload_file(self, filename, blob_name=None, connection=None): using the local filename (**not** the complete path):: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> bucket.upload_file('~/my-file.txt') >>> print bucket.list_blobs() [] @@ -510,8 +510,8 @@ def upload_file_object(self, file_obj, blob_name=None, connection=None): For example:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> bucket.upload_file(open('~/my-file.txt'), 'remote-text-file.txt') >>> print bucket.list_blobs() [] @@ -520,8 +520,8 @@ def upload_file_object(self, file_obj, blob_name=None, connection=None): using the local filename (**not** the complete path):: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> bucket.upload_file(open('~/my-file.txt')) >>> print bucket.list_blobs() [] @@ -788,8 +788,8 @@ def configure_website(self, main_page_suffix=None, not_found_page=None): of an index page and a page to use when a blob isn't found:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket(bucket_name, connection=connection) + >>> client = storage.Client() + >>> bucket = 
client.get_bucket(bucket_name) >>> bucket.configure_website('index.html', '404.html') You probably should also make the whole bucket public:: diff --git a/gcloud/storage/demo/__init__.py b/gcloud/storage/demo/__init__.py index 0558e7f8bf6c..a441bc2508b2 100644 --- a/gcloud/storage/demo/__init__.py +++ b/gcloud/storage/demo/__init__.py @@ -13,18 +13,7 @@ # limitations under the License. import os -from gcloud import storage -__all__ = ['create_bucket', 'list_buckets', 'PROJECT_ID'] +__all__ = ['PROJECT_ID'] PROJECT_ID = os.getenv('GCLOUD_TESTS_PROJECT_ID') - - -def list_buckets(connection): - return list(storage.list_buckets(project=PROJECT_ID, - connection=connection)) - - -def create_bucket(bucket_name, connection): - return storage.create_bucket(bucket_name, PROJECT_ID, - connection=connection) diff --git a/gcloud/storage/demo/demo.py b/gcloud/storage/demo/demo.py index 93d8766f4049..ddad71f59dfb 100644 --- a/gcloud/storage/demo/demo.py +++ b/gcloud/storage/demo/demo.py @@ -16,25 +16,25 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Let's start by importing the demo module and getting a connection: +# Let's start by importing the demo module and getting a client: import time from gcloud import storage from gcloud.storage import demo -connection = storage.get_connection() +client = storage.Client(project=demo.PROJECT_ID) # OK, now let's look at all of the buckets... -print(list(demo.list_buckets(connection))) # This might take a second... +print(list(client.list_buckets())) # This might take a second... # Now let's create a new bucket... bucket_name = ("bucket-%s" % time.time()).replace(".", "") # Get rid of dots. print(bucket_name) -bucket = demo.create_bucket(bucket_name, connection) +bucket = client.create_bucket(bucket_name) print(bucket) # Let's look at all of the buckets again... 
-print(list(demo.list_buckets(connection))) +print(list(client.list_buckets())) # How about we create a new blob inside this bucket. blob = storage.Blob("my-new-file.txt", bucket=bucket) diff --git a/system_tests/storage.py b/system_tests/storage.py index 7ee7b3f3a217..dcfcbd444a3b 100644 --- a/system_tests/storage.py +++ b/system_tests/storage.py @@ -28,6 +28,7 @@ SHARED_BUCKETS = {} _helpers._PROJECT_ENV_VAR_NAME = 'GCLOUD_TESTS_PROJECT_ID' +CLIENT = storage.Client() def setUpModule(): @@ -36,7 +37,7 @@ def setUpModule(): bucket_name = 'new%d' % (1000 * time.time(),) # In the **very** rare case the bucket name is reserved, this # fails with a ConnectionError. - SHARED_BUCKETS['test_bucket'] = storage.create_bucket(bucket_name) + SHARED_BUCKETS['test_bucket'] = CLIENT.create_bucket(bucket_name) def tearDownModule(): @@ -57,12 +58,12 @@ def tearDown(self): def test_create_bucket(self): new_bucket_name = 'a-new-bucket' self.assertRaises(exceptions.NotFound, - storage.get_bucket, new_bucket_name) - created = storage.create_bucket(new_bucket_name) + CLIENT.get_bucket, new_bucket_name) + created = CLIENT.create_bucket(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) - def test_get_buckets(self): + def test_list_buckets(self): buckets_to_create = [ 'new%d' % (1000 * time.time(),), 'newer%d' % (1000 * time.time(),), @@ -70,11 +71,11 @@ def test_get_buckets(self): ] created_buckets = [] for bucket_name in buckets_to_create: - bucket = storage.create_bucket(bucket_name) + bucket = CLIENT.create_bucket(bucket_name) self.case_buckets_to_delete.append(bucket_name) # Retrieve the buckets. - all_buckets = storage.list_buckets() + all_buckets = CLIENT.list_buckets() created_buckets = [bucket for bucket in all_buckets if bucket.name in buckets_to_create] self.assertEqual(len(created_buckets), len(buckets_to_create))