diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index f75d866df456..0921458154e2 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -104,8 +104,11 @@ class Bucket(_PropertyMixin):
     """
     _iterator_class = _BlobIterator
 
-    _MAX_OBJECTS_FOR_BUCKET_DELETE = 256
-    """Maximum number of existing objects allowed in Bucket.delete()."""
+    _MAX_OBJECTS_FOR_ITERATION = 256
+    """Maximum number of existing objects allowed in iteration.
+
+    This is used in Bucket.delete() and Bucket.make_public().
+    """
 
     def __init__(self, name=None, connection=None):
         super(Bucket, self).__init__(name=name)
@@ -362,15 +365,15 @@ def delete(self, force=False, connection=None):
         connection = _require_connection(connection)
         if force:
             blobs = list(self.list_blobs(
-                max_results=self._MAX_OBJECTS_FOR_BUCKET_DELETE + 1,
+                max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
                 connection=connection))
-            if len(blobs) > self._MAX_OBJECTS_FOR_BUCKET_DELETE:
+            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
                 message = (
                     'Refusing to delete bucket with more than '
                     '%d objects. If you actually want to delete '
                     'this bucket, please delete the objects '
                     'yourself before calling Bucket.delete().'
-                ) % (self._MAX_OBJECTS_FOR_BUCKET_DELETE,)
+                ) % (self._MAX_OBJECTS_FOR_ITERATION,)
                 raise ValueError(message)
 
         # Ignore 404 errors on delete.
@@ -862,6 +865,10 @@ def disable_website(self):
     def make_public(self, recursive=False, future=False, connection=None):
         """Make a bucket public.
 
+        If ``recursive=True`` and the bucket contains more than 256
+        objects / blobs this will cowardly refuse to make the objects public.
+        This is to prevent extremely long runtime of this method.
+
         :type recursive: boolean
         :param recursive: If True, this will make all blobs inside the bucket
                           public as well.
@@ -888,7 +895,19 @@ def make_public(self, recursive=False, future=False, connection=None):
             doa.save(connection=connection)
 
         if recursive:
-            for blob in self.list_blobs(projection='full',
-                                        connection=connection):
+            blobs = list(self.list_blobs(
+                projection='full',
+                max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+                connection=connection))
+            if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+                message = (
+                    'Refusing to make public recursively with more than '
+                    '%d objects. If you actually want to make every object '
+                    'in this bucket public, please do it on the objects '
+                    'yourself.'
+                ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+                raise ValueError(message)
+
+            for blob in blobs:
                 blob.acl.all().grant_read()
                 blob.acl.save(connection=connection)
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py
index 55030739a388..ab5dfc274673 100644
--- a/gcloud/storage/test_bucket.py
+++ b/gcloud/storage/test_bucket.py
@@ -447,7 +447,7 @@ def test_delete_explicit_too_many(self):
         bucket = self._makeOne(NAME, connection)
 
         # Make the Bucket refuse to delete with 2 objects.
-        bucket._MAX_OBJECTS_FOR_BUCKET_DELETE = 1
+        bucket._MAX_OBJECTS_FOR_ITERATION = 1
         self.assertRaises(ValueError, bucket.delete, force=True,
                           connection=connection)
         self.assertEqual(connection._deleted_buckets, [])
@@ -1044,7 +1044,34 @@ def get_items_from_response(self, response):
         self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
         self.assertEqual(kw[1]['method'], 'GET')
         self.assertEqual(kw[1]['path'], '/b/%s/o' % NAME)
-        self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
+        max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1
+        self.assertEqual(kw[1]['query_params'],
+                         {'maxResults': max_results, 'projection': 'full'})
+
+    def test_make_public_recursive_too_many(self):
+        from gcloud.storage.acl import _ACLEntity
+
+        PERMISSIVE = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}]
+        AFTER = {'acl': PERMISSIVE, 'defaultObjectAcl': []}
+
+        NAME = 'name'
+        BLOB_NAME1 = 'blob-name1'
+        BLOB_NAME2 = 'blob-name2'
+        GET_BLOBS_RESP = {
+            'items': [
+                {'name': BLOB_NAME1},
+                {'name': BLOB_NAME2},
+            ],
+        }
+        connection = _Connection(AFTER, GET_BLOBS_RESP)
+        bucket = self._makeOne(NAME, connection)
+        bucket.acl.loaded = True
+        bucket.default_object_acl.loaded = True
+
+        # Make the Bucket refuse to make_public with 2 objects.
+        bucket._MAX_OBJECTS_FOR_ITERATION = 1
+        self.assertRaises(ValueError, bucket.make_public, recursive=True,
+                          connection=connection)
 
 
 class _Connection(object):
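
Illustrative sketch, not part of the patch: after this change, make_public(recursive=True) raises ValueError once the bucket holds more than _MAX_OBJECTS_FOR_ITERATION (256) blobs, so a caller who really wants everything public can catch the error and update blob ACLs directly. The `bucket` and `connection` objects below are assumed to already exist; only methods that appear in the patch are used.

    try:
        bucket.make_public(recursive=True, connection=connection)
    except ValueError:
        # Too many blobs for recursive make_public(); grant read access
        # on each blob's ACL individually instead.
        for blob in bucket.list_blobs(projection='full', connection=connection):
            blob.acl.all().grant_read()
            blob.acl.save(connection=connection)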