Fix an exception which would be thrown if a user tries to upload an empty object via multipart upload in the S3 driver.
Part of LIBCLOUD-490. Project: http://git-wip-us.apache.org/repos/asf/libcloud/repo Commit: http://git-wip-us.apache.org/repos/asf/libcloud/commit/26537d8d Tree: http://git-wip-us.apache.org/repos/asf/libcloud/tree/26537d8d Diff: http://git-wip-us.apache.org/repos/asf/libcloud/diff/26537d8d Branch: refs/heads/trunk Commit: 26537d8db34ca7e52246ea0d6ef26b99aa99c5b7 Parents: 5b4e2c0 Author: Tomaz Muraus <[email protected]> Authored: Mon Jan 6 15:06:24 2014 +0100 Committer: Tomaz Muraus <[email protected]> Committed: Mon Jan 6 15:20:50 2014 +0100 ---------------------------------------------------------------------- libcloud/storage/drivers/s3.py | 2 +- libcloud/test/storage/test_s3.py | 26 ++++++++++++++++++++++++-- libcloud/test/test_utils.py | 24 ++++++++++++++++++++++++ libcloud/utils/files.py | 10 +++++++++- 4 files changed, 58 insertions(+), 4 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/storage/drivers/s3.py ---------------------------------------------------------------------- diff --git a/libcloud/storage/drivers/s3.py b/libcloud/storage/drivers/s3.py index dee2887..947b1d7 100644 --- a/libcloud/storage/drivers/s3.py +++ b/libcloud/storage/drivers/s3.py @@ -522,7 +522,7 @@ class S3StorageDriver(StorageDriver): # Read the input data in chunk sizes suitable for AWS for data in read_in_chunks(iterator, chunk_size=CHUNK_SIZE, - fill_size=True): + fill_size=True, yield_empty=True): bytes_transferred += len(data) if calculate_hash: http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/test/storage/test_s3.py ---------------------------------------------------------------------- diff --git a/libcloud/test/storage/test_s3.py b/libcloud/test/storage/test_s3.py index 4bab644..b7e24ad 100644 --- a/libcloud/test/storage/test_s3.py +++ b/libcloud/test/storage/test_s3.py @@ -244,6 +244,9 @@ class S3MockHttp(StorageMockHttp, MockHttpTestCase): 
self.assertEqual(part_no, str(count)) self.assertEqual(etag, headers['etag']) + # Make sure that manifest contains at least one part + self.assertTrue(count >= 1) + body = self.fixtures.load('complete_multipart.xml') return (httplib.OK, body, @@ -747,8 +750,28 @@ class S3Tests(unittest.TestCase): self.assertTrue('some-value' in obj.meta_data) self.driver_type._upload_file = old_func - def test_upload_small_object_via_stream(self): + def test_upload_empty_object_via_stream(self): + if self.driver.supports_s3_multipart_upload: + self.mock_raw_response_klass.type = 'MULTIPART' + self.mock_response_klass.type = 'MULTIPART' + else: + self.mock_raw_response_klass.type = None + self.mock_response_klass.type = None + container = Container(name='foo_bar_container', extra={}, + driver=self.driver) + object_name = 'foo_test_stream_data' + iterator = DummyIterator(data=['']) + extra = {'content_type': 'text/plain'} + obj = self.driver.upload_object_via_stream(container=container, + object_name=object_name, + iterator=iterator, + extra=extra) + + self.assertEqual(obj.name, object_name) + self.assertEqual(obj.size, 0) + + def test_upload_small_object_via_stream(self): if self.driver.supports_s3_multipart_upload: self.mock_raw_response_klass.type = 'MULTIPART' self.mock_response_klass.type = 'MULTIPART' @@ -770,7 +793,6 @@ class S3Tests(unittest.TestCase): self.assertEqual(obj.size, 3) def test_upload_big_object_via_stream(self): - if self.driver.supports_s3_multipart_upload: self.mock_raw_response_klass.type = 'MULTIPART' self.mock_response_klass.type = 'MULTIPART' http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/test/test_utils.py ---------------------------------------------------------------------- diff --git a/libcloud/test/test_utils.py b/libcloud/test/test_utils.py index cd8950b..1d900d4 100644 --- a/libcloud/test/test_utils.py +++ b/libcloud/test/test_utils.py @@ -40,6 +40,7 @@ from libcloud.utils.misc import get_secure_random_string from 
libcloud.utils.networking import is_public_subnet from libcloud.utils.networking import is_private_subnet from libcloud.utils.networking import is_valid_ip_address +from libcloud.storage.drivers.dummy import DummyIterator WARNINGS_BUFFER = [] @@ -148,6 +149,29 @@ class TestUtils(unittest.TestCase): libcloud.utils.in_development_warning('test_module') self.assertEqual(len(WARNINGS_BUFFER), 1) + def test_read_in_chunks_iterator_no_data(self): + iterator = DummyIterator() + generator1 = libcloud.utils.files.read_in_chunks(iterator=iterator, + yield_empty=False) + generator2 = libcloud.utils.files.read_in_chunks(iterator=iterator, + yield_empty=True) + + # yield_empty=False + count = 0 + for data in generator1: + count += 1 + self.assertEqual(data, b('')) + + self.assertEqual(count, 0) + + # yield_empty=True + count = 0 + for data in generator2: + count += 1 + self.assertEqual(data, b('')) + + self.assertEqual(count, 1) + def test_read_in_chunks_iterator(self): def iterator(): for x in range(0, 1000): http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/utils/files.py ---------------------------------------------------------------------- diff --git a/libcloud/utils/files.py b/libcloud/utils/files.py index 4d85c8b..a71e1c4 100644 --- a/libcloud/utils/files.py +++ b/libcloud/utils/files.py @@ -33,7 +33,8 @@ __all__ = [ ] -def read_in_chunks(iterator, chunk_size=None, fill_size=False): +def read_in_chunks(iterator, chunk_size=None, fill_size=False, + yield_empty=False): """ Return a generator which yields data in chunks. @@ -48,6 +49,10 @@ def read_in_chunks(iterator, chunk_size=None, fill_size=False): length (except for last chunk). :type fill_size: ``bool`` + :param yield_empty: If true and iterator returned no data, yield empty + bytes object before raising StopIteration. + :type yield_empty: ``bool`` + TODO: At some point in the future we could use byte arrays here if version >= Python 3. This should speed things up a bit and reduce memory usage. 
""" @@ -75,6 +80,9 @@ def read_in_chunks(iterator, chunk_size=None, fill_size=False): empty = True if len(data) == 0: + if empty and yield_empty: + yield b('') + raise StopIteration if fill_size:
