Updated Branches: refs/heads/trunk 77eae241e -> af7352c2e
Make the following changes to the S3 driver: - Add ACL support for uploads - Add support for AWS tokens - Modify get_container to use a more efficient HEAD approach Closes #223. Signed-off-by: Tomaz Muraus <[email protected]> Project: http://git-wip-us.apache.org/repos/asf/libcloud/repo Commit: http://git-wip-us.apache.org/repos/asf/libcloud/commit/701de69b Tree: http://git-wip-us.apache.org/repos/asf/libcloud/tree/701de69b Diff: http://git-wip-us.apache.org/repos/asf/libcloud/diff/701de69b Branch: refs/heads/trunk Commit: 701de69b905549f28c490b0e71a091c198cdc963 Parents: 77eae24 Author: Noah Kantrowitz <[email protected]> Authored: Wed Jan 15 16:50:14 2014 -0800 Committer: Tomaz Muraus <[email protected]> Committed: Fri Jan 17 04:47:14 2014 +0100 ---------------------------------------------------------------------- libcloud/common/aws.py | 41 ++++++++++++- libcloud/storage/drivers/google_storage.py | 4 +- libcloud/storage/drivers/s3.py | 47 ++++++++++----- libcloud/test/storage/test_google_storage.py | 4 ++ libcloud/test/storage/test_s3.py | 71 +++++++++++++++++++++-- 5 files changed, 143 insertions(+), 24 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/libcloud/blob/701de69b/libcloud/common/aws.py ---------------------------------------------------------------------- diff --git a/libcloud/common/aws.py b/libcloud/common/aws.py index 868b5eb..124a434 100644 --- a/libcloud/common/aws.py +++ b/libcloud/common/aws.py @@ -19,7 +19,7 @@ import time from hashlib import sha256 from xml.etree import ElementTree as ET -from libcloud.common.base import ConnectionUserAndKey, XmlResponse +from libcloud.common.base import ConnectionUserAndKey, XmlResponse, BaseDriver from libcloud.common.types import InvalidCredsError, MalformedResponseError from libcloud.utils.py3 import b, httplib, urlquote from libcloud.utils.xml import findtext, findall @@ -105,7 +105,29 @@ class 
AWSGenericResponse(AWSBaseResponse): return "\n".join(msgs) -class SignedAWSConnection(ConnectionUserAndKey): +class AWSTokenConnection(ConnectionUserAndKey): + def __init__(self, user_id, key, secure=True, + host=None, port=None, url=None, timeout=None, token=None): + self.token = token + super(AWSTokenConnection, self).__init__(user_id, key, secure=secure, + host=host, port=port, url=url, + timeout=timeout) + + def add_default_params(self, params): + # Even though we are adding it to the headers, we need it here too + # so that the token is added to the signature. + if self.token: + params['x-amz-security-token'] = self.token + return super(AWSTokenConnection, self).add_default_params(params) + + def add_default_headers(self, headers): + if self.token: + headers['x-amz-security-token'] = self.token + return super(AWSTokenConnection, self).add_default_headers(headers) + + +class SignedAWSConnection(AWSTokenConnection): + def add_default_params(self, params): params['SignatureVersion'] = '2' params['SignatureMethod'] = 'HmacSHA256' @@ -150,3 +172,18 @@ class SignedAWSConnection(ConnectionUserAndKey): ) return b64_hmac.decode('utf-8') + + +class AWSDriver(BaseDriver): + def __init__(self, key, secret=None, secure=True, host=None, port=None, + api_version=None, region=None, token=None, **kwargs): + self.token = token + super(AWSDriver, self).__init__(key, secret=secret, secure=secure, + host=host, port=port, + api_version=api_version, region=region, + token=token, **kwargs) + + def _ex_connection_class_kwargs(self): + kwargs = super(AWSDriver, self)._ex_connection_class_kwargs() + kwargs['token'] = self.token + return kwargs http://git-wip-us.apache.org/repos/asf/libcloud/blob/701de69b/libcloud/storage/drivers/google_storage.py ---------------------------------------------------------------------- diff --git a/libcloud/storage/drivers/google_storage.py b/libcloud/storage/drivers/google_storage.py index 4043790..6dd6054 100644 --- 
a/libcloud/storage/drivers/google_storage.py +++ b/libcloud/storage/drivers/google_storage.py @@ -24,7 +24,7 @@ from libcloud.utils.py3 import b from libcloud.common.base import ConnectionUserAndKey -from libcloud.storage.drivers.s3 import S3StorageDriver, S3Response +from libcloud.storage.drivers.s3 import BaseS3StorageDriver, S3Response from libcloud.storage.drivers.s3 import S3RawResponse SIGNATURE_IDENTIFIER = 'GOOG1' @@ -126,7 +126,7 @@ class GoogleStorageConnection(ConnectionUserAndKey): return b64_hmac.decode('utf-8') -class GoogleStorageDriver(S3StorageDriver): +class GoogleStorageDriver(BaseS3StorageDriver): name = 'Google Storage' website = 'http://cloud.google.com/' connectionCls = GoogleStorageConnection http://git-wip-us.apache.org/repos/asf/libcloud/blob/701de69b/libcloud/storage/drivers/s3.py ---------------------------------------------------------------------- diff --git a/libcloud/storage/drivers/s3.py b/libcloud/storage/drivers/s3.py index b89d4be..0dd6694 100644 --- a/libcloud/storage/drivers/s3.py +++ b/libcloud/storage/drivers/s3.py @@ -32,7 +32,7 @@ from libcloud.utils.xml import fixxpath, findtext from libcloud.utils.files import read_in_chunks from libcloud.common.types import InvalidCredsError, LibcloudError from libcloud.common.base import ConnectionUserAndKey, RawResponse -from libcloud.common.aws import AWSBaseResponse +from libcloud.common.aws import AWSBaseResponse, AWSDriver, AWSTokenConnection from libcloud.storage.base import Object, Container, StorageDriver from libcloud.storage.types import ContainerIsNotEmptyError @@ -87,9 +87,9 @@ class S3RawResponse(S3Response, RawResponse): pass -class S3Connection(ConnectionUserAndKey): +class BaseS3Connection(ConnectionUserAndKey): """ - Repersents a single connection to the EC2 Endpoint + Represents a single connection to the S3 Endpoint """ host = 's3.amazonaws.com' @@ -172,6 +172,14 @@ class S3Connection(ConnectionUserAndKey): return b64_hmac.decode('utf-8') +class 
S3Connection(AWSTokenConnection, BaseS3Connection): + """ + Represents a single connection to the S3 endpoint, with AWS-specific + features. + """ + pass + + class S3MultipartUpload(object): """ Class representing an amazon s3 multipart upload @@ -206,10 +214,10 @@ class S3MultipartUpload(object): return ('<S3MultipartUpload: key=%s>' % (self.key)) -class S3StorageDriver(StorageDriver): +class BaseS3StorageDriver(StorageDriver): name = 'Amazon S3 (standard)' website = 'http://aws.amazon.com/s3/' - connectionCls = S3Connection + connectionCls = BaseS3Connection hash_type = 'md5' supports_chunked_encoding = False supports_s3_multipart_upload = True @@ -286,16 +294,17 @@ class S3StorageDriver(StorageDriver): yield obj def get_container(self, container_name): - # This is very inefficient, but afaik it's the only way to do it - containers = self.list_containers() - try: - container = [c for c in containers if c.name == container_name][0] - except IndexError: - raise ContainerDoesNotExistError(value=None, driver=self, - container_name=container_name) - - return container + try: + response = self.connection.request('/%s' % container_name, + method='HEAD') + if response.status == httplib.NOT_FOUND: + raise ContainerDoesNotExistError(value=None, driver=self, + container_name=container_name) + except InvalidCredsError: + # This just means the user doesn't have IAM permissions to do a + # HEAD request but other requests might work. 
+ pass + return Container(name=container_name, extra=None, driver=self) def get_object(self, container_name, object_name): container = self.get_container(container_name=container_name) @@ -787,12 +796,16 @@ class S3StorageDriver(StorageDriver): content_type = extra.get('content_type', None) meta_data = extra.get('meta_data', None) + acl = extra.get('acl', None) if meta_data: for key, value in list(meta_data.items()): key = 'x-amz-meta-%s' % (key) headers[key] = value + if acl: + headers['x-amz-acl'] = acl + request_path = self._get_object_path(container, object_name) if query_args: @@ -821,7 +834,7 @@ class S3StorageDriver(StorageDriver): elif response.status == httplib.OK: obj = Object( name=object_name, size=bytes_transferred, hash=server_hash, - extra=None, meta_data=meta_data, container=container, + extra={'acl': acl}, meta_data=meta_data, container=container, driver=self) return obj @@ -904,6 +917,10 @@ class S3StorageDriver(StorageDriver): return obj +class S3StorageDriver(AWSDriver, BaseS3StorageDriver): + connectionCls = S3Connection + + class S3USWestConnection(S3Connection): host = S3_US_WEST_HOST http://git-wip-us.apache.org/repos/asf/libcloud/blob/701de69b/libcloud/test/storage/test_google_storage.py ---------------------------------------------------------------------- diff --git a/libcloud/test/storage/test_google_storage.py b/libcloud/test/storage/test_google_storage.py index a6cc847..ed6dd57 100644 --- a/libcloud/test/storage/test_google_storage.py +++ b/libcloud/test/storage/test_google_storage.py @@ -36,6 +36,10 @@ class GoogleStorageTests(S3Tests): # TODO pass + def test_token(self): + # Not supported on Google Storage + pass + if __name__ == '__main__': sys.exit(unittest.main()) http://git-wip-us.apache.org/repos/asf/libcloud/blob/701de69b/libcloud/test/storage/test_s3.py ---------------------------------------------------------------------- diff --git a/libcloud/test/storage/test_s3.py b/libcloud/test/storage/test_s3.py index b7e24ad..5276b26 
100644 --- a/libcloud/test/storage/test_s3.py +++ b/libcloud/test/storage/test_s3.py @@ -67,6 +67,14 @@ class S3MockHttp(StorageMockHttp, MockHttpTestCase): self.base_headers, httplib.responses[httplib.OK]) + def _list_containers_TOKEN(self, method, url, body, headers): + self.assertEqual(headers['x-amz-security-token'], 'asdf') + body = self.fixtures.load('list_containers_empty.xml') + return (httplib.OK, + body, + self.base_headers, + httplib.responses[httplib.OK]) + def _list_containers(self, method, url, body, headers): body = self.fixtures.load('list_containers.xml') return (httplib.OK, @@ -101,7 +109,14 @@ class S3MockHttp(StorageMockHttp, MockHttpTestCase): self.base_headers, httplib.responses[httplib.OK]) - def _test2_test_list_containers(self, method, url, body, headers): + def _test2_get_object(self, method, url, body, headers): + body = self.fixtures.load('list_container_objects.xml') + return (httplib.OK, + body, + self.base_headers, + httplib.responses[httplib.OK]) + + def _test2_test_get_object(self, method, url, body, headers): # test_get_object body = self.fixtures.load('list_containers.xml') headers = {'content-type': 'application/zip', @@ -157,6 +172,25 @@ class S3MockHttp(StorageMockHttp, MockHttpTestCase): headers, httplib.responses[httplib.OK]) + def _test1_get_container(self, method, url, body, headers): + body = self.fixtures.load('list_container_objects.xml') + return (httplib.OK, + body, + self.base_headers, + httplib.responses[httplib.OK]) + + def _container1_get_container(self, method, url, body, headers): + return (httplib.NOT_FOUND, + '', + self.base_headers, + httplib.responses[httplib.NOT_FOUND]) + + def _test_inexistent_get_object(self, method, url, body, headers): + return (httplib.NOT_FOUND, + '', + self.base_headers, + httplib.responses[httplib.NOT_FOUND]) + def _foo_bar_container(self, method, url, body, headers): # test_delete_container return (httplib.NO_CONTENT, @@ -415,6 +449,11 @@ class S3Tests(unittest.TestCase): else: 
self.fail('Exception was not thrown') + def test_token(self): + self.mock_response_klass.type = 'list_containers_TOKEN' + self.driver = self.driver_type(*self.driver_args, token='asdf') + self.driver.list_containers() + def test_bucket_is_located_in_different_region(self): self.mock_response_klass.type = 'DIFFERENT_REGION' try: @@ -487,7 +526,7 @@ class S3Tests(unittest.TestCase): self.assertTrue('owner' in obj.meta_data) def test_get_container_doesnt_exist(self): - self.mock_response_klass.type = 'list_containers' + self.mock_response_klass.type = 'get_container' try: self.driver.get_container(container_name='container1') except ContainerDoesNotExistError: @@ -496,14 +535,14 @@ class S3Tests(unittest.TestCase): self.fail('Exception was not thrown') def test_get_container_success(self): - self.mock_response_klass.type = 'list_containers' + self.mock_response_klass.type = 'get_container' container = self.driver.get_container(container_name='test1') self.assertTrue(container.name, 'test1') def test_get_object_container_doesnt_exist(self): # This method makes two requests which makes mocking the response a bit # trickier - self.mock_response_klass.type = 'list_containers' + self.mock_response_klass.type = 'get_object' try: self.driver.get_object(container_name='test-inexistent', object_name='test') @@ -515,7 +554,7 @@ class S3Tests(unittest.TestCase): def test_get_object_success(self): # This method makes two requests which makes mocking the response a bit # trickier - self.mock_response_klass.type = 'list_containers' + self.mock_response_klass.type = 'get_object' obj = self.driver.get_object(container_name='test2', object_name='test') @@ -750,6 +789,28 @@ class S3Tests(unittest.TestCase): self.assertTrue('some-value' in obj.meta_data) self.driver_type._upload_file = old_func + def test_upload_object_with_acl(self): + def upload_file(self, response, file_path, chunked=False, + calculate_hash=True): + return True, '0cc175b9c0f1b6a831c399e269772661', 1000 + + old_func = 
self.driver_type._upload_file + self.driver_type._upload_file = upload_file + file_path = os.path.abspath(__file__) + container = Container(name='foo_bar_container', extra={}, + driver=self.driver) + object_name = 'foo_test_upload' + extra = {'acl': 'public-read'} + obj = self.driver.upload_object(file_path=file_path, + container=container, + object_name=object_name, + extra=extra, + verify_hash=True) + self.assertEqual(obj.name, 'foo_test_upload') + self.assertEqual(obj.size, 1000) + self.assertEqual(obj.extra['acl'], 'public-read') + self.driver_type._upload_file = old_func + def test_upload_empty_object_via_stream(self): if self.driver.supports_s3_multipart_upload: self.mock_raw_response_klass.type = 'MULTIPART'
