libcloud-notifications mailing list archives

From anthonys...@apache.org
Subject libcloud git commit: GoogleStorageDriver can now use either our S3 authentication or other Google Cloud Platform OAuth2 authentication methods.
Date Fri, 27 Nov 2015 05:16:08 GMT
Repository: libcloud
Updated Branches:
  refs/heads/trunk 8f54b5f7b -> 3849f65fe


GoogleStorageDriver can now use either our S3 authentication or other Google Cloud Platform OAuth2 authentication methods.

GoogleBaseConnection now allows a GCS_S3 auth type, but does not handle creating the S3 HMAC Authorization header itself; it still handles OAuth2 for the GCE, IA, and SA auth types. Also did some minor cleanup in GoogleBaseConnection.
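
The driver docstring in the google_storage.py hunk below describes how the auth type is chosen; a minimal usage sketch, with placeholder credential, key, and project values, looks like this:

    from libcloud.storage.drivers.google_storage import GoogleStorageDriver

    # OAuth2 via a Service Account: key is the account email, secret the
    # private key (placeholder values shown).
    driver = GoogleStorageDriver(key='sa-name@developer.gserviceaccount.com',
                                 secret='example-private-key-pem',
                                 project='example-project')

    # S3 interoperability: a 20-character key starting with 'GOOG' selects
    # the new GCS_S3 auth type and HMAC request signing.
    driver = GoogleStorageDriver(key='GOOG0123456789ABCXYZ',
                                 secret='example-interop-secret')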

Refactored the S3 HMAC auth signature creation method into a reusable static method. Refactored GoogleStorageConnection to use S3's signature method instead of reimplementing it. Added a test for S3's HMAC signature method.
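
The shared helper is now a static method on BaseS3Connection, so the Google driver signs through the same code path and only the vendor header prefix differs; a small sketch with placeholder header and key values (mirroring the new test_signature test further down):

    from libcloud.storage.drivers.s3 import BaseS3Connection

    # Google Storage passes its 'x-goog' vendor prefix (see the driver's
    # http_vendor_prefix); S3 passes its own 'x-amz' prefix.
    signature = BaseS3Connection.get_auth_signature(
        method='GET',
        headers={'content-type': 'text/plain', 'x-goog-acl': 'private'},
        params={},
        expires=None,
        secret_key='example-secret',
        path='/bucket/object',
        vendor_prefix='x-goog')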

Changed an InvalidContainerNameError to ContainerError in S3 create_container. The exception was raised on any 400 response, which can be returned for reasons other than an invalid name, so the original exception name was a misnomer.
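
Code that previously caught InvalidContainerNameError for a rejected create_container call would now catch the broader ContainerError; roughly (the driver instance and container name are illustrative):

    from libcloud.storage.types import ContainerError

    try:
        container = driver.create_container(container_name='Bad_Name!')
    except ContainerError as exc:
        # Raised for any 400 response from create_container, whether the
        # cause is an invalid name or some other bad request.
        print('create_container failed: %s' % exc)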

Added tests for new logic.

Did other minor cleanup.

Tests run:
Some manual tests putting, getting, and deleting objects/buckets. I used Service Account, Installed App, and S3 interoperability credentials; a rough sketch of the equivalent driver calls follows the commands below.
python setup.py test
tox -e lint
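
The manual checks are not scripted in the patch; a sketch of the equivalent driver calls, with placeholder credentials and names, might look like:

    from libcloud.storage.drivers.google_storage import GoogleStorageDriver

    driver = GoogleStorageDriver(key='GOOG0123456789ABCXYZ',
                                 secret='example-interop-secret')

    container = driver.create_container('libcloud-manual-test')         # put bucket
    obj = driver.upload_object_via_stream(iter([b'hello world']),
                                          container, 'test-object')     # put object
    fetched = driver.get_object('libcloud-manual-test', 'test-object')  # get object
    driver.delete_object(fetched)                                        # delete object
    driver.delete_container(container)                                   # delete bucket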

Signed-off-by: Anthony Shaw <anthony.p.shaw@gmail.com>
Closes #633


Project: http://git-wip-us.apache.org/repos/asf/libcloud/repo
Commit: http://git-wip-us.apache.org/repos/asf/libcloud/commit/3849f65f
Tree: http://git-wip-us.apache.org/repos/asf/libcloud/tree/3849f65f
Diff: http://git-wip-us.apache.org/repos/asf/libcloud/diff/3849f65f

Branch: refs/heads/trunk
Commit: 3849f65fe5289f7d7160b7537afa930c7e2ea8a8
Parents: 8f54b5f
Author: Scott Crunkleton <crunkleton@google.com>
Authored: Fri Oct 30 16:44:32 2015 -0700
Committer: Anthony Shaw <anthony.p.shaw@gmail.com>
Committed: Fri Nov 27 16:15:38 2015 +1100

----------------------------------------------------------------------
 libcloud/common/base.py                      |   4 +-
 libcloud/common/google.py                    | 208 ++++++++++--------
 libcloud/compute/base.py                     |   6 -
 libcloud/compute/drivers/gce.py              |   2 +-
 libcloud/storage/base.py                     |   6 -
 libcloud/storage/drivers/google_storage.py   | 181 ++++++++--------
 libcloud/storage/drivers/s3.py               |  88 ++++----
 libcloud/test/common/test_google.py          |  40 +++-
 libcloud/test/secrets.py-dist                |   3 +-
 libcloud/test/storage/test_google_storage.py | 249 ++++++++++++++++++++--
 libcloud/test/storage/test_s3.py             |  33 ++-
 11 files changed, 554 insertions(+), 266 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/common/base.py
----------------------------------------------------------------------
diff --git a/libcloud/common/base.py b/libcloud/common/base.py
index 4794cfc..d457e24 100644
--- a/libcloud/common/base.py
+++ b/libcloud/common/base.py
@@ -520,9 +520,7 @@ class Connection(object):
             (self.host, self.port, self.secure,
              self.request_path) = self._tuple_from_url(url)
 
-        if timeout is None:
-            timeout = self.__class__.timeout
-        self.timeout = timeout
+        self.timeout = timeout or self.timeout
         self.retry_delay = retry_delay
         self.backoff = backoff
         self.proxy_url = proxy_url

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/common/google.py
----------------------------------------------------------------------
diff --git a/libcloud/common/google.py b/libcloud/common/google.py
index 2856748..1977546 100644
--- a/libcloud/common/google.py
+++ b/libcloud/common/google.py
@@ -104,6 +104,10 @@ except ImportError:
 TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
 
 
+def _now():
+    return datetime.datetime.utcnow()
+
+
 def _is_gce():
     http_code, http_reason, body = _get_gce_metadata()
     if http_code == httplib.OK and body:
@@ -111,6 +115,15 @@ def _is_gce():
     return False
 
 
+def _is_gcs_s3(user_id):
+    """Checks S3 key format: 20 alphanumeric chars starting with GOOG."""
+    return len(user_id) == 20 and user_id.startswith('GOOG')
+
+
+def _is_sa(user_id):
+    return user_id.endswith('@developer.gserviceaccount.com')
+
+
 def _get_gce_metadata(path=''):
     try:
         url = "http://metadata/computeMetadata/v1/" + path.lstrip('/')
@@ -328,9 +341,6 @@ class GoogleBaseAuthConnection(ConnectionUserAndKey):
 
         super(GoogleBaseAuthConnection, self).__init__(user_id, key, **kwargs)
 
-    def _now(self):
-        return datetime.datetime.utcnow()
-
     def add_default_headers(self, headers):
         headers['Content-Type'] = "application/x-www-form-urlencoded"
         headers['Host'] = self.host
@@ -348,7 +358,7 @@ class GoogleBaseAuthConnection(ConnectionUserAndKey):
         :rtype:   ``dict``
         """
         data = urlencode(request_body)
-        now = self._now()
+        now = _now()
         try:
             response = self.request('/o/oauth2/token', method='POST',
                                     data=data)
@@ -545,12 +555,43 @@ class GoogleGCEServiceAcctAuthConnection(GoogleBaseAuthConnection):
                              "'%s'" % str(http_reason))
         token_info = json.loads(token_info)
         if 'expires_in' in token_info:
-            expire_time = self._now() + datetime.timedelta(
+            expire_time = _now() + datetime.timedelta(
                 seconds=token_info['expires_in'])
             token_info['expire_time'] = expire_time.strftime(TIMESTAMP_FORMAT)
         return token_info
 
 
+class GoogleAuthType(object):
+    """
+    SA (Service Account),
+    IA (Installed Application),
+    GCE (Auth from a GCE instance with service account enabled)
+    GCS_S3 (Cloud Storage S3 interoperability authentication)
+    """
+    SA = 'SA'
+    IA = 'IA'
+    GCE = 'GCE'
+    GCS_S3 = 'GCS_S3'
+
+    ALL_TYPES = [SA, IA, GCE, GCS_S3]
+    OAUTH2_TYPES = [SA, IA, GCE]
+
+    @classmethod
+    def guess_type(cls, user_id):
+        if _is_sa(user_id):
+            return cls.SA
+        elif _is_gce():
+            return cls.GCE
+        elif _is_gcs_s3(user_id):
+            return cls.GCS_S3
+        else:
+            return cls.IA
+
+    @classmethod
+    def is_oauth2(cls, auth_type):
+        return auth_type in cls.OAUTH2_TYPES
+
+
 class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
     """Base connection class for interacting with Google APIs."""
     driver = GoogleBaseDriver
@@ -558,6 +599,7 @@ class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
     host = 'www.googleapis.com'
     poll_interval = 2.0
     timeout = 180
+    credential_file = '~/.google_libcloud_auth'
 
     def __init__(self, user_id, key=None, auth_type=None,
                  credential_file=None, scopes=None, **kwargs):
@@ -574,13 +616,8 @@ class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
                      authentication.
         :type   key: ``str``
 
-        :keyword  auth_type: Accepted values are "SA" or "IA" or "GCE"
-                             ("Service Account" or "Installed Application" or
-                             "GCE" if libcloud is being used on a GCE instance
-                             with service account enabled).
-                             If not supplied, auth_type will be guessed based
-                             on value of user_id or if the code is being
-                             executed in a GCE instance.).
+        :keyword  auth_type: See GoogleAuthType class for list and description
+                             of accepted values.
                              If not supplied, auth_type will be guessed based
                              on value of user_id or if the code is running
                              on a GCE instance.
@@ -594,49 +631,24 @@ class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
                           read/write access to Compute, Storage, and DNS.
         :type     scopes: ``list``
         """
-        self.credential_file = credential_file or '~/.gce_libcloud_auth'
+        self.user_id = user_id
+        self.key = key
+        if auth_type and auth_type not in GoogleAuthType.ALL_TYPES:
+            raise GoogleAuthError('Invalid auth type: %s' % auth_type)
+        self.auth_type = auth_type or GoogleAuthType.guess_type(user_id)
 
-        if auth_type is None:
-            # Try to guess.
-            if '@' in user_id:
-                auth_type = 'SA'
-            elif _is_gce():
-                auth_type = 'GCE'
-            else:
-                auth_type = 'IA'
-
-        # Default scopes to read/write for compute, storage, and dns.  Can
-        # override this when calling get_driver() or setting in secrets.py
+        # OAuth2 stuff and placeholders
         self.scopes = scopes
-        if not self.scopes:
-            self.scopes = [
-                'https://www.googleapis.com/auth/compute',
-                'https://www.googleapis.com/auth/devstorage.full_control',
-                'https://www.googleapis.com/auth/ndev.clouddns.readwrite',
-            ]
-        self.token_info = self._get_token_info_from_file()
+        self.auth_conn = None
+        self.token_expire_time = None
+        self.token_info = None
+        if credential_file:
+            self.credential_file = credential_file
+        elif self.auth_type == GoogleAuthType.SA:
+            self.credential_file += '.' + user_id
 
-        if auth_type == 'GCE':
-            self.auth_conn = GoogleGCEServiceAcctAuthConnection(
-                user_id, self.scopes, **kwargs)
-        elif auth_type == 'SA':
-            if '@' not in user_id:
-                raise GoogleAuthError('Service Account auth requires a '
-                                      'valid email address')
-            self.auth_conn = GoogleServiceAcctAuthConnection(
-                user_id, key, self.scopes, **kwargs)
-        elif auth_type == 'IA':
-            self.auth_conn = GoogleInstalledAppAuthConnection(
-                user_id, key, self.scopes, **kwargs)
-        else:
-            raise GoogleAuthError('Invalid auth_type: %s' % str(auth_type))
-
-        if self.token_info is None:
-            self.token_info = self.auth_conn.get_new_token()
-            self._write_token_info_to_file()
-
-        self.token_expire_time = datetime.datetime.strptime(
-            self.token_info['expire_time'], TIMESTAMP_FORMAT)
+        if GoogleAuthType.is_oauth2(self.auth_type):
+            self._setup_oauth2(**kwargs)
 
         super(GoogleBaseConnection, self).__init__(user_id, key, **kwargs)
 
@@ -645,14 +657,11 @@ class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
         ver_platform = 'Python %s/%s' % (python_ver, sys.platform)
         self.user_agent_append(ver_platform)
 
-    def _now(self):
-        return datetime.datetime.utcnow()
-
     def add_default_headers(self, headers):
         """
         @inherits: :class:`Connection.add_default_headers`
         """
-        headers['Content-Type'] = "application/json"
+        headers['Content-Type'] = 'application/json'
         headers['Host'] = self.host
         return headers
 
@@ -663,7 +672,7 @@ class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
 
         @inherits: :class:`Connection.pre_connect_hook`
         """
-        now = self._now()
+        now = _now()
         if self.token_expire_time < now:
             self.token_info = self.auth_conn.refresh_token(self.token_info)
             self.token_expire_time = datetime.datetime.strptime(
@@ -699,33 +708,6 @@ class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
         # One more time, then give up.
         return super(GoogleBaseConnection, self).request(*args, **kwargs)
 
-    def _get_token_info_from_file(self):
-        """
-        Read credential file and return token information.
-
-        :return:  Token information dictionary, or None
-        :rtype:   ``dict`` or ``None``
-        """
-        token_info = None
-        filename = os.path.realpath(os.path.expanduser(self.credential_file))
-
-        try:
-            with open(filename, 'r') as f:
-                data = f.read()
-            token_info = json.loads(data)
-        except IOError:
-            pass
-        return token_info
-
-    def _write_token_info_to_file(self):
-        """
-        Write token_info to credential file.
-        """
-        filename = os.path.realpath(os.path.expanduser(self.credential_file))
-        data = json.dumps(self.token_info)
-        with open(filename, 'w') as f:
-            f.write(data)
-
     def has_completed(self, response):
         """
         Determine if operation has completed based on response.
@@ -768,3 +750,61 @@ class GoogleBaseConnection(ConnectionUserAndKey, PollingConnection):
         else:
             request = self.request_path + action
         return request
+
+    def _setup_oauth2(self, **kwargs):
+        # Default scopes to read/write for compute, storage, and dns.  Can
+        # override this when calling get_driver() or setting in secrets.py
+        if not self.scopes:
+            self.scopes = [
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/devstorage.full_control',
+                'https://www.googleapis.com/auth/ndev.clouddns.readwrite',
+            ]
+        self.token_info = self._get_token_info_from_file()
+
+        if self.auth_type == GoogleAuthType.GCE:
+            self.auth_conn = GoogleGCEServiceAcctAuthConnection(
+                self.user_id, self.scopes, **kwargs)
+        elif self.auth_type == GoogleAuthType.SA:
+            self.auth_conn = GoogleServiceAcctAuthConnection(
+                self.user_id, self.key, self.scopes, **kwargs)
+        elif self.auth_type == GoogleAuthType.IA:
+            self.auth_conn = GoogleInstalledAppAuthConnection(
+                self.user_id, self.key, self.scopes, **kwargs)
+        else:
+            raise GoogleAuthError('Invalid auth_type: %s' %
+                                  str(self.auth_type))
+
+        if self.token_info is None:
+            self.token_info = self.auth_conn.get_new_token()
+            self._write_token_info_to_file()
+
+        self.token_expire_time = datetime.datetime.strptime(
+            self.token_info['expire_time'], TIMESTAMP_FORMAT)
+
+    def _get_token_info_from_file(self):
+        """
+        Read credential file and return token information.
+
+        :return:  Token information dictionary, or None
+        :rtype:   ``dict`` or ``None``
+        """
+        token_info = None
+        filename = os.path.realpath(os.path.expanduser(self.credential_file))
+
+        try:
+            with open(filename, 'r') as f:
+                data = f.read()
+            token_info = json.loads(data)
+        except IOError:
+            pass
+        return token_info
+
+    def _write_token_info_to_file(self):
+        """
+        Write token_info to credential file.
+        """
+        filename = os.path.realpath(os.path.expanduser(self.credential_file))
+        data = json.dumps(self.token_info)
+        with open(filename, 'w') as f:
+            f.write(data)

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/compute/base.py
----------------------------------------------------------------------
diff --git a/libcloud/compute/base.py b/libcloud/compute/base.py
index bac4787..2b9b18a 100644
--- a/libcloud/compute/base.py
+++ b/libcloud/compute/base.py
@@ -668,12 +668,6 @@ class NodeDriver(BaseDriver):
 
     NODE_STATE_MAP = {}
 
-    def __init__(self, key, secret=None, secure=True, host=None, port=None,
-                 api_version=None, **kwargs):
-        super(NodeDriver, self).__init__(key=key, secret=secret, secure=secure,
-                                         host=host, port=port,
-                                         api_version=api_version, **kwargs)
-
     def list_nodes(self):
         """
         List all nodes.

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/compute/drivers/gce.py
----------------------------------------------------------------------
diff --git a/libcloud/compute/drivers/gce.py b/libcloud/compute/drivers/gce.py
index b74b2a7..8066679 100644
--- a/libcloud/compute/drivers/gce.py
+++ b/libcloud/compute/drivers/gce.py
@@ -1047,7 +1047,7 @@ class GCENodeDriver(NodeDriver):
         self.project = project
         self.scopes = scopes
         self.credential_file = credential_file or \
-            '~/.gce_libcloud_auth' + '.' + self.project
+            GCEConnection.credential_file + '.' + self.project
 
         super(GCENodeDriver, self).__init__(user_id, key, **kwargs)
 

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/storage/base.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/base.py b/libcloud/storage/base.py
index ead1545..ba53b37 100644
--- a/libcloud/storage/base.py
+++ b/libcloud/storage/base.py
@@ -190,12 +190,6 @@ class StorageDriver(BaseDriver):
     # provided and none can be detected when uploading an object
     strict_mode = False
 
-    def __init__(self, key, secret=None, secure=True, host=None, port=None,
-                 **kwargs):
-        super(StorageDriver, self).__init__(key=key, secret=secret,
-                                            secure=secure, host=host,
-                                            port=port, **kwargs)
-
     def iterate_containers(self):
         """
         Return a generator of containers for the given account

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/storage/drivers/google_storage.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/drivers/google_storage.py b/libcloud/storage/drivers/google_storage.py
index 0bc9583..8646556 100644
--- a/libcloud/storage/drivers/google_storage.py
+++ b/libcloud/storage/drivers/google_storage.py
@@ -13,120 +13,119 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import base64
 import copy
-import hmac
 
-from email.utils import formatdate
-from hashlib import sha1
+import email.utils
 
-from libcloud.utils.py3 import b
-
-from libcloud.common.base import ConnectionUserAndKey
-
-from libcloud.storage.drivers.s3 import BaseS3StorageDriver, S3Response
+from libcloud.common.google import GoogleAuthType
+from libcloud.common.google import GoogleBaseConnection
+from libcloud.storage.drivers.s3 import BaseS3Connection
+from libcloud.storage.drivers.s3 import BaseS3StorageDriver
 from libcloud.storage.drivers.s3 import S3RawResponse
+from libcloud.storage.drivers.s3 import S3Response
 
+# Docs are a lie. Actual namespace returned is different that the one listed
+# in the docs.
 SIGNATURE_IDENTIFIER = 'GOOG1'
-
-# Docs are a lie. Actual namespace returned is different that the one listed in
-# the docs.
-AUTH_HOST = 'commondatastorage.googleapis.com'
 API_VERSION = '2006-03-01'
 NAMESPACE = 'http://doc.s3.amazonaws.com/%s' % (API_VERSION)
 
 
-class GoogleStorageConnection(ConnectionUserAndKey):
+class GoogleStorageConnection(GoogleBaseConnection):
     """
-    Repersents a single connection to the Google storage API endpoint.
+    Represents a single connection to the Google storage API endpoint.
+
+    This can either authenticate via the Google OAuth2 methods or via
+    the S3 interoperability method.
     """
 
-    host = AUTH_HOST
+    host = 'storage.googleapis.com'
     responseCls = S3Response
     rawResponseCls = S3RawResponse
+    PROJECT_ID_HEADER = 'x-goog-project-id'
+
+    def __init__(self, user_id, key, secure, auth_type=None,
+                 credential_file=None, **kwargs):
+        super(GoogleStorageConnection, self).__init__(
+            user_id, key, secure=secure, auth_type=auth_type,
+            credential_file=credential_file, **kwargs)
 
     def add_default_headers(self, headers):
-        date = formatdate(usegmt=True)
-        headers['Date'] = date
+        if self.auth_type == GoogleAuthType.GCS_S3:
+            date = email.utils.formatdate(usegmt=True)
+            headers['Date'] = date
+        else:
+            headers = super(GoogleStorageConnection,
+                            self).add_default_headers(headers)
+        project = self.get_project()
+        if project:
+            headers[self.PROJECT_ID_HEADER] = project
         return headers
 
+    def encode_data(self, data):
+        if self.auth_type == GoogleAuthType.GCS_S3:
+            return data
+        return super(GoogleStorageConnection, self).encode_data(data)
+
+    def get_project(self):
+        return getattr(self.driver, 'project')
+
     def pre_connect_hook(self, params, headers):
-        signature = self._get_aws_auth_param(method=self.method,
-                                             headers=headers,
-                                             params=params,
-                                             expires=None,
-                                             secret_key=self.key,
-                                             path=self.action)
-        headers['Authorization'] = '%s %s:%s' % (SIGNATURE_IDENTIFIER,
-                                                 self.user_id, signature)
+        if self.auth_type == GoogleAuthType.GCS_S3:
+            signature = self._get_s3_auth_signature(params, headers)
+            headers['Authorization'] = '%s %s:%s' % (SIGNATURE_IDENTIFIER,
+                                                     self.user_id, signature)
+        else:
+            params, headers = super(GoogleStorageConnection,
+                                    self).pre_connect_hook(params, headers)
         return params, headers
 
-    def _get_aws_auth_param(self, method, headers, params, expires,
-                            secret_key, path='/'):
-        # TODO: Refactor and re-use in S3 driver
-        """
-        Signature = URL-Encode( Base64( HMAC-SHA1( YourSecretAccessKeyID,
-                                UTF-8-Encoding-Of( StringToSign ) ) ) );
-
-        StringToSign = HTTP-VERB + "\n" +
-            Content-MD5 + "\n" +
-            Content-Type + "\n" +
-            Date + "\n" +
-            CanonicalizedHeaders +
-            CanonicalizedResource;
-        """
-        special_header_keys = ['content-md5', 'content-type', 'date']
-        special_header_values = {}
-        extension_header_values = {}
-
-        headers_copy = copy.deepcopy(headers)
-        for key, value in list(headers_copy.items()):
-            if key.lower() in special_header_keys:
-                if key.lower() == 'date':
-                    value = value.strip()
-                else:
-                    value = value.lower().strip()
-                special_header_values[key.lower()] = value
-            elif key.lower().startswith('x-goog-'):
-                extension_header_values[key.lower()] = value.strip()
-
-        if 'content-md5' not in special_header_values:
-            special_header_values['content-md5'] = ''
-
-        if 'content-type' not in special_header_values:
-            special_header_values['content-type'] = ''
-
-        keys_sorted = list(special_header_values.keys())
-        keys_sorted.sort()
-
-        buf = [method]
-        for key in keys_sorted:
-            value = special_header_values[key]
-            buf.append(value)
-        string_to_sign = '\n'.join(buf)
-
-        keys_sorted = list(extension_header_values.keys())
-        keys_sorted.sort()
-
-        extension_header_string = []
-        for key in keys_sorted:
-            value = extension_header_values[key]
-            extension_header_string.append('%s:%s' % (key, value))
-        extension_header_string = '\n'.join(extension_header_string)
-
-        values_to_sign = []
-        for value in [string_to_sign, extension_header_string, path]:
-            if value:
-                values_to_sign.append(value)
-
-        string_to_sign = '\n'.join(values_to_sign)
-        b64_hmac = base64.b64encode(
-            hmac.new(b(secret_key), b(string_to_sign), digestmod=sha1).digest()
-        )
-        return b64_hmac.decode('utf-8')
+    def _get_s3_auth_signature(self, params, headers):
+        """Hacky wrapper to work with S3's get_auth_signature."""
+        headers_copy = {}
+        params_copy = copy.deepcopy(params)
+
+        # Lowercase all headers except 'date' and Google header values
+        for k, v in headers.items():
+            k_lower = k.lower()
+            if (k_lower == 'date' or k_lower.startswith(
+                    GoogleStorageDriver.http_vendor_prefix) or
+                    not isinstance(v, str)):
+                headers_copy[k_lower] = v
+            else:
+                headers_copy[k_lower] = v.lower()
+
+        return BaseS3Connection.get_auth_signature(
+            method=self.method,
+            headers=headers_copy,
+            params=params_copy,
+            expires=None,
+            secret_key=self.key,
+            path=self.action,
+            vendor_prefix=GoogleStorageDriver.http_vendor_prefix)
 
 
 class GoogleStorageDriver(BaseS3StorageDriver):
+    """
+    Driver for Google Cloud Storage.
+
+    Can authenticate via standard Google Cloud methods (Service Accounts,
+    Installed App credentials, and GCE instance service accounts)
+
+    Examples:
+    Service Accounts
+        driver = GoogleStorageDriver(key=client_email, secret=private_key, ...)
+    Installed Application
+        driver = GoogleStorageDriver(key=client_id, secret=client_secret, ...)
+    From GCE instance
+        driver = GoogleStorageDriver(key=foo , secret=bar, ...)
+
+    Can also authenticate via Google Cloud Storage's S3 interoperability API.
+    S3 user keys are 20 alphanumeric characters, starting with GOOG
+    Example:
+        driver = GoogleStorageDriver(key='GOOG0123456789ABCXYZ',
+                                     secret=key_secret)
+    """
     name = 'Google Storage'
     website = 'http://cloud.google.com/'
     connectionCls = GoogleStorageConnection
@@ -135,3 +134,7 @@ class GoogleStorageDriver(BaseS3StorageDriver):
     supports_chunked_encoding = False
     supports_s3_multipart_upload = False
     http_vendor_prefix = 'x-goog'
+
+    def __init__(self, key, secret=None, project=None, **kwargs):
+        self.project = project
+        super(GoogleStorageDriver, self).__init__(key, secret, **kwargs)

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/storage/drivers/s3.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/drivers/s3.py b/libcloud/storage/drivers/s3.py
index 7a3fa7c..f785b7f 100644
--- a/libcloud/storage/drivers/s3.py
+++ b/libcloud/storage/drivers/s3.py
@@ -13,10 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import time
-import copy
 import base64
 import hmac
+import time
 import sys
 
 from hashlib import sha1
@@ -39,6 +38,7 @@ from libcloud.common.base import ConnectionUserAndKey, RawResponse
 from libcloud.common.aws import AWSBaseResponse, AWSDriver, AWSTokenConnection
 
 from libcloud.storage.base import Object, Container, StorageDriver
+from libcloud.storage.types import ContainerError
 from libcloud.storage.types import ContainerIsNotEmptyError
 from libcloud.storage.types import InvalidContainerNameError
 from libcloud.storage.types import ContainerDoesNotExistError
@@ -101,20 +101,9 @@ class BaseS3Connection(ConnectionUserAndKey):
     responseCls = S3Response
     rawResponseCls = S3RawResponse
 
-    def add_default_params(self, params):
-        expires = str(int(time.time()) + EXPIRATION_SECONDS)
-        params['AWSAccessKeyId'] = self.user_id
-        params['Expires'] = expires
-        return params
-
-    def pre_connect_hook(self, params, headers):
-        params['Signature'] = self._get_aws_auth_param(
-            method=self.method, headers=headers, params=params,
-            expires=params['Expires'], secret_key=self.key, path=self.action)
-        return params, headers
-
-    def _get_aws_auth_param(self, method, headers, params, expires,
-                            secret_key, path='/'):
+    @staticmethod
+    def get_auth_signature(method, headers, params, expires, secret_key, path,
+                           vendor_prefix):
         """
         Signature = URL-Encode( Base64( HMAC-SHA1( YourSecretAccessKeyID,
                                     UTF-8-Encoding-Of( StringToSign ) ) ) );
@@ -123,50 +112,34 @@ class BaseS3Connection(ConnectionUserAndKey):
             Content-MD5 + "\n" +
             Content-Type + "\n" +
             Expires + "\n" +
-            CanonicalizedAmzHeaders +
+            CanonicalizedVendorHeaders +
             CanonicalizedResource;
         """
-        special_header_keys = ['content-md5', 'content-type', 'date']
-        special_header_values = {'date': ''}
-        amz_header_values = {}
+        special_headers = {'content-md5': '', 'content-type': '', 'date': ''}
+        vendor_headers = {}
 
-        headers_copy = copy.deepcopy(headers)
-        for key, value in list(headers_copy.items()):
+        for key, value in list(headers.items()):
             key_lower = key.lower()
-            if key_lower in special_header_keys:
-                special_header_values[key_lower] = value.strip()
-            elif key_lower.startswith('x-amz-'):
-                amz_header_values[key.lower()] = value.strip()
-
-        if 'content-md5' not in special_header_values:
-            special_header_values['content-md5'] = ''
-
-        if 'content-type' not in special_header_values:
-            special_header_values['content-type'] = ''
+            if key_lower in special_headers:
+                special_headers[key_lower] = value.strip()
+            elif key_lower.startswith(vendor_prefix):
+                vendor_headers[key_lower] = value.strip()
 
         if expires:
-            special_header_values['date'] = str(expires)
-
-        keys_sorted = list(special_header_values.keys())
-        keys_sorted.sort()
+            special_headers['date'] = str(expires)
 
         buf = [method]
-        for key in keys_sorted:
-            value = special_header_values[key]
+        for _, value in sorted(special_headers.items()):
             buf.append(value)
         string_to_sign = '\n'.join(buf)
 
-        keys_sorted = list(amz_header_values.keys())
-        keys_sorted.sort()
-
-        amz_header_string = []
-        for key in keys_sorted:
-            value = amz_header_values[key]
-            amz_header_string.append('%s:%s' % (key, value))
-        amz_header_string = '\n'.join(amz_header_string)
+        buf = []
+        for key, value in sorted(vendor_headers.items()):
+            buf.append('%s:%s' % (key, value))
+        header_string = '\n'.join(buf)
 
         values_to_sign = []
-        for value in [string_to_sign, amz_header_string, path]:
+        for value in [string_to_sign, header_string, path]:
             if value:
                 values_to_sign.append(value)
 
@@ -176,6 +149,19 @@ class BaseS3Connection(ConnectionUserAndKey):
         )
         return b64_hmac.decode('utf-8')
 
+    def add_default_params(self, params):
+        expires = str(int(time.time()) + EXPIRATION_SECONDS)
+        params['AWSAccessKeyId'] = self.user_id
+        params['Expires'] = expires
+        return params
+
+    def pre_connect_hook(self, params, headers):
+        params['Signature'] = self.get_auth_signature(
+            method=self.method, headers=headers, params=params,
+            expires=params['Expires'], secret_key=self.key, path=self.action,
+            vendor_prefix=self.driver.http_vendor_prefix)
+        return params, headers
+
 
 class S3Connection(AWSTokenConnection, BaseS3Connection):
     """
@@ -379,10 +365,10 @@ class BaseS3StorageDriver(StorageDriver):
                       'be unique among all the containers in the system',
                 container_name=container_name, driver=self)
         elif response.status == httplib.BAD_REQUEST:
-            raise InvalidContainerNameError(value='Container name contains ' +
-                                            'invalid characters.',
-                                            container_name=container_name,
-                                            driver=self)
+            raise ContainerError(
+                value='Bad request when creating container: %s' %
+                      response.body,
+                container_name=container_name, driver=self)
 
         raise LibcloudError('Unexpected status code: %s' % (response.status),
                             driver=self)

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/test/common/test_google.py
----------------------------------------------------------------------
diff --git a/libcloud/test/common/test_google.py b/libcloud/test/common/test_google.py
index 1f1f2ef..b16488b 100644
--- a/libcloud/test/common/test_google.py
+++ b/libcloud/test/common/test_google.py
@@ -16,9 +16,10 @@
 Tests for Google Connection classes.
 """
 import datetime
+import mock
+import os
 import sys
 import unittest
-import os
 
 try:
     import simplejson as json
@@ -29,6 +30,7 @@ from libcloud.utils.py3 import httplib
 
 from libcloud.test import MockHttp, LibcloudTestCase
 from libcloud.common.google import (GoogleAuthError,
+                                    GoogleAuthType,
                                     GoogleBaseAuthConnection,
                                     GoogleInstalledAppAuthConnection,
                                     GoogleServiceAcctAuthConnection,
@@ -56,6 +58,8 @@ GCE_PARAMS_JSON_KEY = ('email@developer.gserviceaccount.com', JSON_KEY)
 GCE_PARAMS_KEY = ('email@developer.gserviceaccount.com', KEY_STR)
 GCE_PARAMS_IA = ('client_id', 'client_secret')
 GCE_PARAMS_GCE = ('foo', 'bar')
+GCS_S3_PARAMS = ('GOOG0123456789ABCXYZ',  # GOOG + 16 alphanumeric chars
+                 '0102030405060708091011121314151617181920')  # 40 base64 chars
 
 
 class MockJsonResponse(object):
@@ -67,8 +71,6 @@ class GoogleBaseAuthConnectionTest(LibcloudTestCase):
     """
     Tests for GoogleBaseAuthConnection
     """
-    GoogleBaseAuthConnection._now = lambda x: datetime.datetime(2013, 6, 26,
-                                                                19, 0, 0)
 
     def setUp(self):
         GoogleBaseAuthConnection.conn_classes = (GoogleAuthMockHttp,
@@ -89,7 +91,9 @@ class GoogleBaseAuthConnectionTest(LibcloudTestCase):
         new_headers = self.conn.add_default_headers(old_headers)
         self.assertEqual(new_headers, expected_headers)
 
-    def test_token_request(self):
+    @mock.patch('libcloud.common.google._now')
+    def test_token_request(self, mock_now):
+        mock_now.return_value = datetime.datetime(2013, 6, 26, 19, 0, 0)
         request_body = {'code': 'asdf', 'client_id': self.conn.user_id,
                         'client_secret': self.conn.key,
                         'redirect_uri': self.conn.redirect_uri,
@@ -133,6 +137,24 @@ class GoogleInstalledAppAuthConnectionTest(LibcloudTestCase):
         self.assertTrue('refresh_token' in new_token2)
 
 
+class GoogleAuthTypeTest(LibcloudTestCase):
+
+    def test_guess(self):
+        self.assertEqual(
+            GoogleAuthType.guess_type(GCE_PARAMS[0]),
+            GoogleAuthType.SA)
+        self.assertEqual(
+            GoogleAuthType.guess_type(GCE_PARAMS_IA[0]),
+            GoogleAuthType.IA)
+        with mock.patch('libcloud.common.google._is_gce', return_value=True):
+            self.assertEqual(
+                GoogleAuthType.guess_type(GCE_PARAMS_GCE[0]),
+                GoogleAuthType.GCE)
+        self.assertEqual(
+            GoogleAuthType.guess_type(GCS_S3_PARAMS[0]),
+            GoogleAuthType.GCS_S3)
+
+
 class GoogleBaseConnectionTest(LibcloudTestCase):
     """
     Tests for GoogleBaseConnection
@@ -161,7 +183,7 @@ class GoogleBaseConnectionTest(LibcloudTestCase):
         kwargs = {'scopes': self.mock_scopes}
 
         if SHA256:
-            kwargs['auth_type'] = 'SA'
+            kwargs['auth_type'] = GoogleAuthType.SA
             conn1 = GoogleBaseConnection(*GCE_PARAMS_PEM_KEY, **kwargs)
             self.assertTrue(isinstance(conn1.auth_conn,
                                        GoogleServiceAcctAuthConnection))
@@ -174,16 +196,20 @@ class GoogleBaseConnectionTest(LibcloudTestCase):
             self.assertTrue(isinstance(conn1.auth_conn,
                                        GoogleServiceAcctAuthConnection))
 
-        kwargs['auth_type'] = 'IA'
+        kwargs['auth_type'] = GoogleAuthType.IA
         conn2 = GoogleBaseConnection(*GCE_PARAMS_IA, **kwargs)
         self.assertTrue(isinstance(conn2.auth_conn,
                                    GoogleInstalledAppAuthConnection))
 
-        kwargs['auth_type'] = 'GCE'
+        kwargs['auth_type'] = GoogleAuthType.GCE
         conn3 = GoogleBaseConnection(*GCE_PARAMS_GCE, **kwargs)
         self.assertTrue(isinstance(conn3.auth_conn,
                                    GoogleGCEServiceAcctAuthConnection))
 
+        kwargs['auth_type'] = GoogleAuthType.GCS_S3
+        conn4 = GoogleBaseConnection(*GCS_S3_PARAMS, **kwargs)
+        self.assertIsNone(conn4.auth_conn)
+
     def test_add_default_headers(self):
         old_headers = {}
         new_expected_headers = {'Content-Type': 'application/json',

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/test/secrets.py-dist
----------------------------------------------------------------------
diff --git a/libcloud/test/secrets.py-dist b/libcloud/test/secrets.py-dist
index 43531c9..9048837 100644
--- a/libcloud/test/secrets.py-dist
+++ b/libcloud/test/secrets.py-dist
@@ -54,7 +54,8 @@ PACKET_PARAMS = ('api_key')
 
 # Storage
 STORAGE_S3_PARAMS = ('key', 'secret')
-STORAGE_GOOGLE_STORAGE_PARAMS = ('key', 'secret')
+# Google key = 20 char alphanumeric string starting with GOOG
+STORAGE_GOOGLE_STORAGE_PARAMS = ('GOOG0123456789ABCXYZ', 'secret')
 
 # Azure key is b64 encoded and must be decoded before signing requests
 STORAGE_AZURE_BLOBS_PARAMS = ('account', 'cGFzc3dvcmQ=')

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/test/storage/test_google_storage.py
----------------------------------------------------------------------
diff --git a/libcloud/test/storage/test_google_storage.py b/libcloud/test/storage/test_google_storage.py
index bd33572..b1d23b0 100644
--- a/libcloud/test/storage/test_google_storage.py
+++ b/libcloud/test/storage/test_google_storage.py
@@ -13,16 +13,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import copy
+import mock
 import sys
 import unittest
 
-from libcloud.utils.py3 import httplib
-
-from libcloud.storage.drivers.google_storage import GoogleStorageDriver
-from libcloud.test.storage.test_s3 import S3Tests, S3MockHttp
+import email.utils
 
+from libcloud.common.google import GoogleAuthType
+from libcloud.storage.drivers import google_storage
+from libcloud.test import LibcloudTestCase
 from libcloud.test.file_fixtures import StorageFileFixtures
 from libcloud.test.secrets import STORAGE_GOOGLE_STORAGE_PARAMS
+from libcloud.test.storage.test_s3 import S3Tests, S3MockHttp
+from libcloud.utils.py3 import httplib
+
+CONN_CLS = google_storage.GoogleStorageConnection
+STORAGE_CLS = google_storage.GoogleStorageDriver
+
+TODAY = email.utils.formatdate(usegmt=True)
+
+OAUTH2_MOCK = mock.patch(
+    'libcloud.common.google.GoogleBaseConnection._setup_oauth2', spec=True)
 
 
 class GoogleStorageMockHttp(S3MockHttp):
@@ -32,24 +44,231 @@ class GoogleStorageMockHttp(S3MockHttp):
         # test_get_object
         # Google uses a different HTTP header prefix for meta data
         body = self.fixtures.load('list_containers.xml')
-        headers = {'content-type': 'application/zip',
-                   'etag': '"e31208wqsdoj329jd"',
-                   'x-goog-meta-rabbits': 'monkeys',
-                   'content-length': 12345,
-                   'last-modified': 'Thu, 13 Sep 2012 07:13:22 GMT'
-                   }
+        headers = {
+            'content-type': 'application/zip',
+            'etag': '"e31208wqsdoj329jd"',
+            'x-goog-meta-rabbits': 'monkeys',
+            'content-length': 12345,
+            'last-modified': 'Thu, 13 Sep 2012 07:13:22 GMT'
+        }
+
+        return (
+            httplib.OK,
+            body,
+            headers,
+            httplib.responses[httplib.OK]
+        )
+
+
+class GoogleStorageConnectionTest(LibcloudTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        super(LibcloudTestCase, cls).setUpClass()
+        OAUTH2_MOCK.start()
+
+    @classmethod
+    def tearDownClass(cls):
+        super(LibcloudTestCase, cls).tearDownClass()
+        OAUTH2_MOCK.stop()
+
+    @mock.patch('email.utils.formatdate')
+    @mock.patch('libcloud.common.google.'
+                'GoogleBaseConnection.add_default_headers')
+    def test_add_default_headers(self, mock_base_method, mock_formatdate):
+        mock_formatdate.return_value = TODAY
+        starting_headers = {'starting': 'headers'}
+        changed_headers = {'changed': 'headers'}
+        project = 'foo-project'
+
+        # Should use base add_default_headers
+        mock_base_method.return_value = dict(changed_headers)
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCE)
+        conn.get_project = lambda: None
+        self.assertEqual(
+            conn.add_default_headers(dict(starting_headers)),
+            dict(changed_headers)
+        )
+        mock_base_method.assert_called_once_with(dict(starting_headers))
+        mock_base_method.reset_mock()
 
-        return (httplib.OK,
-                body,
-                headers,
-                httplib.responses[httplib.OK])
+        # Base add_default_headers with project
+        mock_base_method.return_value = dict(changed_headers)
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCE)
+        conn.get_project = lambda: project
+        headers = dict(changed_headers)
+        headers[CONN_CLS.PROJECT_ID_HEADER] = project
+        self.assertEqual(
+            conn.add_default_headers(dict(starting_headers)),
+            headers
+        )
+        mock_base_method.assert_called_once_with(dict(starting_headers))
+        mock_base_method.reset_mock()
+
+        # Should use S3 add_default_headers
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCS_S3)
+        conn.get_project = lambda: None
+        headers = dict(starting_headers)
+        headers['Date'] = TODAY
+        self.assertEqual(conn.add_default_headers(dict(starting_headers)),
+                         headers)
+        mock_base_method.assert_not_called()
+
+        # S3 add_default_headers with project
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCS_S3)
+        conn.get_project = lambda: project
+        headers = dict(starting_headers)
+        headers['Date'] = TODAY
+        headers[CONN_CLS.PROJECT_ID_HEADER] = project
+        self.assertEqual(conn.add_default_headers(dict(starting_headers)),
+                         headers)
+        mock_base_method.assert_not_called()
+
+    @mock.patch('libcloud.common.google.GoogleBaseConnection.encode_data')
+    def test_encode_data(self, mock_base_method):
+        old_data = 'old data!'
+        new_data = 'new data!'
+
+        # Should use Base encode_data
+        mock_base_method.return_value = new_data
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCE)
+        self.assertEqual(conn.encode_data(old_data), new_data)
+        mock_base_method.assert_called_once_with(old_data)
+        mock_base_method.reset_mock()
+
+        # Should use S3 encode_data (which does nothing)
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCS_S3)
+        self.assertEqual(conn.encode_data(old_data), old_data)
+        mock_base_method.assert_not_called()
+
+    @mock.patch('libcloud.storage.drivers.s3.'
+                'BaseS3Connection.get_auth_signature')
+    def test_get_s3_auth_signature(self, mock_s3_auth_sig_method):
+        # Check that the S3 HMAC signature method is used.
+        # Check that headers are copied and modified before calling the method.
+        mock_s3_auth_sig_method.return_value = 'mock signature!'
+        starting_params = {}
+        starting_headers = {
+            'Date': TODAY,
+            'x-goog-foo': 'MAINTAIN UPPERCASE!',
+            'x-Goog-bar': 'Header should be lowered',
+            'Other': 'LOWER THIS!'
+        }
+        modified_headers = {
+            'date': TODAY,
+            'x-goog-foo': 'MAINTAIN UPPERCASE!',
+            'x-goog-bar': 'Header should be lowered',
+            'other': 'lower this!'
+        }
+
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCS_S3)
+        conn.method = 'GET'
+        conn.action = '/path'
+        result = conn._get_s3_auth_signature(starting_params, starting_headers)
+        self.assertNotEqual(starting_headers, modified_headers)
+        self.assertEqual(result, 'mock signature!')
+        mock_s3_auth_sig_method.assert_called_once_with(
+            method='GET',
+            headers=modified_headers,
+            params=starting_params,
+            expires=None,
+            secret_key='bar_key',
+            path='/path',
+            vendor_prefix='x-goog'
+        )
+
+    @mock.patch('libcloud.common.google.GoogleBaseConnection.pre_connect_hook')
+    def test_pre_connect_hook_oauth2(self, mock_base_hook):
+        # Should use BaseGoogleConnection pre_connect_hook
+        # Check that the base hook is called.
+        starting_params = {'starting': 'params'}
+        changed_params = {'changed': 'params'}
+        starting_headers = {'starting': 'headers'}
+        changed_headers = {'changed': 'headers'}
+
+        mock_base_hook.return_value = (dict(changed_params),
+                                       dict(changed_headers))
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCE)
+        result = conn.pre_connect_hook(
+            dict(starting_params),
+            dict(starting_headers)
+        )
+        self.assertEqual(
+            result,
+            (dict(changed_params), dict(changed_headers))
+        )
+        mock_base_hook.assert_called_once_with(
+            dict(starting_params),
+            dict(starting_headers)
+        )
+        mock_base_hook.reset_mock()
+
+    @mock.patch('libcloud.common.google.GoogleBaseConnection.pre_connect_hook')
+    def test_pre_connect_hook_hmac(self, mock_base_hook):
+        # Check that we call for a HMAC signature, passing params and headers
+        # Check that we properly apply the HMAC signature.
+        # Check that we don't use the BaseGoogleConnection pre_connect_hook.
+        starting_params = {'starting': 'params'}
+        starting_headers = {'starting': 'headers'}
+
+        def fake_hmac_method(params, headers):
+            # snapshot the params and headers passed (they are modified later)
+            fake_hmac_method.params_passed = copy.deepcopy(params)
+            fake_hmac_method.headers_passed = copy.deepcopy(headers)
+            return 'fake signature!'
+
+        conn = CONN_CLS('foo_user', 'bar_key', secure=True,
+                        auth_type=GoogleAuthType.GCS_S3)
+        conn._get_s3_auth_signature = fake_hmac_method
+        conn.action = 'GET'
+        conn.method = '/foo'
+        expected_headers = dict(starting_headers)
+        expected_headers['Authorization'] = (
+            '%s %s:%s' % (google_storage.SIGNATURE_IDENTIFIER, 'foo_user',
+                          'fake signature!')
+        )
+        result = conn.pre_connect_hook(
+            dict(starting_params),
+            dict(starting_headers)
+        )
+        self.assertEqual(
+            result,
+            (dict(starting_params), expected_headers)
+        )
+        mock_base_hook.assert_not_called()
+        self.assertEqual(
+            fake_hmac_method.params_passed,
+            starting_params
+        )
+        self.assertEqual(
+            fake_hmac_method.headers_passed,
+            starting_headers
+        )
 
 
 class GoogleStorageTests(S3Tests):
-    driver_type = GoogleStorageDriver
+    driver_type = STORAGE_CLS
     driver_args = STORAGE_GOOGLE_STORAGE_PARAMS
     mock_response_klass = GoogleStorageMockHttp
 
+    @classmethod
+    def setUpClass(cls):
+        super(S3Tests, cls).setUpClass()
+        OAUTH2_MOCK.start()
+
+    @classmethod
+    def tearDownClass(cls):
+        super(S3Tests, cls).tearDownClass()
+        OAUTH2_MOCK.stop()
+
     def test_billing_not_enabled(self):
         # TODO
         pass

http://git-wip-us.apache.org/repos/asf/libcloud/blob/3849f65f/libcloud/test/storage/test_s3.py
----------------------------------------------------------------------
diff --git a/libcloud/test/storage/test_s3.py b/libcloud/test/storage/test_s3.py
index 667e57c..2a6e873 100644
--- a/libcloud/test/storage/test_s3.py
+++ b/libcloud/test/storage/test_s3.py
@@ -13,10 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import base64
+import hmac
 import os
 import sys
 import unittest
 
+from hashlib import sha1
+
 try:
     from lxml import etree as ET
 except ImportError:
@@ -30,16 +34,19 @@ from libcloud.common.types import InvalidCredsError
 from libcloud.common.types import LibcloudError, MalformedResponseError
 from libcloud.storage.base import Container, Object
 from libcloud.storage.types import ContainerDoesNotExistError
+from libcloud.storage.types import ContainerError
 from libcloud.storage.types import ContainerIsNotEmptyError
 from libcloud.storage.types import InvalidContainerNameError
 from libcloud.storage.types import ObjectDoesNotExistError
 from libcloud.storage.types import ObjectHashMismatchError
+from libcloud.storage.drivers.s3 import BaseS3Connection
 from libcloud.storage.drivers.s3 import S3StorageDriver, S3USWestStorageDriver
 from libcloud.storage.drivers.s3 import S3EUWestStorageDriver
 from libcloud.storage.drivers.s3 import S3APSEStorageDriver
 from libcloud.storage.drivers.s3 import S3APNEStorageDriver
 from libcloud.storage.drivers.s3 import CHUNK_SIZE
 from libcloud.storage.drivers.dummy import DummyIterator
+from libcloud.utils.py3 import b
 
 from libcloud.test import StorageMockHttp, MockRawResponse  # pylint: disable-msg=E0611
 from libcloud.test import MockHttpTestCase  # pylint: disable-msg=E0611
@@ -458,6 +465,26 @@ class S3Tests(unittest.TestCase):
         self.driver = self.driver_type(*self.driver_args, token='asdf')
         self.driver.list_containers()
 
+    def test_signature(self):
+        secret_key = 'ssssh!'
+        sig = BaseS3Connection.get_auth_signature(
+            method='GET',
+            headers={'foo': 'bar',
+                     'content-type': 'TYPE!',
+                     'x-aws-test': 'test_value'},
+            params={'hello': 'world'},
+            expires=None,
+            secret_key=secret_key,
+            path='/',
+            vendor_prefix='x-aws'
+        )
+        string_to_sign = 'GET\n\nTYPE!\n\nx-aws-test:test_value\n/'
+        b64_hmac = base64.b64encode(
+            hmac.new(b(secret_key), b(string_to_sign), digestmod=sha1).digest()
+        )
+        expected_sig = b64_hmac.decode('utf-8')
+        self.assertEqual(sig, expected_sig)
+
     def test_bucket_is_located_in_different_region(self):
         self.mock_response_klass.type = 'DIFFERENT_REGION'
         try:
@@ -571,12 +598,12 @@ class S3Tests(unittest.TestCase):
         self.assertEqual(obj.extra['content_type'], 'application/zip')
         self.assertEqual(obj.meta_data['rabbits'], 'monkeys')
 
-    def test_create_container_invalid_name(self):
-        # invalid container name
+    def test_create_container_bad_request(self):
+        # invalid container name, returns a 400 bad request
         self.mock_response_klass.type = 'INVALID_NAME'
         try:
             self.driver.create_container(container_name='new_container')
-        except InvalidContainerNameError:
+        except ContainerError:
             pass
         else:
             self.fail('Exception was not thrown')

