libcloud-notifications mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From anthonys...@apache.org
Subject [05/56] [abbrv] libcloud git commit: Removed sdist
Date Mon, 14 Nov 2016 23:50:52 GMT
http://git-wip-us.apache.org/repos/asf/libcloud/blob/8afcda91/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/backblaze_b2.py
----------------------------------------------------------------------
diff --git a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/backblaze_b2.py b/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/backblaze_b2.py
deleted file mode 100644
index fe2f335..0000000
--- a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/backblaze_b2.py
+++ /dev/null
@@ -1,525 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Driver for Backblaze B2 service.
-"""
-
-import base64
-import hashlib
-
-try:
-    import simplejson as json
-except ImportError:
-    import json
-
-from libcloud.utils.py3 import b
-from libcloud.utils.py3 import httplib
-from libcloud.utils.py3 import urlparse
-from libcloud.utils.py3 import next
-from libcloud.utils.files import read_in_chunks
-from libcloud.utils.files import exhaust_iterator
-from libcloud.utils.escape import sanitize_object_name
-
-from libcloud.common.base import ConnectionUserAndKey
-from libcloud.common.base import JsonResponse
-from libcloud.common.types import InvalidCredsError
-from libcloud.common.types import LibcloudError
-from libcloud.storage.providers import Provider
-from libcloud.storage.base import Object, Container, StorageDriver
-from libcloud.storage.types import ContainerDoesNotExistError
-from libcloud.storage.types import ObjectDoesNotExistError
-
-__all__ = [
-    'BackblazeB2StorageDriver',
-
-    'BackblazeB2Connection',
-    'BackblazeB2AuthConnection'
-]
-
-AUTH_API_HOST = 'api.backblaze.com'
-API_PATH = '/b2api/v1/'
-
-
-class BackblazeB2Response(JsonResponse):
-    def success(self):
-        return self.status in [httplib.OK, httplib.CREATED, httplib.ACCEPTED]
-
-    def parse_error(self):
-        status = int(self.status)
-        body = self.parse_body()
-
-        if status == httplib.UNAUTHORIZED:
-            raise InvalidCredsError(body['message'])
-
-        return self.body
-
-
-class BackblazeB2AuthConnection(ConnectionUserAndKey):
-    host = AUTH_API_HOST
-    secure = True
-    responseCls = BackblazeB2Response
-
-    def __init__(self, *args, **kwargs):
-        super(BackblazeB2AuthConnection, self).__init__(*args, **kwargs)
-
-        # Those attributes are populated after authentication
-        self.account_id = None
-        self.api_url = None
-        self.api_host = None
-        self.download_url = None
-        self.download_host = None
-        self.auth_token = None
-
-    def authenticate(self, force=False):
-        """
-        :param force: Force authentication even if we have already obtained the
-                      token.
-        :type force: ``bool``
-        """
-        if not self._is_authentication_needed(force=force):
-            return self
-
-        headers = {}
-        action = 'b2_authorize_account'
-        auth_b64 = base64.b64encode(b('%s:%s' % (self.user_id, self.key)))
-        headers['Authorization'] = 'Basic %s' % (auth_b64.decode('utf-8'))
-
-        action = API_PATH + 'b2_authorize_account'
-        resp = self.request(action=action, headers=headers, method='GET')
-
-        if resp.status == httplib.OK:
-            self._parse_and_set_auth_info(data=resp.object)
-        else:
-            raise Exception('Failed to authenticate: %s' % (str(resp.object)))
-
-        return self
-
-    def _parse_and_set_auth_info(self, data):
-        result = {}
-        self.account_id = data['accountId']
-        self.api_url = data['apiUrl']
-        self.download_url = data['downloadUrl']
-        self.auth_token = data['authorizationToken']
-
-        parsed_api_url = urlparse.urlparse(self.api_url)
-        self.api_host = parsed_api_url.netloc
-
-        parsed_download_url = urlparse.urlparse(self.download_url)
-        self.download_host = parsed_download_url.netloc
-
-        return result
-
-    def _is_authentication_needed(self, force=False):
-        if not self.auth_token or force:
-            return True
-
-        return False
-
-
-class BackblazeB2Connection(ConnectionUserAndKey):
-    host = None  # Note: host is set after authentication
-    secure = True
-    responseCls = BackblazeB2Response
-    authCls = BackblazeB2AuthConnection
-
-    def __init__(self, *args, **kwargs):
-        super(BackblazeB2Connection, self).__init__(*args, **kwargs)
-
-        # Stores info retrieved after authentication (auth token, api url,
-        # download url).
-        self._auth_conn = self.authCls(*args, **kwargs)
-
-    def download_request(self, action, params=None):
-        # Lazily perform authentication
-        auth_conn = self._auth_conn.authenticate()
-
-        # Set host to the download server
-        self.host = auth_conn.download_host
-
-        action = '/file/' + action
-        method = 'GET'
-        raw = True
-        response = self._request(auth_conn=auth_conn, action=action,
-                                 params=params, method=method,
-                                 raw=raw)
-        return response
-
-    def upload_request(self, action, headers, upload_host, auth_token, data):
-        # Lazily perform authentication
-        auth_conn = self._auth_conn.authenticate()
-
-        # Upload host is dynamically retrieved for each upload request
-        self.host = upload_host
-
-        method = 'POST'
-        raw = False
-        response = self._request(auth_conn=auth_conn, action=action,
-                                 params=None, data=data,
-                                 headers=headers, method=method,
-                                 raw=raw, auth_token=auth_token)
-        return response
-
-    def request(self, action, params=None, data=None, headers=None,
-                method='GET', raw=False, include_account_id=False):
-        params = params or {}
-        headers = headers or {}
-
-        # Lazily perform authentication
-        auth_conn = self._auth_conn.authenticate()
-
-        # Set host
-        self.host = auth_conn.api_host
-
-        # Include Content-Type
-        if not raw and data:
-            headers['Content-Type'] = 'application/json'
-
-        # Include account id
-        if include_account_id:
-            if method == 'GET':
-                params['accountId'] = auth_conn.account_id
-            elif method == 'POST':
-                data = data or {}
-                data['accountId'] = auth_conn.account_id
-
-        action = API_PATH + action
-        if data:
-            data = json.dumps(data)
-
-        response = self._request(auth_conn=self._auth_conn, action=action,
-                                 params=params, data=data,
-                                 method=method, headers=headers, raw=raw)
-        return response
-
-    def _request(self, auth_conn, action, params=None, data=None, headers=None,
-                 method='GET', raw=False, auth_token=None):
-        params = params or {}
-        headers = headers or {}
-
-        if not auth_token:
-            # If auth token is not explicitly provided, use the default one
-            auth_token = self._auth_conn.auth_token
-
-        # Include auth token
-        headers['Authorization'] = '%s' % (auth_token)
-        response = super(BackblazeB2Connection, self).request(action=action,
-                                                              params=params,
-                                                              data=data,
-                                                              method=method,
-                                                              headers=headers,
-                                                              raw=raw)
-        return response
-
-
-class BackblazeB2StorageDriver(StorageDriver):
-    connectionCls = BackblazeB2Connection
-    name = 'Backblaze B2'
-    website = 'https://www.backblaze.com/b2/'
-    type = Provider.BACKBLAZE_B2
-    hash_type = 'sha1'
-    supports_chunked_encoding = False
-
-    def iterate_containers(self):
-        resp = self.connection.request(action='b2_list_buckets',
-                                       method='GET',
-                                       include_account_id=True)
-        containers = self._to_containers(data=resp.object)
-        return containers
-
-    def iterate_container_objects(self, container):
-        # TODO: Support pagination
-        params = {'bucketId': container.extra['id']}
-        resp = self.connection.request(action='b2_list_file_names',
-                                       method='GET',
-                                       params=params)
-        objects = self._to_objects(data=resp.object, container=container)
-        return objects
-
-    def get_container(self, container_name):
-        containers = self.iterate_containers()
-        container = next((c for c in containers if c.name == container_name),
-                         None)
-        if container:
-            return container
-        else:
-            raise ContainerDoesNotExistError(value=None, driver=self,
-                                             container_name=container_name)
-
-    def get_object(self, container_name, object_name):
-        container = self.get_container(container_name=container_name)
-        objects = self.iterate_container_objects(container=container)
-
-        obj = next((obj for obj in objects if obj.name == object_name), None)
-
-        if obj is not None:
-            return obj
-        else:
-            raise ObjectDoesNotExistError(value=None, driver=self,
-                                          object_name=object_name)
-
-    def create_container(self, container_name, ex_type='allPrivate'):
-        data = {}
-        data['bucketName'] = container_name
-        data['bucketType'] = ex_type
-        resp = self.connection.request(action='b2_create_bucket',
-                                       data=data, method='POST',
-                                       include_account_id=True)
-        container = self._to_container(item=resp.object)
-        return container
-
-    def delete_container(self, container):
-        data = {}
-        data['bucketId'] = container.extra['id']
-        resp = self.connection.request(action='b2_delete_bucket',
-                                       data=data, method='POST',
-                                       include_account_id=True)
-        return resp.status == httplib.OK
-
-    def download_object(self, obj, destination_path, overwrite_existing=False,
-                        delete_on_failure=True):
-        action = self._get_object_download_path(container=obj.container,
-                                                obj=obj)
-        response = self.connection.download_request(action=action)
-
-        # TODO: Include metadata from response headers
-        return self._get_object(obj=obj, callback=self._save_object,
-                                response=response,
-                                callback_kwargs={
-                                    'obj': obj,
-                                    'response': response.response,
-                                    'destination_path': destination_path,
-                                    'overwrite_existing': overwrite_existing,
-                                    'delete_on_failure': delete_on_failure
-                                },
-                                success_status_code=httplib.OK)
-
-    def download_object_as_stream(self, obj, chunk_size=None):
-        action = self._get_object_download_path(container=obj.container,
-                                                obj=obj)
-        response = self.connection.download_request(action=action)
-
-        return self._get_object(obj=obj, callback=read_in_chunks,
-                                response=response,
-                                callback_kwargs={'iterator': response.response,
-                                                 'chunk_size': chunk_size},
-                                success_status_code=httplib.OK)
-
-    def upload_object(self, file_path, container, object_name, extra=None,
-                      verify_hash=True, headers=None):
-        """
-        Upload an object.
-
-        Note: This will overwrite a file with the same name if it already exists.
-        """
-        # Note: We don't use any of the base driver functions since Backblaze
-        # API requires you to provide the SHA1 hash upfront and the base methods
-        # don't support that
-
-        with open(file_path, 'rb') as fp:
-            iterator = iter(fp)
-            iterator = read_in_chunks(iterator=iterator)
-            data = exhaust_iterator(iterator=iterator)
-
-        obj = self._perform_upload(data=data, container=container,
-                                   object_name=object_name,
-                                   extra=extra,
-                                   verify_hash=verify_hash,
-                                   headers=headers)
-
-        return obj
-
-    def upload_object_via_stream(self, iterator, container, object_name,
-                                 extra=None, headers=None):
-        """
-        Upload an object.
-
-        Note: Backblaze does not yet support uploading via stream,
-        so this calls upload_object internally requiring the object data
-        to be loaded into memory at once
-        """
-
-        iterator = read_in_chunks(iterator=iterator)
-        data = exhaust_iterator(iterator=iterator)
-
-        obj = self._perform_upload(data=data, container=container,
-                                   object_name=object_name,
-                                   extra=extra,
-                                   headers=headers)
-
-        return obj
-
-    def delete_object(self, obj):
-        data = {}
-        data['fileName'] = obj.name
-        data['fileId'] = obj.extra['fileId']
-        resp = self.connection.request(action='b2_delete_file_version',
-                                       data=data, method='POST')
-        return resp.status == httplib.OK
-
-    def ex_get_object(self, object_id):
-        params = {}
-        params['fileId'] = object_id
-        resp = self.connection.request(action='b2_get_file_info',
-                                       method='GET',
-                                       params=params)
-        obj = self._to_object(item=resp.object, container=None)
-        return obj
-
-    def ex_hide_object(self, container_id, object_name):
-        data = {}
-        data['bucketId'] = container_id
-        data['fileName'] = object_name
-        resp = self.connection.request(action='b2_hide_file',
-                                       data=data, method='POST')
-        obj = self._to_object(item=resp.object, container=None)
-        return obj
-
-    def ex_list_object_versions(self, container_id, ex_start_file_name=None,
-                                ex_start_file_id=None, ex_max_file_count=None):
-        params = {}
-        params['bucketId'] = container_id
-
-        if ex_start_file_name:
-            params['startFileName'] = ex_start_file_name
-
-        if ex_start_file_id:
-            params['startFileId'] = ex_start_file_id
-
-        if ex_max_file_count:
-            params['maxFileCount'] = ex_max_file_count
-
-        resp = self.connection.request(action='b2_list_file_versions',
-                                       params=params, method='GET')
-        objects = self._to_objects(data=resp.object, container=None)
-        return objects
-
-    def ex_get_upload_data(self, container_id):
-        """
-        Retrieve information used for uploading files (upload url, auth token,
-        etc).
-
-        :rtype: ``dict``
-        """
-        # TODO: This is static (AFAIK) so it could be cached
-        params = {}
-        params['bucketId'] = container_id
-        response = self.connection.request(action='b2_get_upload_url',
-                                           method='GET',
-                                           params=params)
-        return response.object
-
-    def ex_get_upload_url(self, container_id):
-        """
-        Retrieve URL used for file uploads.
-
-        :rtype: ``str``
-        """
-        result = self.ex_get_upload_data(container_id=container_id)
-        upload_url = result['uploadUrl']
-        return upload_url
-
-    def _to_containers(self, data):
-        result = []
-        for item in data['buckets']:
-            container = self._to_container(item=item)
-            result.append(container)
-
-        return result
-
-    def _to_container(self, item):
-        extra = {}
-        extra['id'] = item['bucketId']
-        extra['bucketType'] = item['bucketType']
-        container = Container(name=item['bucketName'], extra=extra,
-                              driver=self)
-        return container
-
-    def _to_objects(self, data, container):
-        result = []
-        for item in data['files']:
-            obj = self._to_object(item=item, container=container)
-            result.append(obj)
-
-        return result
-
-    def _to_object(self, item, container=None):
-        extra = {}
-        extra['fileId'] = item['fileId']
-        extra['uploadTimestamp'] = item.get('uploadTimestamp', None)
-        size = item.get('size', item.get('contentLength', None))
-        hash = item.get('contentSha1', None)
-        meta_data = item.get('fileInfo', {})
-        obj = Object(name=item['fileName'], size=size, hash=hash, extra=extra,
-                     meta_data=meta_data, container=container, driver=self)
-        return obj
-
-    def _get_object_download_path(self, container, obj):
-        """
-        Return a path used in the download requests.
-
-        :rtype: ``str``
-        """
-        path = container.name + '/' + obj.name
-        return path
-
-    def _perform_upload(self, data, container, object_name, extra=None,
-                        verify_hash=True, headers=None):
-
-        if isinstance(data, str):
-            data = bytearray(data)
-
-        object_name = sanitize_object_name(object_name)
-
-        extra = extra or {}
-        content_type = extra.get('content_type', 'b2/x-auto')
-        meta_data = extra.get('meta_data', {})
-
-        # Note: Backblaze API doesn't support chunked encoding and we need to
-        # provide Content-Length up front (this is done inside _upload_object):/
-        headers = headers or {}
-        headers['X-Bz-File-Name'] = object_name
-        headers['Content-Type'] = content_type
-
-        sha1 = hashlib.sha1()
-        sha1.update(b(data))
-        headers['X-Bz-Content-Sha1'] = sha1.hexdigest()
-
-        # Include optional meta-data (up to 10 items)
-        for key, value in meta_data:
-            # TODO: Encode / escape key
-            headers['X-Bz-Info-%s' % (key)] = value
-
-        upload_data = self.ex_get_upload_data(
-            container_id=container.extra['id'])
-        upload_token = upload_data['authorizationToken']
-        parsed_url = urlparse.urlparse(upload_data['uploadUrl'])
-
-        upload_host = parsed_url.netloc
-        request_path = parsed_url.path
-
-        response = self.connection.upload_request(action=request_path,
-                                                  headers=headers,
-                                                  upload_host=upload_host,
-                                                  auth_token=upload_token,
-                                                  data=data)
-
-        if response.status == httplib.OK:
-            obj = self._to_object(item=response.object, container=container)
-            return obj
-        else:
-            body = response.response.read()
-            raise LibcloudError('Upload failed. status_code=%s, body=%s' %
-                                (response.status, body), driver=self)

http://git-wip-us.apache.org/repos/asf/libcloud/blob/8afcda91/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/cloudfiles.py
----------------------------------------------------------------------
diff --git a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/cloudfiles.py b/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/cloudfiles.py
deleted file mode 100644
index 2502d42..0000000
--- a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/cloudfiles.py
+++ /dev/null
@@ -1,972 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from hashlib import sha1
-import hmac
-import os
-from time import time
-
-from libcloud.utils.py3 import httplib
-from libcloud.utils.py3 import urlencode
-
-try:
-    import simplejson as json
-except ImportError:
-    import json
-
-from libcloud.utils.py3 import PY3
-from libcloud.utils.py3 import b
-from libcloud.utils.py3 import urlquote
-
-if PY3:
-    from io import FileIO as file
-
-from libcloud.utils.files import read_in_chunks
-from libcloud.common.types import MalformedResponseError, LibcloudError
-from libcloud.common.base import Response, RawResponse
-
-from libcloud.storage.providers import Provider
-from libcloud.storage.base import Object, Container, StorageDriver
-from libcloud.storage.types import ContainerAlreadyExistsError
-from libcloud.storage.types import ContainerDoesNotExistError
-from libcloud.storage.types import ContainerIsNotEmptyError
-from libcloud.storage.types import ObjectDoesNotExistError
-from libcloud.storage.types import ObjectHashMismatchError
-from libcloud.storage.types import InvalidContainerNameError
-from libcloud.common.openstack import OpenStackBaseConnection
-from libcloud.common.openstack import OpenStackDriverMixin
-
-from libcloud.common.rackspace import AUTH_URL
-
-CDN_HOST = 'cdn.clouddrive.com'
-API_VERSION = 'v1.0'
-
-# Keys which are used to select a correct endpoint from the service catalog.
-INTERNAL_ENDPOINT_KEY = 'internalURL'
-PUBLIC_ENDPOINT_KEY = 'publicURL'
-
-
-class CloudFilesResponse(Response):
-    valid_response_codes = [httplib.NOT_FOUND, httplib.CONFLICT]
-
-    def success(self):
-        i = int(self.status)
-        return i >= 200 and i <= 299 or i in self.valid_response_codes
-
-    def parse_body(self):
-        if not self.body:
-            return None
-
-        if 'content-type' in self.headers:
-            key = 'content-type'
-        elif 'Content-Type' in self.headers:
-            key = 'Content-Type'
-        else:
-            raise LibcloudError('Missing content-type header')
-
-        content_type = self.headers[key]
-        if content_type.find(';') != -1:
-            content_type = content_type.split(';')[0]
-
-        if content_type == 'application/json':
-            try:
-                data = json.loads(self.body)
-            except:
-                raise MalformedResponseError('Failed to parse JSON',
-                                             body=self.body,
-                                             driver=CloudFilesStorageDriver)
-        elif content_type == 'text/plain':
-            data = self.body
-        else:
-            data = self.body
-
-        return data
-
-
-class CloudFilesRawResponse(CloudFilesResponse, RawResponse):
-    pass
-
-
-class OpenStackSwiftConnection(OpenStackBaseConnection):
-    """
-    Connection class for the OpenStack Swift endpoint.
-    """
-
-    responseCls = CloudFilesResponse
-    rawResponseCls = CloudFilesRawResponse
-
-    auth_url = AUTH_URL
-    _auth_version = '1.0'
-
-    # TODO: Reverse the relationship - Swift -> CloudFiles
-    def __init__(self, user_id, key, secure=True, **kwargs):
-        # Ignore this for now
-        kwargs.pop('use_internal_url', None)
-        super(OpenStackSwiftConnection, self).__init__(user_id, key,
-                                                       secure=secure,
-                                                       **kwargs)
-        self.api_version = API_VERSION
-        self.accept_format = 'application/json'
-
-        self._service_type = self._ex_force_service_type or 'object-store'
-        self._service_name = self._ex_force_service_name or 'swift'
-
-        if self._ex_force_service_region:
-            self._service_region = self._ex_force_service_region
-        else:
-            self._service_region = None
-
-    def get_endpoint(self, *args, **kwargs):
-        if '2.0' in self._auth_version:
-            endpoint = self.service_catalog.get_endpoint(
-                service_type=self._service_type,
-                name=self._service_name,
-                region=self._service_region)
-        elif ('1.1' in self._auth_version) or ('1.0' in self._auth_version):
-            endpoint = self.service_catalog.get_endpoint(
-                name=self._service_name, region=self._service_region)
-
-        if endpoint:
-            return endpoint.url
-        else:
-            raise LibcloudError('Could not find specified endpoint')
-
-    def request(self, action, params=None, data='', headers=None, method='GET',
-                raw=False, cdn_request=False):
-        if not headers:
-            headers = {}
-        if not params:
-            params = {}
-
-        self.cdn_request = cdn_request
-        params['format'] = 'json'
-
-        if method in ['POST', 'PUT'] and 'Content-Type' not in headers:
-            headers.update({'Content-Type': 'application/json; charset=UTF-8'})
-
-        return super(OpenStackSwiftConnection, self).request(
-            action=action,
-            params=params, data=data,
-            method=method, headers=headers,
-            raw=raw)
-
-
-class CloudFilesConnection(OpenStackSwiftConnection):
-    """
-    Base connection class for the Cloudfiles driver.
-    """
-
-    responseCls = CloudFilesResponse
-    rawResponseCls = CloudFilesRawResponse
-
-    auth_url = AUTH_URL
-    _auth_version = '2.0'
-
-    def __init__(self, user_id, key, secure=True,
-                 use_internal_url=False, **kwargs):
-        super(CloudFilesConnection, self).__init__(user_id, key, secure=secure,
-                                                   **kwargs)
-        self.api_version = API_VERSION
-        self.accept_format = 'application/json'
-        self.cdn_request = False
-        self.use_internal_url = use_internal_url
-
-    def get_endpoint(self):
-        region = self._ex_force_service_region.upper()
-
-        if self.use_internal_url:
-            endpoint_type = 'internal'
-        else:
-            endpoint_type = 'external'
-
-        if '2.0' in self._auth_version:
-            ep = self.service_catalog.get_endpoint(
-                service_type='object-store',
-                name='cloudFiles',
-                region=region,
-                endpoint_type=endpoint_type)
-            cdn_ep = self.service_catalog.get_endpoint(
-                service_type='rax:object-cdn',
-                name='cloudFilesCDN',
-                region=region,
-                endpoint_type=endpoint_type)
-        else:
-            raise LibcloudError(
-                'Auth version "%s" not supported' % (self._auth_version))
-
-        # if this is a CDN request, return the cdn url instead
-        if self.cdn_request:
-            ep = cdn_ep
-
-        if not ep or not ep.url:
-            raise LibcloudError('Could not find specified endpoint')
-
-        return ep.url
-
-    def request(self, action, params=None, data='', headers=None, method='GET',
-                raw=False, cdn_request=False):
-        if not headers:
-            headers = {}
-        if not params:
-            params = {}
-
-        self.cdn_request = cdn_request
-        params['format'] = 'json'
-
-        if method in ['POST', 'PUT'] and 'Content-Type' not in headers:
-            headers.update({'Content-Type': 'application/json; charset=UTF-8'})
-
-        return super(CloudFilesConnection, self).request(
-            action=action,
-            params=params, data=data,
-            method=method, headers=headers,
-            raw=raw, cdn_request=cdn_request)
-
-
-class CloudFilesStorageDriver(StorageDriver, OpenStackDriverMixin):
-    """
-    CloudFiles driver.
-    """
-    name = 'CloudFiles'
-    website = 'http://www.rackspace.com/'
-
-    connectionCls = CloudFilesConnection
-    hash_type = 'md5'
-    supports_chunked_encoding = True
-
-    def __init__(self, key, secret=None, secure=True, host=None, port=None,
-                 region='ord', use_internal_url=False, **kwargs):
-        """
-        @inherits:  :class:`StorageDriver.__init__`
-
-        :param region: ID of the region which should be used.
-        :type region: ``str``
-        """
-        # This is here for backward compatibility
-        if 'ex_force_service_region' in kwargs:
-            region = kwargs['ex_force_service_region']
-
-        self.use_internal_url = use_internal_url
-        OpenStackDriverMixin.__init__(self, (), **kwargs)
-        super(CloudFilesStorageDriver, self).__init__(key=key, secret=secret,
-                                                      secure=secure, host=host,
-                                                      port=port, region=region,
-                                                      **kwargs)
-
-    @classmethod
-    def list_regions(cls):
-        return ['ord', 'dfw', 'iad', 'lon', 'hkg', 'syd']
-
-    def iterate_containers(self):
-        response = self.connection.request('')
-
-        if response.status == httplib.NO_CONTENT:
-            return []
-        elif response.status == httplib.OK:
-            return self._to_container_list(json.loads(response.body))
-
-        raise LibcloudError('Unexpected status code: %s' % (response.status))
-
-    def get_container(self, container_name):
-        container_name_encoded = self._encode_container_name(container_name)
-        response = self.connection.request('/%s' % (container_name_encoded),
-                                           method='HEAD')
-
-        if response.status == httplib.NO_CONTENT:
-            container = self._headers_to_container(
-                container_name, response.headers)
-            return container
-        elif response.status == httplib.NOT_FOUND:
-            raise ContainerDoesNotExistError(None, self, container_name)
-
-        raise LibcloudError('Unexpected status code: %s' % (response.status))
-
-    def get_object(self, container_name, object_name):
-        container = self.get_container(container_name)
-        container_name_encoded = self._encode_container_name(container_name)
-        object_name_encoded = self._encode_object_name(object_name)
-
-        response = self.connection.request('/%s/%s' % (container_name_encoded,
-                                                       object_name_encoded),
-                                           method='HEAD')
-        if response.status in [httplib.OK, httplib.NO_CONTENT]:
-            obj = self._headers_to_object(
-                object_name, container, response.headers)
-            return obj
-        elif response.status == httplib.NOT_FOUND:
-            raise ObjectDoesNotExistError(None, self, object_name)
-
-        raise LibcloudError('Unexpected status code: %s' % (response.status))
-
-    def get_container_cdn_url(self, container):
-        # pylint: disable=unexpected-keyword-arg
-        container_name_encoded = self._encode_container_name(container.name)
-        response = self.connection.request('/%s' % (container_name_encoded),
-                                           method='HEAD',
-                                           cdn_request=True)
-
-        if response.status == httplib.NO_CONTENT:
-            cdn_url = response.headers['x-cdn-uri']
-            return cdn_url
-        elif response.status == httplib.NOT_FOUND:
-            raise ContainerDoesNotExistError(value='',
-                                             container_name=container.name,
-                                             driver=self)
-
-        raise LibcloudError('Unexpected status code: %s' % (response.status))
-
-    def get_object_cdn_url(self, obj):
-        container_cdn_url = self.get_container_cdn_url(container=obj.container)
-        return '%s/%s' % (container_cdn_url, obj.name)
-
-    def enable_container_cdn(self, container, ex_ttl=None):
-        """
-        @inherits: :class:`StorageDriver.enable_container_cdn`
-
-        :param ex_ttl: cache time to live
-        :type ex_ttl: ``int``
-        """
-        container_name = self._encode_container_name(container.name)
-        headers = {'X-CDN-Enabled': 'True'}
-
-        if ex_ttl:
-            headers['X-TTL'] = ex_ttl
-
-        # pylint: disable=unexpected-keyword-arg
-        response = self.connection.request('/%s' % (container_name),
-                                           method='PUT',
-                                           headers=headers,
-                                           cdn_request=True)
-
-        return response.status in [httplib.CREATED, httplib.ACCEPTED]
-
    def create_container(self, container_name):
        """
        Create a new container.

        :param container_name: Container name.
        :type container_name: ``str``

        :rtype: :class:`Container`
        """
        container_name_encoded = self._encode_container_name(container_name)
        response = self.connection.request(
            '/%s' % (container_name_encoded), method='PUT')

        if response.status == httplib.CREATED:
            # Container was created; a fresh container starts out empty.
            extra = {'object_count': 0}
            container = Container(name=container_name,
                                  extra=extra, driver=self)

            return container
        elif response.status == httplib.ACCEPTED:
            # ACCEPTED on container PUT means the container already
            # existed, hence the "already exists" error below.
            error = ContainerAlreadyExistsError(None, self, container_name)
            raise error

        raise LibcloudError('Unexpected status code: %s' % (response.status))
-
-    def delete_container(self, container):
-        name = self._encode_container_name(container.name)
-
-        # Only empty container can be deleted
-        response = self.connection.request('/%s' % (name), method='DELETE')
-
-        if response.status == httplib.NO_CONTENT:
-            return True
-        elif response.status == httplib.NOT_FOUND:
-            raise ContainerDoesNotExistError(value='',
-                                             container_name=name, driver=self)
-        elif response.status == httplib.CONFLICT:
-            # @TODO: Add "delete_all_objects" parameter?
-            raise ContainerIsNotEmptyError(value='',
-                                           container_name=name, driver=self)
-
-    def download_object(self, obj, destination_path, overwrite_existing=False,
-                        delete_on_failure=True):
-        container_name = obj.container.name
-        object_name = obj.name
-        response = self.connection.request('/%s/%s' % (container_name,
-                                                       object_name),
-                                           method='GET', raw=True)
-
-        return self._get_object(
-            obj=obj, callback=self._save_object, response=response,
-            callback_kwargs={'obj': obj,
-                             'response': response.response,
-                             'destination_path': destination_path,
-                             'overwrite_existing': overwrite_existing,
-                             'delete_on_failure': delete_on_failure},
-            success_status_code=httplib.OK)
-
-    def download_object_as_stream(self, obj, chunk_size=None):
-        container_name = obj.container.name
-        object_name = obj.name
-        response = self.connection.request('/%s/%s' % (container_name,
-                                                       object_name),
-                                           method='GET', raw=True)
-
-        return self._get_object(obj=obj, callback=read_in_chunks,
-                                response=response,
-                                callback_kwargs={'iterator': response.response,
-                                                 'chunk_size': chunk_size},
-                                success_status_code=httplib.OK)
-
-    def upload_object(self, file_path, container, object_name, extra=None,
-                      verify_hash=True, headers=None):
-        """
-        Upload an object.
-
-        Note: This will override file with a same name if it already exists.
-        """
-        upload_func = self._upload_file
-        upload_func_kwargs = {'file_path': file_path}
-
-        return self._put_object(container=container, object_name=object_name,
-                                upload_func=upload_func,
-                                upload_func_kwargs=upload_func_kwargs,
-                                extra=extra, file_path=file_path,
-                                verify_hash=verify_hash, headers=headers)
-
-    def upload_object_via_stream(self, iterator,
-                                 container, object_name, extra=None,
-                                 headers=None):
-        if isinstance(iterator, file):
-            iterator = iter(iterator)
-
-        upload_func = self._stream_data
-        upload_func_kwargs = {'iterator': iterator}
-
-        return self._put_object(container=container, object_name=object_name,
-                                upload_func=upload_func,
-                                upload_func_kwargs=upload_func_kwargs,
-                                extra=extra, iterator=iterator,
-                                headers=headers)
-
-    def delete_object(self, obj):
-        container_name = self._encode_container_name(obj.container.name)
-        object_name = self._encode_object_name(obj.name)
-
-        response = self.connection.request(
-            '/%s/%s' % (container_name, object_name), method='DELETE')
-
-        if response.status == httplib.NO_CONTENT:
-            return True
-        elif response.status == httplib.NOT_FOUND:
-            raise ObjectDoesNotExistError(value='', object_name=object_name,
-                                          driver=self)
-
-        raise LibcloudError('Unexpected status code: %s' % (response.status))
-
-    def ex_purge_object_from_cdn(self, obj, email=None):
-        """
-        Purge edge cache for the specified object.
-
-        :param email: Email where a notification will be sent when the job
-        completes. (optional)
-        :type email: ``str``
-        """
-        container_name = self._encode_container_name(obj.container.name)
-        object_name = self._encode_object_name(obj.name)
-        headers = {'X-Purge-Email': email} if email else {}
-
-        # pylint: disable=unexpected-keyword-arg
-        response = self.connection.request('/%s/%s' % (container_name,
-                                                       object_name),
-                                           method='DELETE',
-                                           headers=headers,
-                                           cdn_request=True)
-
-        return response.status == httplib.NO_CONTENT
-
-    def ex_get_meta_data(self):
-        """
-        Get meta data
-
-        :rtype: ``dict``
-        """
-        response = self.connection.request('', method='HEAD')
-
-        if response.status == httplib.NO_CONTENT:
-            container_count = response.headers.get(
-                'x-account-container-count', 'unknown')
-            object_count = response.headers.get(
-                'x-account-object-count', 'unknown')
-            bytes_used = response.headers.get(
-                'x-account-bytes-used', 'unknown')
-            temp_url_key = response.headers.get(
-                'x-account-meta-temp-url-key', None)
-
-            return {'container_count': int(container_count),
-                    'object_count': int(object_count),
-                    'bytes_used': int(bytes_used),
-                    'temp_url_key': temp_url_key}
-
-        raise LibcloudError('Unexpected status code: %s' % (response.status))
-
-    def ex_multipart_upload_object(self, file_path, container, object_name,
-                                   chunk_size=33554432, extra=None,
-                                   verify_hash=True):
-        object_size = os.path.getsize(file_path)
-        if object_size < chunk_size:
-            return self.upload_object(file_path, container, object_name,
-                                      extra=extra, verify_hash=verify_hash)
-
-        iter_chunk_reader = FileChunkReader(file_path, chunk_size)
-
-        for index, iterator in enumerate(iter_chunk_reader):
-            self._upload_object_part(container=container,
-                                     object_name=object_name,
-                                     part_number=index,
-                                     iterator=iterator,
-                                     verify_hash=verify_hash)
-
-        return self._upload_object_manifest(container=container,
-                                            object_name=object_name,
-                                            extra=extra,
-                                            verify_hash=verify_hash)
-
-    def ex_enable_static_website(self, container, index_file='index.html'):
-        """
-        Enable serving a static website.
-
-        :param container: Container instance
-        :type container: :class:`Container`
-
-        :param index_file: Name of the object which becomes an index page for
-        every sub-directory in this container.
-        :type index_file: ``str``
-
-        :rtype: ``bool``
-        """
-        container_name = container.name
-        headers = {'X-Container-Meta-Web-Index': index_file}
-
-        # pylint: disable=unexpected-keyword-arg
-        response = self.connection.request('/%s' % (container_name),
-                                           method='POST',
-                                           headers=headers,
-                                           cdn_request=False)
-
-        return response.status in [httplib.CREATED, httplib.ACCEPTED]
-
-    def ex_set_error_page(self, container, file_name='error.html'):
-        """
-        Set a custom error page which is displayed if file is not found and
-        serving of a static website is enabled.
-
-        :param container: Container instance
-        :type container: :class:`Container`
-
-        :param file_name: Name of the object which becomes the error page.
-        :type file_name: ``str``
-
-        :rtype: ``bool``
-        """
-        container_name = container.name
-        headers = {'X-Container-Meta-Web-Error': file_name}
-
-        # pylint: disable=unexpected-keyword-arg
-        response = self.connection.request('/%s' % (container_name),
-                                           method='POST',
-                                           headers=headers,
-                                           cdn_request=False)
-
-        return response.status in [httplib.CREATED, httplib.ACCEPTED]
-
-    def ex_set_account_metadata_temp_url_key(self, key):
-        """
-        Set the metadata header X-Account-Meta-Temp-URL-Key on your Cloud
-        Files account.
-
-        :param key: X-Account-Meta-Temp-URL-Key
-        :type key: ``str``
-
-        :rtype: ``bool``
-        """
-        headers = {'X-Account-Meta-Temp-URL-Key': key}
-
-        # pylint: disable=unexpected-keyword-arg
-        response = self.connection.request('',
-                                           method='POST',
-                                           headers=headers,
-                                           cdn_request=False)
-
-        return response.status in [httplib.OK, httplib.NO_CONTENT,
-                                   httplib.CREATED, httplib.ACCEPTED]
-
    def ex_get_object_temp_url(self, obj, method='GET', timeout=60):
        """
        Create a temporary URL to allow others to retrieve or put objects
        in your Cloud Files account for as long or as short a time as you
        wish.  This method is specifically for allowing users to retrieve
        or update an object.

        :param obj: The object that you wish to make temporarily public
        :type obj: :class:`Object`

        :param method: Which method you would like to allow, 'PUT' or 'GET'
        :type method: ``str``

        :param timeout: Time (in seconds) after which you want the TempURL
        to expire.
        :type timeout: ``int``

        :return: The signed temporary URL.
        :rtype: ``str``
        """
        # pylint: disable=no-member
        # Ensure self.connection.request_path is populated before building
        # the path that gets signed below.
        self.connection._populate_hosts_and_request_paths()
        expires = int(time() + timeout)
        path = '%s/%s/%s' % (self.connection.request_path,
                             obj.container.name, obj.name)
        try:
            # The account-wide temp URL key must have been set previously
            # (see ex_set_account_metadata_temp_url_key).
            key = self.ex_get_meta_data()['temp_url_key']
            assert key is not None
        except Exception:
            raise KeyError('You must first set the ' +
                           'X-Account-Meta-Temp-URL-Key header on your ' +
                           'Cloud Files account using ' +
                           'ex_set_account_metadata_temp_url_key before ' +
                           'you can use this method.')
        # Signature is HMAC-SHA1 over "<method>\n<expires>\n<path>" keyed
        # by the account temp URL key.
        hmac_body = '%s\n%s\n%s' % (method, expires, path)
        sig = hmac.new(b(key), b(hmac_body), sha1).hexdigest()
        params = urlencode({'temp_url_sig': sig,
                            'temp_url_expires': expires})

        temp_url = 'https://%s/%s/%s?%s' %\
                   (self.connection.host + self.connection.request_path,
                    obj.container.name, obj.name, params)

        return temp_url
-
-    def _upload_object_part(self, container, object_name, part_number,
-                            iterator, verify_hash=True):
-        upload_func = self._stream_data
-        upload_func_kwargs = {'iterator': iterator}
-        part_name = object_name + '/%08d' % part_number
-        extra = {'content_type': 'application/octet-stream'}
-
-        self._put_object(container=container,
-                         object_name=part_name,
-                         upload_func=upload_func,
-                         upload_func_kwargs=upload_func_kwargs,
-                         extra=extra, iterator=iterator,
-                         verify_hash=verify_hash)
-
    def _upload_object_manifest(self, container, object_name, extra=None,
                                verify_hash=True):
        """
        Upload the zero-byte manifest object which ties together the
        previously uploaded part objects sharing the manifest's name as a
        prefix.

        :rtype: :class:`Object`
        """
        extra = extra or {}
        meta_data = extra.get('meta_data')

        container_name_encoded = self._encode_container_name(container.name)
        object_name_encoded = self._encode_object_name(object_name)
        request_path = '/%s/%s' % (container_name_encoded, object_name_encoded)

        # pylint: disable=no-member
        # X-Object-Manifest points at the common prefix of the part objects.
        headers = {'X-Auth-Token': self.connection.auth_token,
                   'X-Object-Manifest': '%s/%s/' %
                                        (container_name_encoded,
                                         object_name_encoded)}

        # The manifest object itself carries no payload.
        data = ''
        response = self.connection.request(request_path,
                                           method='PUT', data=data,
                                           headers=headers, raw=True)

        object_hash = None

        if verify_hash:
            # The server's etag must match the hash of the (empty) body
            # that was sent.
            hash_function = self._get_hash_function()
            hash_function.update(b(data))
            data_hash = hash_function.hexdigest()
            object_hash = response.headers.get('etag')

            if object_hash != data_hash:
                raise ObjectHashMismatchError(
                    value=('MD5 hash checksum does not match (expected=%s, ' +
                           'actual=%s)') %
                          (data_hash, object_hash),
                    object_name=object_name, driver=self)

        obj = Object(name=object_name, size=0, hash=object_hash, extra=None,
                     meta_data=meta_data, container=container, driver=self)

        return obj
-
-    def list_container_objects(self, container, ex_prefix=None):
-        """
-        Return a list of objects for the given container.
-
-        :param container: Container instance.
-        :type container: :class:`Container`
-
-        :param ex_prefix: Only get objects with names starting with ex_prefix
-        :type ex_prefix: ``str``
-
-        :return: A list of Object instances.
-        :rtype: ``list`` of :class:`Object`
-        """
-        return list(self.iterate_container_objects(container,
-                                                   ex_prefix=ex_prefix))
-
    def iterate_container_objects(self, container, ex_prefix=None):
        """
        Return a generator of objects for the given container.

        :param container: Container instance
        :type container: :class:`Container`

        :param ex_prefix: Only get objects with names starting with ex_prefix
        :type ex_prefix: ``str``

        :return: A generator of Object instances.
        :rtype: ``generator`` of :class:`Object`
        """
        params = {}
        if ex_prefix:
            params['prefix'] = ex_prefix

        # Results are paginated: keep requesting pages until an empty page
        # comes back, passing the last seen object name as the "marker".
        while True:
            container_name_encoded = \
                self._encode_container_name(container.name)
            response = self.connection.request('/%s' %
                                               (container_name_encoded),
                                               params=params)

            if response.status == httplib.NO_CONTENT:
                # Empty or non-existent container
                break
            elif response.status == httplib.OK:
                objects = self._to_object_list(json.loads(response.body),
                                               container)

                if len(objects) == 0:
                    break

                for obj in objects:
                    yield obj
                # NOTE: relies on the for-loop variable "obj" still being
                # bound to the last yielded object after the loop ends.
                params['marker'] = obj.name

            else:
                raise LibcloudError('Unexpected status code: %s' %
                                    (response.status))
-
    def _put_object(self, container, object_name, upload_func,
                    upload_func_kwargs, extra=None, file_path=None,
                    iterator=None, verify_hash=True, headers=None):
        """
        Shared implementation for uploading an object from either a file
        path or an iterator, with optional hash verification against the
        server-returned etag header (the mismatch error reports MD5).

        :rtype: :class:`Object`
        """
        extra = extra or {}
        container_name_encoded = self._encode_container_name(container.name)
        object_name_encoded = self._encode_object_name(object_name)
        content_type = extra.get('content_type', None)
        meta_data = extra.get('meta_data', None)
        content_disposition = extra.get('content_disposition', None)

        headers = headers or {}
        if meta_data:
            # User metadata is transmitted as X-Object-Meta-* headers.
            for key, value in list(meta_data.items()):
                key = 'X-Object-Meta-%s' % (key)
                headers[key] = value

        if content_disposition is not None:
            headers['Content-Disposition'] = content_disposition

        request_path = '/%s/%s' % (container_name_encoded, object_name_encoded)
        result_dict = self._upload_object(
            object_name=object_name, content_type=content_type,
            upload_func=upload_func, upload_func_kwargs=upload_func_kwargs,
            request_path=request_path, request_method='PUT',
            headers=headers, file_path=file_path, iterator=iterator)

        response = result_dict['response'].response
        bytes_transferred = result_dict['bytes_transferred']
        server_hash = result_dict['response'].headers.get('etag', None)

        # Order matters here: a 417 means the server rejected the request
        # outright; missing/mismatching etag checks come before the
        # success branch so bad uploads are never reported as created.
        if response.status == httplib.EXPECTATION_FAILED:
            raise LibcloudError(value='Missing content-type header',
                                driver=self)
        elif verify_hash and not server_hash:
            raise LibcloudError(value='Server didn\'t return etag',
                                driver=self)
        elif (verify_hash and result_dict['data_hash'] != server_hash):
            raise ObjectHashMismatchError(
                value=('MD5 hash checksum does not match (expected=%s, ' +
                       'actual=%s)') % (result_dict['data_hash'], server_hash),
                object_name=object_name, driver=self)
        elif response.status == httplib.CREATED:
            obj = Object(
                name=object_name, size=bytes_transferred, hash=server_hash,
                extra=None, meta_data=meta_data, container=container,
                driver=self)

            return obj
        else:
            # @TODO: Add test case for this condition (probably 411)
            raise LibcloudError('status_code=%s' % (response.status),
                                driver=self)
-
-    def _encode_container_name(self, name):
-        """
-        Encode container name so it can be used as part of the HTTP request.
-        """
-        if name.startswith('/'):
-            name = name[1:]
-        name = urlquote(name)
-
-        if name.find('/') != -1:
-            raise InvalidContainerNameError(value='Container name cannot'
-                                                  ' contain slashes',
-                                            container_name=name, driver=self)
-
-        if len(name) > 256:
-            raise InvalidContainerNameError(
-                value='Container name cannot be longer than 256 bytes',
-                container_name=name, driver=self)
-
-        return name
-
-    def _encode_object_name(self, name):
-        name = urlquote(name)
-        return name
-
-    def _to_container_list(self, response):
-        # @TODO: Handle more than 10k containers - use "lazy list"?
-        for container in response:
-            extra = {'object_count': int(container['count']),
-                     'size': int(container['bytes'])}
-            yield Container(name=container['name'], extra=extra, driver=self)
-
-    def _to_object_list(self, response, container):
-        objects = []
-
-        for obj in response:
-            name = obj['name']
-            size = int(obj['bytes'])
-            hash = obj['hash']
-            extra = {'content_type': obj['content_type'],
-                     'last_modified': obj['last_modified']}
-            objects.append(Object(
-                name=name, size=size, hash=hash, extra=extra,
-                meta_data=None, container=container, driver=self))
-
-        return objects
-
-    def _headers_to_container(self, name, headers):
-        size = int(headers.get('x-container-bytes-used', 0))
-        object_count = int(headers.get('x-container-object-count', 0))
-
-        extra = {'object_count': object_count,
-                 'size': size}
-        container = Container(name=name, extra=extra, driver=self)
-        return container
-
-    def _headers_to_object(self, name, container, headers):
-        size = int(headers.pop('content-length', 0))
-        last_modified = headers.pop('last-modified', None)
-        etag = headers.pop('etag', None)
-        content_type = headers.pop('content-type', None)
-
-        meta_data = {}
-        for key, value in list(headers.items()):
-            if key.find('x-object-meta-') != -1:
-                key = key.replace('x-object-meta-', '')
-                meta_data[key] = value
-
-        extra = {'content_type': content_type, 'last_modified': last_modified}
-
-        obj = Object(name=name, size=size, hash=etag, extra=extra,
-                     meta_data=meta_data, container=container, driver=self)
-        return obj
-
-    def _ex_connection_class_kwargs(self):
-        kwargs = self.openstack_connection_kwargs()
-        kwargs['ex_force_service_region'] = self.region
-        kwargs['use_internal_url'] = self.use_internal_url
-        return kwargs
-
-
class OpenStackSwiftStorageDriver(CloudFilesStorageDriver):
    """
    Storage driver for the OpenStack Swift.
    """
    type = Provider.CLOUDFILES_SWIFT
    name = 'OpenStack Swift'
    connectionCls = OpenStackSwiftConnection

    # TODO: Reverse the relationship - Swift -> CloudFiles

    def __init__(self, key, secret=None, secure=True, host=None, port=None,
                 region=None, **kwargs):
        # Nothing Swift-specific here; simply delegate to the CloudFiles
        # driver with an unset default region.
        super(OpenStackSwiftStorageDriver, self).__init__(
            key=key, secret=secret, secure=secure, host=host, port=port,
            region=region, **kwargs)
-
-
class FileChunkReader(object):
    """
    Iterate over a file as a sequence of ChunkStreamReader instances, each
    covering up to ``chunk_size`` bytes of the file.
    """

    def __init__(self, file_path, chunk_size):
        self.file_path = file_path
        self.total = os.path.getsize(file_path)
        self.chunk_size = chunk_size
        self.bytes_read = 0
        self.stop_iteration = False

    def __iter__(self):
        return self

    def next(self):
        if self.stop_iteration:
            raise StopIteration

        start = self.bytes_read
        # Clamp the final chunk to the file size; once the end of the file
        # is reached, the next call terminates iteration.
        end = min(start + self.chunk_size, self.total)
        if end == self.total:
            self.stop_iteration = True
        self.bytes_read += end - start
        return ChunkStreamReader(file_path=self.file_path,
                                 start_block=start,
                                 end_block=end,
                                 chunk_size=8192)

    def __next__(self):
        # Python 3 iterator protocol.
        return self.next()
-
-
class ChunkStreamReader(object):
    """
    Stream the byte range [start_block, end_block) of a file in pieces of
    at most ``chunk_size`` bytes. The underlying file handle is closed
    once iteration is exhausted.
    """

    def __init__(self, file_path, start_block, end_block, chunk_size):
        self.fd = open(file_path, 'rb')
        self.fd.seek(start_block)
        self.start_block = start_block
        self.end_block = end_block
        self.chunk_size = chunk_size
        self.bytes_read = 0
        self.stop_iteration = False

    def __iter__(self):
        return self

    def next(self):
        if self.stop_iteration:
            # Range was exhausted on the previous call; release the handle.
            self.fd.close()
            raise StopIteration

        remaining = (self.end_block - self.start_block) - self.bytes_read
        # Strictly-less comparison preserves the historical behavior of
        # yielding a final empty chunk when the range length is an exact
        # multiple of chunk_size.
        if remaining < self.chunk_size:
            block_size = remaining
            self.stop_iteration = True
        else:
            block_size = self.chunk_size

        data = self.fd.read(block_size)
        self.bytes_read += block_size
        return data

    def __next__(self):
        # Python 3 iterator protocol.
        return self.next()

http://git-wip-us.apache.org/repos/asf/libcloud/blob/8afcda91/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/dummy.py
----------------------------------------------------------------------
diff --git a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/dummy.py b/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/dummy.py
deleted file mode 100644
index affd265..0000000
--- a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/dummy.py
+++ /dev/null
@@ -1,490 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os.path
-import random
-import hashlib
-
-from libcloud.utils.py3 import PY3
-from libcloud.utils.py3 import b
-
-if PY3:
-    from io import FileIO as file
-
-from libcloud.common.types import LibcloudError
-
-from libcloud.storage.base import Object, Container, StorageDriver
-from libcloud.storage.types import ContainerAlreadyExistsError
-from libcloud.storage.types import ContainerDoesNotExistError
-from libcloud.storage.types import ContainerIsNotEmptyError
-from libcloud.storage.types import ObjectDoesNotExistError
-
-
class DummyFileObject(file):
    """
    File-like object which produces random data on demand.

    ``read`` yields ``yield_count`` chunks of ``chunk_len`` characters each;
    the dummy driver uses it to simulate object payloads.
    """

    def __init__(self, yield_count=5, chunk_len=10):
        # NOTE: file (io.FileIO on Python 3) __init__ is deliberately not
        # called - no real file backs this object.
        self._yield_count = yield_count
        self._chunk_len = chunk_len

    def read(self, size):
        """
        Generator yielding ``yield_count`` random chunks.

        :param size: Ignored; present for file-object API compatibility.
        """
        for _ in range(self._yield_count):
            yield self._get_chunk(self._chunk_len)
        # A generator signals exhaustion by returning; raising StopIteration
        # here would be a RuntimeError under PEP 479 (Python 3.7+).

    def _get_chunk(self, chunk_len):
        # Build a chunk of exactly ``chunk_len`` random characters. The
        # original iterated over an int (``for x in random.randint(...)``),
        # which raised TypeError and ignored chunk_len entirely.
        return ''.join([chr(random.randint(97, 120))
                        for _ in range(chunk_len)])

    def __len__(self):
        # Total number of characters read() will produce.
        return self._yield_count * self._chunk_len
-
-
class DummyIterator(object):
    """
    Iterator which wraps a list of items and MD5-hashes every value that is
    read through it.
    """

    def __init__(self, data=None):
        self.hash = hashlib.md5()
        self._data = data or []
        self._current_item = 0

    def get_md5_hash(self):
        """
        Return the hex digest of all the values returned so far.

        :rtype: ``str``
        """
        return self.hash.hexdigest()

    def __iter__(self):
        # Required by the iterator protocol; without it this object could
        # not be used in a for loop or passed to list()/iter().
        return self

    def next(self):
        if self._current_item == len(self._data):
            raise StopIteration

        value = self._data[self._current_item]
        self.hash.update(b(value))
        self._current_item += 1
        return value

    def __next__(self):
        return self.next()
-
-
class DummyStorageDriver(StorageDriver):
    """
    Dummy Storage driver.

    >>> from libcloud.storage.drivers.dummy import DummyStorageDriver
    >>> driver = DummyStorageDriver('key', 'secret')
    >>> container = driver.create_container(container_name='test container')
    >>> container
    <Container: name=test container, provider=Dummy Storage Provider>
    >>> container.name
    'test container'
    >>> container.extra['object_count']
    0
    """

    name = 'Dummy Storage Provider'
    website = 'http://example.com'

    def __init__(self, api_key, api_secret):
        """
        :param    api_key:    API key or username to be used (required)
        :type     api_key:    ``str``
        :param    api_secret: Secret password to be used (required)
        :type     api_secret: ``str``
        :rtype: ``None``
        """
        # In-memory registry: maps container name to a dict with keys
        # 'container' (Container), 'objects' (object name -> Object) and
        # 'cdn_url' (str). Credentials are ignored by this dummy driver.
        self._containers = {}

    def get_meta_data(self):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> driver.get_meta_data()['object_count']
        0
        >>> driver.get_meta_data()['container_count']
        0
        >>> driver.get_meta_data()['bytes_used']
        0
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container_name = 'test container 2'
        >>> container = driver.create_container(container_name=container_name)
        >>> obj = container.upload_object_via_stream(
        ...  object_name='test object', iterator=DummyFileObject(5, 10),
        ...  extra={})
        >>> driver.get_meta_data()['object_count']
        1
        >>> driver.get_meta_data()['container_count']
        2
        >>> driver.get_meta_data()['bytes_used']
        50

        :rtype: ``dict``
        """

        container_count = len(self._containers)
        object_count = sum([len(self._containers[container]['objects']) for
                            container in self._containers])

        bytes_used = 0
        for container in self._containers:
            objects = self._containers[container]['objects']
            for _, obj in objects.items():
                bytes_used += obj.size

        return {'container_count': int(container_count),
                'object_count': int(object_count),
                'bytes_used': int(bytes_used)}

    def iterate_containers(self):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> list(driver.iterate_containers())
        []
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container
        <Container: name=test container 1, provider=Dummy Storage Provider>
        >>> container.name
        'test container 1'
        >>> container_name = 'test container 2'
        >>> container = driver.create_container(container_name=container_name)
        >>> container
        <Container: name=test container 2, provider=Dummy Storage Provider>
        >>> container = driver.create_container(
        ...  container_name='test container 2')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ContainerAlreadyExistsError:
        >>> container_list=list(driver.iterate_containers())
        >>> sorted([c.name for c in container_list])
        ['test container 1', 'test container 2']

        @inherits: :class:`StorageDriver.iterate_containers`
        """

        for container in list(self._containers.values()):
            yield container['container']

    def list_container_objects(self, container):
        # Raises ContainerDoesNotExistError for an unknown container.
        self.get_container(container.name)

        # Bug fix: Container instances carry no ``objects`` attribute, so
        # the original ``return container.objects`` raised AttributeError.
        # Return the Object instances tracked for this container instead.
        return list(self._containers[container.name]['objects'].values())

    def get_container(self, container_name):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> driver.get_container('unknown') #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ContainerDoesNotExistError:
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container
        <Container: name=test container 1, provider=Dummy Storage Provider>
        >>> container.name
        'test container 1'
        >>> driver.get_container('test container 1')
        <Container: name=test container 1, provider=Dummy Storage Provider>

        @inherits: :class:`StorageDriver.get_container`
        """

        if container_name not in self._containers:
            raise ContainerDoesNotExistError(driver=self, value=None,
                                             container_name=container_name)

        return self._containers[container_name]['container']

    def get_container_cdn_url(self, container):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> driver.get_container('unknown') #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ContainerDoesNotExistError:
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container
        <Container: name=test container 1, provider=Dummy Storage Provider>
        >>> container.name
        'test container 1'
        >>> container.get_cdn_url()
        'http://www.test.com/container/test_container_1'

        @inherits: :class:`StorageDriver.get_container_cdn_url`
        """

        if container.name not in self._containers:
            raise ContainerDoesNotExistError(driver=self, value=None,
                                             container_name=container.name)

        return self._containers[container.name]['cdn_url']

    def get_object(self, container_name, object_name):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> driver.get_object('unknown', 'unknown')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ContainerDoesNotExistError:
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container
        <Container: name=test container 1, provider=Dummy Storage Provider>
        >>> driver.get_object(
        ...  'test container 1', 'unknown') #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ObjectDoesNotExistError:
        >>> obj = container.upload_object_via_stream(object_name='test object',
        ...      iterator=DummyFileObject(5, 10), extra={})
        >>> obj.name
        'test object'
        >>> obj.size
        50

        @inherits: :class:`StorageDriver.get_object`
        """

        self.get_container(container_name)
        container_objects = self._containers[container_name]['objects']
        if object_name not in container_objects:
            raise ObjectDoesNotExistError(object_name=object_name, value=None,
                                          driver=self)

        return container_objects[object_name]

    def get_object_cdn_url(self, obj):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container
        <Container: name=test container 1, provider=Dummy Storage Provider>
        >>> obj = container.upload_object_via_stream(
        ...      object_name='test object 5',
        ...      iterator=DummyFileObject(5, 10), extra={})
        >>> obj.name
        'test object 5'
        >>> obj.get_cdn_url()
        'http://www.test.com/object/test_object_5'

        @inherits: :class:`StorageDriver.get_object_cdn_url`
        """

        container_name = obj.container.name
        container_objects = self._containers[container_name]['objects']
        if obj.name not in container_objects:
            raise ObjectDoesNotExistError(object_name=obj.name, value=None,
                                          driver=self)

        return container_objects[obj.name].meta_data['cdn_url']

    def create_container(self, container_name):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container
        <Container: name=test container 1, provider=Dummy Storage Provider>
        >>> container = driver.create_container(
        ...    container_name='test container 1')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ContainerAlreadyExistsError:

        @inherits: :class:`StorageDriver.create_container`
        """

        if container_name in self._containers:
            raise ContainerAlreadyExistsError(container_name=container_name,
                                              value=None, driver=self)

        extra = {'object_count': 0}
        container = Container(name=container_name, extra=extra, driver=self)

        # Spaces are replaced with underscores when building the fake CDN
        # URL, mirroring get_container_cdn_url's doctest above.
        self._containers[container_name] = {'container': container,
                                            'objects': {},
                                            'cdn_url':
                                            'http://www.test.com/container/%s'
                                            %
                                            (container_name.replace(' ', '_'))
                                            }
        return container

    def delete_container(self, container):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> container = Container(name = 'test container',
        ...    extra={'object_count': 0}, driver=driver)
        >>> driver.delete_container(container=container)
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ContainerDoesNotExistError:
        >>> container = driver.create_container(
        ...      container_name='test container 1')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        >>> len(driver._containers)
        1
        >>> driver.delete_container(container=container)
        True
        >>> len(driver._containers)
        0
        >>> container = driver.create_container(
        ...    container_name='test container 1')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        >>> obj = container.upload_object_via_stream(
        ...   object_name='test object', iterator=DummyFileObject(5, 10),
        ...   extra={})
        >>> driver.delete_container(container=container)
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ContainerIsNotEmptyError:

        @inherits: :class:`StorageDriver.delete_container`
        """

        container_name = container.name
        if container_name not in self._containers:
            raise ContainerDoesNotExistError(container_name=container_name,
                                             value=None, driver=self)

        container = self._containers[container_name]
        if len(container['objects']) > 0:
            raise ContainerIsNotEmptyError(container_name=container_name,
                                           value=None, driver=self)

        del self._containers[container_name]
        return True

    def download_object(self, obj, destination_path, overwrite_existing=False,
                        delete_on_failure=True):
        # Delegate to the base driver's _save_object, feeding it a
        # DummyFileObject as the fake response body.
        kwargs_dict = {'obj': obj,
                       'response': DummyFileObject(),
                       'destination_path': destination_path,
                       'overwrite_existing': overwrite_existing,
                       'delete_on_failure': delete_on_failure}

        return self._save_object(**kwargs_dict)

    def download_object_as_stream(self, obj, chunk_size=None):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> container = driver.create_container(
        ...   container_name='test container 1')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        >>> obj = container.upload_object_via_stream(object_name='test object',
        ...    iterator=DummyFileObject(5, 10), extra={})
        >>> stream = container.download_object_as_stream(obj)
        >>> stream #doctest: +ELLIPSIS
        <...closed...>

        @inherits: :class:`StorageDriver.download_object_as_stream`
        """

        return DummyFileObject()

    def upload_object(self, file_path, container, object_name, extra=None,
                      file_hash=None):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> container_name = 'test container 1'
        >>> container = driver.create_container(container_name=container_name)
        >>> container.upload_object(file_path='/tmp/inexistent.file',
        ...     object_name='test') #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        LibcloudError:
        >>> file_path = path = os.path.abspath(__file__)
        >>> file_size = os.path.getsize(file_path)
        >>> obj = container.upload_object(file_path=file_path,
        ...                               object_name='test')
        >>> obj #doctest: +ELLIPSIS
        <Object: name=test, size=...>
        >>> obj.size == file_size
        True

        @inherits: :class:`StorageDriver.upload_object`
        :param file_hash: File hash
        :type file_hash: ``str``
        """

        if not os.path.exists(file_path):
            raise LibcloudError(value='File %s does not exist' % (file_path),
                                driver=self)

        size = os.path.getsize(file_path)
        return self._add_object(container=container, object_name=object_name,
                                size=size, extra=extra)

    def upload_object_via_stream(self, iterator, container,
                                 object_name, extra=None):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> container = driver.create_container(
        ...    container_name='test container 1')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        >>> obj = container.upload_object_via_stream(
        ...   object_name='test object', iterator=DummyFileObject(5, 10),
        ...   extra={})
        >>> obj #doctest: +ELLIPSIS
        <Object: name=test object, size=50, ...>

        @inherits: :class:`StorageDriver.upload_object_via_stream`
        """

        # The iterator is never consumed; only its advertised length is used.
        size = len(iterator)
        return self._add_object(container=container, object_name=object_name,
                                size=size, extra=extra)

    def delete_object(self, obj):
        """
        >>> driver = DummyStorageDriver('key', 'secret')
        >>> container = driver.create_container(
        ...   container_name='test container 1')
        ... #doctest: +IGNORE_EXCEPTION_DETAIL
        >>> obj = container.upload_object_via_stream(object_name='test object',
        ...   iterator=DummyFileObject(5, 10), extra={})
        >>> obj #doctest: +ELLIPSIS
        <Object: name=test object, size=50, ...>
        >>> container.delete_object(obj=obj)
        True
        >>> obj = Object(name='test object 2',
        ...    size=1000, hash=None, extra=None,
        ...    meta_data=None, container=container,driver=None)
        >>> container.delete_object(obj=obj) #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ObjectDoesNotExistError:

        @inherits: :class:`StorageDriver.delete_object`
        """

        container_name = obj.container.name
        object_name = obj.name
        # Raises ObjectDoesNotExistError (or ContainerDoesNotExistError)
        # when the object is unknown.
        obj = self.get_object(container_name=container_name,
                              object_name=object_name)

        del self._containers[container_name]['objects'][object_name]
        return True

    def _add_object(self, container, object_name, size, extra=None):
        # Register a new Object in the container's registry and attach a
        # fake CDN URL to its meta data.
        container = self.get_container(container.name)

        extra = extra or {}
        meta_data = extra.get('meta_data', {})
        meta_data.update({'cdn_url': 'http://www.test.com/object/%s' %
                          (object_name.replace(' ', '_'))})
        obj = Object(name=object_name, size=size, extra=extra, hash=None,
                     meta_data=meta_data, container=container, driver=self)

        self._containers[container.name]['objects'][object_name] = obj
        return obj
-
if __name__ == "__main__":
    # Run the doctests embedded in this module's docstrings.
    import doctest
    doctest.testmod()

http://git-wip-us.apache.org/repos/asf/libcloud/blob/8afcda91/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/google_storage.py
----------------------------------------------------------------------
diff --git a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/google_storage.py b/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/google_storage.py
deleted file mode 100644
index 580a29c..0000000
--- a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/google_storage.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-import email.utils
-
-from libcloud.common.base import ConnectionUserAndKey
-from libcloud.common.google import GoogleAuthType
-from libcloud.common.google import GoogleOAuth2Credential
-from libcloud.storage.drivers.s3 import BaseS3Connection
-from libcloud.storage.drivers.s3 import BaseS3StorageDriver
-from libcloud.storage.drivers.s3 import S3RawResponse
-from libcloud.storage.drivers.s3 import S3Response
-
-# Docs are a lie. The actual namespace returned is different from the one
-# listed in the docs.
-SIGNATURE_IDENTIFIER = 'GOOG1'
-API_VERSION = '2006-03-01'
-NAMESPACE = 'http://doc.s3.amazonaws.com/%s' % (API_VERSION)
-
-
class GoogleStorageConnection(ConnectionUserAndKey):
    """
    A single connection to the Google Cloud Storage API endpoint.

    Authentication happens either through Google's OAuth2 flows or through
    the S3 HMAC interoperability scheme.
    """

    host = 'storage.googleapis.com'
    responseCls = S3Response
    rawResponseCls = S3RawResponse
    PROJECT_ID_HEADER = 'x-goog-project-id'

    def __init__(self, user_id, key, secure, auth_type=None,
                 credential_file=None, **kwargs):
        self.auth_type = auth_type or GoogleAuthType.guess_type(user_id)
        if not GoogleAuthType.is_oauth2(self.auth_type):
            self.oauth2_credential = None
        else:
            self.oauth2_credential = GoogleOAuth2Credential(
                user_id, key, self.auth_type, credential_file, **kwargs)
        super(GoogleStorageConnection, self).__init__(user_id, key, secure,
                                                      **kwargs)

    def add_default_headers(self, headers):
        # Every request carries an RFC 1123 date; the project id header is
        # added only when the driver has a project configured.
        headers['Date'] = email.utils.formatdate(usegmt=True)
        project = self.get_project()
        if project:
            headers[self.PROJECT_ID_HEADER] = project
        return headers

    def get_project(self):
        return getattr(self.driver, 'project')

    def pre_connect_hook(self, params, headers):
        if self.auth_type != GoogleAuthType.GCS_S3:
            headers['Authorization'] = ('Bearer ' +
                                        self.oauth2_credential.access_token)
        else:
            signature = self._get_s3_auth_signature(params, headers)
            headers['Authorization'] = '%s %s:%s' % (SIGNATURE_IDENTIFIER,
                                                     self.user_id, signature)
        return params, headers

    def _get_s3_auth_signature(self, params, headers):
        """Hacky wrapper to work with S3's get_auth_signature."""
        vendor_prefix = GoogleStorageDriver.http_vendor_prefix
        lowered_headers = {}

        # Keep 'date', vendor-prefixed headers and non-string values
        # verbatim; lowercase every other header value.
        for name, value in headers.items():
            name = name.lower()
            keep_verbatim = (name == 'date' or
                             name.startswith(vendor_prefix) or
                             not isinstance(value, str))
            lowered_headers[name] = value if keep_verbatim else value.lower()

        return BaseS3Connection.get_auth_signature(
            method=self.method,
            headers=lowered_headers,
            params=copy.deepcopy(params),
            expires=None,
            secret_key=self.key,
            path=self.action,
            vendor_prefix=vendor_prefix)
-
-
class GoogleStorageDriver(BaseS3StorageDriver):
    """
    Driver for Google Cloud Storage.

    Can authenticate via standard Google Cloud methods (Service Accounts,
    Installed App credentials, and GCE instance service accounts)

    Examples:

    Service Accounts::

        driver = GoogleStorageDriver(key=client_email, secret=private_key, ...)

    Installed Application::

        driver = GoogleStorageDriver(key=client_id, secret=client_secret, ...)

    From GCE instance::

        driver = GoogleStorageDriver(key=foo , secret=bar, ...)

    Can also authenticate via Google Cloud Storage's S3 HMAC interoperability
    API. S3 user keys are 20 alphanumeric characters, starting with GOOG.

    Example::

        driver = GoogleStorageDriver(key='GOOG0123456789ABCXYZ',
                                     secret=key_secret)
    """
    name = 'Google Storage'
    website = 'http://cloud.google.com/'
    connectionCls = GoogleStorageConnection
    hash_type = 'md5'
    namespace = NAMESPACE
    # S3-specific chunked-encoding and multi-part upload paths are disabled
    # for this driver.
    supports_chunked_encoding = False
    supports_s3_multipart_upload = False
    # Vendor-specific header prefix ('x-goog-...'), used by the connection
    # when deciding which headers to sign verbatim.
    http_vendor_prefix = 'x-goog'

    def __init__(self, key, secret=None, project=None, **kwargs):
        """
        :param key: OAuth2 client id / service account email, or a GOOG S3
                    interoperability key (see class docstring).
        :type key: ``str``
        :param secret: Matching secret / private key for ``key``.
        :type secret: ``str``
        :param project: Optional Google Cloud project id; when set, the
                        connection sends it in the x-goog-project-id header.
        :type project: ``str``
        """
        # Assign project before the parent constructor runs, as the
        # connection reads it back via get_project().
        self.project = project
        super(GoogleStorageDriver, self).__init__(key, secret, **kwargs)

http://git-wip-us.apache.org/repos/asf/libcloud/blob/8afcda91/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/ktucloud.py
----------------------------------------------------------------------
diff --git a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/ktucloud.py b/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/ktucloud.py
deleted file mode 100644
index 385d445..0000000
--- a/apache-libcloud-1.0.0rc2/libcloud/storage/drivers/ktucloud.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from libcloud.common.types import LibcloudError
-from libcloud.storage.providers import Provider
-
-from libcloud.storage.drivers.cloudfiles import CloudFilesConnection
-from libcloud.storage.drivers.cloudfiles import CloudFilesStorageDriver
-
-KTUCLOUDSTORAGE_AUTH_URL = "https://ssproxy.ucloudbiz.olleh.com/auth/v1.0"
-KTUCLOUDSTORAGE_API_VERSION = "1.0"
-
-
class KTUCloudStorageConnection(CloudFilesConnection):
    """
    Connection class for the KT UCloud Storage endpoint.
    """

    auth_url = KTUCLOUDSTORAGE_AUTH_URL
    _auth_version = KTUCLOUDSTORAGE_API_VERSION

    def get_endpoint(self):
        """
        Look up the 'cloudFiles' endpoint in the service catalog and return
        its public URL, raising LibcloudError when none is available.
        """
        endpoints = self.service_catalog.get_endpoints(name='cloudFiles')
        if not endpoints:
            raise LibcloudError('Could not find specified endpoint')

        public_url = endpoints[0].url
        if not public_url:
            raise LibcloudError('Could not find specified endpoint')

        return public_url
-
-
class KTUCloudStorageDriver(CloudFilesStorageDriver):
    """
    CloudFiles-based storage driver for the KT UCloud Storage endpoint.

    NOTE(review): the original docstring said "UK endpoint" - apparently a
    copy/paste from the CloudFiles driver; this class targets KT UCloud
    (Provider.KTUCLOUD).
    """

    type = Provider.KTUCLOUD
    name = 'KTUCloud Storage'
    connectionCls = KTUCloudStorageConnection


Mime
View raw message