libcloud-notifications mailing list archives

From anthonys...@apache.org
Subject [34/51] [abbrv] libcloud git commit: fix atmos tests
Date Mon, 09 Jan 2017 04:52:54 GMT
fix atmos tests


Project: http://git-wip-us.apache.org/repos/asf/libcloud/repo
Commit: http://git-wip-us.apache.org/repos/asf/libcloud/commit/610c6240
Tree: http://git-wip-us.apache.org/repos/asf/libcloud/tree/610c6240
Diff: http://git-wip-us.apache.org/repos/asf/libcloud/diff/610c6240

Branch: refs/heads/trunk
Commit: 610c624066a24b92871c0ebe5e13759e26ed58a0
Parents: 5e04dbc
Author: Anthony Shaw <anthonyshaw@apache.org>
Authored: Fri Jan 6 16:26:27 2017 +1100
Committer: Anthony Shaw <anthonyshaw@apache.org>
Committed: Fri Jan 6 16:26:27 2017 +1100

----------------------------------------------------------------------
 libcloud/test/storage/test_atmos.py |  30 +++++---
 libcloud/test/storage/test_s3.py    | 124 ++++++++++++-------------------
 2 files changed, 66 insertions(+), 88 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/libcloud/blob/610c6240/libcloud/test/storage/test_atmos.py
----------------------------------------------------------------------
diff --git a/libcloud/test/storage/test_atmos.py b/libcloud/test/storage/test_atmos.py
index 27287eb..d847fc0 100644
--- a/libcloud/test/storage/test_atmos.py
+++ b/libcloud/test/storage/test_atmos.py
@@ -33,7 +33,7 @@ from libcloud.storage.types import ContainerAlreadyExistsError, \
 from libcloud.storage.drivers.atmos import AtmosConnection, AtmosDriver
 from libcloud.storage.drivers.dummy import DummyIterator
 
-from libcloud.test import StorageMockHttp, MockRawResponse
+from libcloud.test import StorageMockHttp, MockRawResponse, MockResponse
 from libcloud.test.file_fixtures import StorageFileFixtures
 
 
@@ -290,12 +290,16 @@ class AtmosTests(unittest.TestCase):
         self.assertTrue(hasattr(stream, '__iter__'))
 
     def test_upload_object_success(self):
-        def upload_file(self, response, file_path, chunked=False,
-                        calculate_hash=True):
-            return True, 'hash343hhash89h932439jsaa89', 1000
+        def upload_file(self, object_name=None, content_type=None,
+            request_path=None, request_method=None,
+            headers=None, file_path=None, stream=None):
+            return {'response': MockResponse(200, headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
+                'bytes_transferred': 1000,
+                'data_hash': '0cc175b9c0f1b6a831c399e269772661'}
+
+        old_func = AtmosDriver._upload_object
+        AtmosDriver._upload_object = upload_file
 
-        old_func = AtmosDriver._upload_file
-        AtmosDriver._upload_file = upload_file
         path = os.path.abspath(__file__)
         container = Container(name='fbc', extra={}, driver=self)
         object_name = 'ftu'
@@ -305,7 +309,7 @@ class AtmosTests(unittest.TestCase):
         self.assertEqual(obj.name, 'ftu')
         self.assertEqual(obj.size, 1000)
         self.assertTrue('some-value' in obj.meta_data)
-        AtmosDriver._upload_file = old_func
+        AtmosDriver._upload_object = old_func
 
     def test_upload_object_no_content_type(self):
         def no_content_type(name):
@@ -329,13 +333,13 @@ class AtmosTests(unittest.TestCase):
         def dummy_content_type(name):
             return 'application/zip', None
 
-        def send(instance):
-            raise Exception('')
+        def send(self, method, **kwargs):
+            raise LibcloudError('')
 
         old_func1 = libcloud.utils.files.guess_file_mime_type
         libcloud.utils.files.guess_file_mime_type = dummy_content_type
-        old_func2 = AtmosMockHttp.send
-        AtmosMockHttp.send = send
+        old_func2 = AtmosMockHttp.request
+        AtmosMockHttp.request = send
 
         file_path = os.path.abspath(__file__)
         container = Container(name='fbc', extra={}, driver=self)
@@ -352,7 +356,7 @@ class AtmosTests(unittest.TestCase):
                 'Timeout while uploading but an exception was not thrown')
         finally:
             libcloud.utils.files.guess_file_mime_type = old_func1
-            AtmosMockHttp.send = old_func2
+            AtmosMockHttp.request = old_func2
 
     def test_upload_object_nonexistent_file(self):
         def dummy_content_type(name):
@@ -742,6 +746,8 @@ class AtmosMockHttp(StorageMockHttp, unittest.TestCase):
         }
         return (httplib.OK, '', headers, httplib.responses[httplib.OK])
 
+    def _rest_namespace_fbc_ftu(self, method, url, body, headers):
+        return (httplib.CREATED, '', {}, httplib.responses[httplib.CREATED])
 
 class AtmosMockRawResponse(MockRawResponse):
     fixtures = StorageFileFixtures('atmos')
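
For context, the updated Atmos tests stub out the driver-level `_upload_object` helper (the old `_upload_file` hook no longer exists) and the stub now returns a dict rather than a tuple. Below is a minimal, self-contained sketch of that monkey-patching pattern; `FakeResponse` is only a stand-in for the test suite's `MockResponse`, and the commented usage mirrors the save/patch/restore sequence in the diff above rather than prescribing libcloud API.

    # Sketch of the patch-and-restore pattern used by the updated tests.
    # FakeResponse is a stand-in for libcloud.test.MockResponse.

    class FakeResponse(object):
        def __init__(self, status, headers=None):
            self.status = status
            self.headers = headers or {}

    def fake_upload_object(self, object_name=None, content_type=None,
                           request_path=None, request_method=None,
                           headers=None, file_path=None, stream=None):
        # New contract: a dict instead of the old (success, hash, size) tuple.
        return {'response': FakeResponse(200,
                                         headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
                'bytes_transferred': 1000,
                'data_hash': '0cc175b9c0f1b6a831c399e269772661'}

    # In a test (as in the hunks above): save the original, patch, restore.
    # old_func = AtmosDriver._upload_object
    # AtmosDriver._upload_object = fake_upload_object
    # try:
    #     ...exercise driver.upload_object(...)...
    # finally:
    #     AtmosDriver._upload_object = old_func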

http://git-wip-us.apache.org/repos/asf/libcloud/blob/610c6240/libcloud/test/storage/test_s3.py
----------------------------------------------------------------------
diff --git a/libcloud/test/storage/test_s3.py b/libcloud/test/storage/test_s3.py
index f9e1628..e8dbc2b 100644
--- a/libcloud/test/storage/test_s3.py
+++ b/libcloud/test/storage/test_s3.py
@@ -18,6 +18,7 @@ import hmac
 import os
 import sys
 import unittest
+from io import BytesIO
 
 from hashlib import sha1
 
@@ -48,7 +49,7 @@ from libcloud.storage.drivers.s3 import CHUNK_SIZE
 from libcloud.storage.drivers.dummy import DummyIterator
 from libcloud.utils.py3 import b
 
-from libcloud.test import StorageMockHttp, MockRawResponse  # pylint: disable-msg=E0611
+from libcloud.test import StorageMockHttp, MockRawResponse, MockResponse  # pylint: disable-msg=E0611
 from libcloud.test import MockHttpTestCase  # pylint: disable-msg=E0611
 from libcloud.test.file_fixtures import StorageFileFixtures  # pylint: disable-msg=E0611
 from libcloud.test.secrets import STORAGE_S3_PARAMS
@@ -249,51 +250,7 @@ class S3MockHttp(StorageMockHttp, MockHttpTestCase):
         query_string = urlparse.urlsplit(url).query
         query = parse_qs(query_string)
 
-        if not query.get('uploadId', False):
-            self.fail('Request doesnt contain uploadId query parameter')
-
-        upload_id = query['uploadId'][0]
-        if upload_id != TEST_ID:
-            self.fail('first uploadId doesnt match TEST_ID')
-
-        if method == 'PUT':
-            # PUT is used for uploading the part. part number is mandatory
-            if not query.get('partNumber', False):
-                self.fail('Request is missing partNumber query parameter')
-
-            body = ''
-            return (httplib.OK,
-                    body,
-                    headers,
-                    httplib.responses[httplib.OK])
-
-        elif method == 'DELETE':
-            # DELETE is done for aborting the upload
-            body = ''
-            return (httplib.NO_CONTENT,
-                    body,
-                    headers,
-                    httplib.responses[httplib.NO_CONTENT])
-
-        else:
-            # POST is done for committing the upload. Parse the XML and
-            # check if the commit is proper (TODO: XML Schema based check?)
-            commit = ET.fromstring(body)
-            count = 0
-
-            for part in commit.findall('Part'):
-                count += 1
-                part_no = part.find('PartNumber').text
-                etag = part.find('ETag').text
-
-                self.assertEqual(part_no, str(count))
-                self.assertEqual(etag, headers['etag'])
-
-            # Make sure that manifest contains at least one part
-            self.assertTrue(count >= 1)
-
-            body = self.fixtures.load('complete_multipart.xml')
-            return (httplib.OK,
+        return (httplib.OK,
                     body,
                     headers,
                     httplib.responses[httplib.OK])
@@ -750,14 +707,17 @@ class S3Tests(unittest.TestCase):
     def test_upload_object_invalid_hash1(self):
         # Invalid hash is detected on the amazon side and BAD_REQUEST is
         # returned
-        def upload_file(self, response, file_path, chunked=False,
-                        calculate_hash=True):
-            return True, 'hash343hhash89h932439jsaa89', 1000
+        def upload_file(self, object_name=None, content_type=None,
+            request_path=None, request_method=None,
+            headers=None, file_path=None, stream=None):
+            return {'response': MockResponse(200),
+                'bytes_transferred': 1000,
+                'data_hash': 'hash343hhash89h932439jsaa89'}
 
         self.mock_raw_response_klass.type = 'INVALID_HASH1'
 
-        old_func = self.driver_type._upload_file
-        self.driver_type._upload_file = upload_file
+        old_func = self.driver_type._upload_object
+        self.driver_type._upload_object = upload_file
         file_path = os.path.abspath(__file__)
         container = Container(name='foo_bar_container', extra={},
                               driver=self.driver)
@@ -772,19 +732,23 @@ class S3Tests(unittest.TestCase):
             self.fail(
                 'Invalid hash was returned but an exception was not thrown')
         finally:
-            self.driver_type._upload_file = old_func
+            self.driver_type._upload_object = old_func
 
     def test_upload_object_invalid_hash2(self):
         # Invalid hash is detected when comparing hash provided in the response
         # ETag header
-        def upload_file(self, response, file_path, chunked=False,
-                        calculate_hash=True):
-            return True, '0cc175b9c0f1b6a831c399e269772661', 1000
+        def upload_file(self, object_name=None, content_type=None,
+            request_path=None, request_method=None,
+            headers=None, file_path=None, stream=None):
+            return {'response': MockResponse(200, headers={'etag': 'woopwoopwoop'}),
+                'bytes_transferred': 1000,
+                'data_hash': '0cc175b9c0f1b6a831c399e269772661'}
 
         self.mock_raw_response_klass.type = 'INVALID_HASH2'
 
-        old_func = self.driver_type._upload_file
-        self.driver_type._upload_file = upload_file
+        old_func = self.driver_type._upload_object
+        self.driver_type._upload_object = upload_file
+
         file_path = os.path.abspath(__file__)
         container = Container(name='foo_bar_container', extra={},
                               driver=self.driver)
@@ -799,15 +763,19 @@ class S3Tests(unittest.TestCase):
             self.fail(
                 'Invalid hash was returned but an exception was not thrown')
         finally:
-            self.driver_type._upload_file = old_func
+            self.driver_type._upload_object = old_func
 
     def test_upload_object_success(self):
-        def upload_file(self, response, file_path, chunked=False,
-                        calculate_hash=True):
-            return True, '0cc175b9c0f1b6a831c399e269772661', 1000
-
-        old_func = self.driver_type._upload_file
-        self.driver_type._upload_file = upload_file
+        def upload_file(self, object_name=None, content_type=None,
+            request_path=None, request_method=None,
+            headers=None, file_path=None, stream=None):
+            return {'response': MockResponse(200,
+                                             headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
+                'bytes_transferred': 1000,
+                'data_hash': '0cc175b9c0f1b6a831c399e269772661'}
+        self.mock_response_klass.type = None
+        old_func = self.driver_type._upload_object
+        self.driver_type._upload_object = upload_file
         file_path = os.path.abspath(__file__)
         container = Container(name='foo_bar_container', extra={},
                               driver=self.driver)
@@ -821,15 +789,20 @@ class S3Tests(unittest.TestCase):
         self.assertEqual(obj.name, 'foo_test_upload')
         self.assertEqual(obj.size, 1000)
         self.assertTrue('some-value' in obj.meta_data)
-        self.driver_type._upload_file = old_func
+        self.driver_type._upload_object = old_func
 
     def test_upload_object_with_acl(self):
-        def upload_file(self, response, file_path, chunked=False,
-                        calculate_hash=True):
-            return True, '0cc175b9c0f1b6a831c399e269772661', 1000
+        def upload_file(self, object_name=None, content_type=None,
+            request_path=None, request_method=None,
+            headers=None, file_path=None, stream=None):
+            return {'response': MockResponse(200, headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
+                'bytes_transferred': 1000,
+                'data_hash': '0cc175b9c0f1b6a831c399e269772661'}
+
+        self.mock_response_klass.type = None
+        old_func = self.driver_type._upload_object
+        self.driver_type._upload_object = upload_file
 
-        old_func = self.driver_type._upload_file
-        self.driver_type._upload_file = upload_file
         file_path = os.path.abspath(__file__)
         container = Container(name='foo_bar_container', extra={},
                               driver=self.driver)
@@ -843,7 +816,7 @@ class S3Tests(unittest.TestCase):
         self.assertEqual(obj.name, 'foo_test_upload')
         self.assertEqual(obj.size, 1000)
         self.assertEqual(obj.extra['acl'], 'public-read')
-        self.driver_type._upload_file = old_func
+        self.driver_type._upload_object = old_func
 
     def test_upload_empty_object_via_stream(self):
         if self.driver.supports_s3_multipart_upload:
@@ -856,7 +829,7 @@ class S3Tests(unittest.TestCase):
         container = Container(name='foo_bar_container', extra={},
                               driver=self.driver)
         object_name = 'foo_test_stream_data'
-        iterator = DummyIterator(data=[''])
+        iterator = BytesIO(b(''))
         extra = {'content_type': 'text/plain'}
         obj = self.driver.upload_object_via_stream(container=container,
                                                    object_name=object_name,
@@ -877,7 +850,7 @@ class S3Tests(unittest.TestCase):
         container = Container(name='foo_bar_container', extra={},
                               driver=self.driver)
         object_name = 'foo_test_stream_data'
-        iterator = DummyIterator(data=['2', '3', '5'])
+        iterator = BytesIO(b('234'))
         extra = {'content_type': 'text/plain'}
         obj = self.driver.upload_object_via_stream(container=container,
                                                    object_name=object_name,
@@ -898,8 +871,7 @@ class S3Tests(unittest.TestCase):
         container = Container(name='foo_bar_container', extra={},
                               driver=self.driver)
         object_name = 'foo_test_stream_data'
-        iterator = DummyIterator(
-            data=['2' * CHUNK_SIZE, '3' * CHUNK_SIZE, '5'])
+        iterator = BytesIO(b('234'*CHUNK_SIZE))
         extra = {'content_type': 'text/plain'}
         obj = self.driver.upload_object_via_stream(container=container,
                                                    object_name=object_name,
@@ -907,7 +879,7 @@ class S3Tests(unittest.TestCase):
                                                    extra=extra)
 
         self.assertEqual(obj.name, object_name)
-        self.assertEqual(obj.size, CHUNK_SIZE * 2 + 1)
+        self.assertEqual(obj.size, CHUNK_SIZE * 3)
 
     def test_upload_object_via_stream_abort(self):
         if not self.driver.supports_s3_multipart_upload:
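
The stream-upload tests above also switch from `DummyIterator` (an iterator of string chunks) to a seekable `io.BytesIO` buffer, which is why the expected size changes from `CHUNK_SIZE * 2 + 1` to `CHUNK_SIZE * 3`. A minimal sketch of that size arithmetic, using a small illustrative stand-in for the real `CHUNK_SIZE` imported from `libcloud.storage.drivers.s3`:

    from io import BytesIO

    # Illustrative stand-in for libcloud.storage.drivers.s3.CHUNK_SIZE,
    # kept small so the sketch runs instantly.
    CHUNK_SIZE = 8

    # Old style: DummyIterator(data=['2' * CHUNK_SIZE, '3' * CHUNK_SIZE, '5'])
    # yielded CHUNK_SIZE * 2 + 1 bytes in total.
    # New style: one seekable buffer of '234' repeated CHUNK_SIZE times,
    # i.e. CHUNK_SIZE * 3 bytes, matching the updated assertion.
    payload = ('234' * CHUNK_SIZE).encode('utf-8')
    stream = BytesIO(payload)

    assert len(payload) == CHUNK_SIZE * 3
    assert stream.read(4) == b'2342'  # file-like reads, not chunk iteration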

