diff --git a/attic/cart/test_cart_export_service.py b/attic/cart/test_cart_export_service.py index 72f60209fd..d43f9368d9 100644 --- a/attic/cart/test_cart_export_service.py +++ b/attic/cart/test_cart_export_service.py @@ -18,9 +18,6 @@ from azul.service.collection_data_access import ( CollectionDataAccess, ) -from retorts import ( - ResponsesHelper, -) @skipIf(config.dss_endpoint is None, @@ -89,7 +86,6 @@ def mock_get_paginable_cart_items(**kwargs): self.assertEqual(2, len(content_items)) self.assertIn(expected_content_item_1, content_items) - @responses.activate @patch('azul.deployment.aws.dynamo') def test_export_create_new_collection(self, _dynamodb_client): expected_collection = dict(uuid='abc', version='123') @@ -98,7 +94,7 @@ def test_export_create_new_collection(self, _dynamodb_client): service = CartExportService() with patch.object(service.cart_item_manager, 'get_cart', side_effect=[dict(CartName='abc123')]): with patch.object(service, 'get_content', side_effect=[expected_get_content_result]): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.Response(responses.PUT, CollectionDataAccess.endpoint_url('collections'), status=201, @@ -114,7 +110,6 @@ def test_export_create_new_collection(self, _dynamodb_client): self.assertEqual(expected_get_content_result['resume_token'], result['resume_token']) self.assertEqual(len(expected_get_content_result['items']), result['exported_item_count']) - @responses.activate @patch('azul.deployment.aws.dynamo') def test_export_append_items_to_collection_ok(self, _dynamodb_client): expected_collection = dict(uuid='abc', version='123') @@ -122,7 +117,7 @@ def test_export_append_items_to_collection_ok(self, _dynamodb_client): items=[1, 2, 3, 4]) # NOTE: This is just for the test. 
        service = CartExportService()
        with patch.object(service, 'get_content', side_effect=[expected_get_content_result]):
-            with ResponsesHelper() as helper:
+            with responses.RequestsMock() as helper:
                 helper.add(responses.Response(
                     responses.PATCH,
                     CollectionDataAccess.endpoint_url('collections', expected_collection['uuid']),
@@ -139,7 +134,6 @@ def test_export_append_items_to_collection_ok(self, _dynamodb_client):
         self.assertEqual(expected_get_content_result['resume_token'], result['resume_token'])
         self.assertEqual(len(expected_get_content_result['items']), result['exported_item_count'])
 
-    @responses.activate
     @patch('azul.deployment.aws.dynamo')
     def test_export_append_items_to_collection_raises_expired_access_token_error(self, _dynamodb_client):
         expected_collection = dict(uuid='abc', version='123')
@@ -148,7 +142,7 @@ def test_export_append_items_to_collection_raises_expired_access_token_error(sel
         service = CartExportService()
         with self.assertRaises(ExpiredAccessTokenError):
             with patch.object(service, 'get_content', side_effect=[expected_get_content_result]):
-                with ResponsesHelper() as helper:
+                with responses.RequestsMock() as helper:
                     url = CollectionDataAccess.endpoint_url('collections', expected_collection['uuid'])
                     helper.add(responses.Response(responses.PATCH, url, status=401, json=dict(code='abc')))
                     service.export(export_id='export1',
diff --git a/test/retorts.py b/attic/test/retorts.py
similarity index 100%
rename from test/retorts.py
rename to attic/test/retorts.py
diff --git a/requirements.dev.trans.txt b/requirements.dev.trans.txt
index 67b41616ff..27054993a0 100644
--- a/requirements.dev.trans.txt
+++ b/requirements.dev.trans.txt
@@ -1,28 +1,26 @@
 argh==0.26.2
 arrow==0.12.1
 aws-sam-translator==1.35.0
-aws-xray-sdk==2.7.0
-cfn-lint==0.48.2
+aws-xray-sdk==2.8.0
+cfn-lint==0.49.2
 click==7.1.2
-colorama==0.4.1
-configargparse==1.4
-datetime==4.3
+colorama==0.4.3
+configargparse==1.4.1
 decorator==4.4.2
 docker-pycreds==0.4.0
 ecdsa==0.14.1
 enum-compat==0.0.3
 flask-basicauth==0.2.0
-flask==1.1.2
+flask==2.0.0
 geventhttpclient==1.4.4
 google-auth-httplib2==0.1.0
-greenlet==1.0.0
+greenlet==1.1.0
 httplib2==0.19.1
-itsdangerous==1.1.0
-jsondiff==1.1.2
+itsdangerous==2.0.1
+jsondiff==1.3.0
 jsonpatch==1.32
 junit-xml==1.9
 mccabe==0.6.1
-mock==4.0.3
 msgpack==1.0.2
 mypy-extensions==0.4.3
 networkx==2.5.1
@@ -39,7 +37,8 @@
 smmap2==3.0.1
 smmap==4.0.0
 sshpubkeys==3.3.1
 uritemplate==3.0.1
-websocket-client==0.58.0
+websocket-client==1.0.0
 xmltodict==0.12.0
+zipp==3.4.1
 zope.event==4.5.0
-zope.interface==5.3.0
+zope.interface==5.4.0
diff --git a/requirements.dev.txt b/requirements.dev.txt
index e6235e0c99..33a945e815 100644
--- a/requirements.dev.txt
+++ b/requirements.dev.txt
@@ -1,4 +1,4 @@
-awscli==1.16.283
+awscli==1.19.33
 git+git://github.com/DataBiosphere/azul-chalice@1.18.0+5#egg=chalice
 coverage==5.1
 docker==3.4.1
@@ -9,7 +9,9 @@
 gitpython==2.1.11
 google-api-python-client==1.7.8
 jsonschema==3.2.0
 locust==1.2.3
-moto==1.3.13 # 1.3.14 does not work, possibly related to https://github.com/spulec/moto/issues/2567
+# FIXME: Update Moto when decorator ordering issue is resolved
+# https://github.com/DataBiosphere/azul/issues/3035
+moto[all]==2.0.6
 openapi-spec-validator==0.2.8
 pygithub==1.43.5
 pytest==3.2.3
diff --git a/requirements.trans.txt b/requirements.trans.txt
index 5348b231cd..ae87e9d757 100644
--- a/requirements.trans.txt
+++ b/requirements.trans.txt
@@ -1,28 +1,28 @@
-argcomplete==1.12.2
+argcomplete==1.12.3
 atomicwrites==1.4.0
 bagit==1.7.0
-cachetools==4.2.1
+cachetools==4.2.2
certifi==2020.12.5 chardet==3.0.4 commonmark==0.9.1 crc32c==2.2 dcplib==2.1.2 docutils==0.15.2 -google-api-core==1.26.3 +google-api-core==1.28.0 google-auth-oauthlib==0.4.4 google-cloud-core==1.6.0 google-resumable-media==0.5.1 googleapis-common-protos==1.53.0 -idna==2.8 -jinja2==2.11.3 +idna==2.10 +jinja2==3.0.1 jsonpointer==1.14 jsonschema==3.2.0 -markupsafe==1.1.1 +markupsafe==2.0.1 oauthlib==3.1.0 orderedmultidict==1.0.1 packaging==20.9 proto-plus==1.18.1 -protobuf==3.15.8 +protobuf==3.17.0 puremagic==1.4 pycparser==2.20 pyopenssl==20.0.1 @@ -30,8 +30,8 @@ pyparsing==2.4.7 python-dateutil==2.8.1 pytz==2021.1 requests-oauthlib==1.3.0 -s3transfer==0.2.1 -six==1.15.0 +s3transfer==0.3.7 +six==1.16.0 tenacity==5.0.4 text-unidecode==1.3 tqdm==4.60.0 diff --git a/requirements.txt b/requirements.txt index 3db3aff028..5aba31afd6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,9 +3,9 @@ attrs==19.3.0 aws-requests-auth==0.4.2 bdbag==1.5.1 boltons==20.2.0 -boto3==1.10.19 +boto3==1.17.29 boto==2.49.0 -botocore==1.13.19 +botocore==1.20.33 cffi==1.14.0 cryptography==2.3.1 deprecated==1.2.9 diff --git a/src/azul/portal_service.py b/src/azul/portal_service.py index 1e394d45dd..e0055f79ed 100644 --- a/src/azul/portal_service.py +++ b/src/azul/portal_service.py @@ -14,6 +14,10 @@ cast, ) +from botocore.exceptions import ( + ClientError, +) + from azul import ( cached_property, config, @@ -197,7 +201,17 @@ def _read_db(self, version: str) -> JSONs: Key=self.object_key, VersionId=version) except self.client.exceptions.NoSuchKey: + # We hypothesize that when S3 was only eventually consistent, + # NoSuchKey would have been raised when an object had been + # created but hadn't materialized yet … raise NoSuchObjectVersion(version) + except ClientError as e: + # … and that NoSuchVersion would have been raised when the object had + # been overwritten but the overwrite hadn't materialized yet. 
+            if e.response['Error']['Code'] == 'NoSuchVersion':
+                raise NoSuchObjectVersion(version)
+            else:
+                raise
         else:
             json_bytes = response['Body'].read()
             return json.loads(json_bytes.decode())
diff --git a/src/azul/service/storage_service.py b/src/azul/service/storage_service.py
index 9af2238143..07a90021ca 100644
--- a/src/azul/service/storage_service.py
+++ b/src/azul/service/storage_service.py
@@ -116,8 +116,11 @@ def get_presigned_url(self, key: str, file_name: Optional[str] = None) -> str:
             **({} if file_name is None else {'ResponseContentDisposition': f'attachment;filename="{file_name}"'})
         })
 
-    def create_bucket(self, bucket_name: str = None):
-        self.client.create_bucket(Bucket=(bucket_name or self.bucket_name))
+    def create_bucket(self, bucket_name: Optional[str] = None):
+        self.client.create_bucket(Bucket=(bucket_name or self.bucket_name),
+                                  CreateBucketConfiguration={
+                                      'LocationConstraint': config.region
+                                  })
 
     def put_object_tagging(self, object_key: str, tagging: Tagging = None):
         deadline = time.time() + 60
diff --git a/test/azul_test_case.py b/test/azul_test_case.py
index c4313b84c2..472fbb3c80 100644
--- a/test/azul_test_case.py
+++ b/test/azul_test_case.py
@@ -18,6 +18,10 @@
     Credentials,
 )
 import botocore.session
+from moto.core.models import (
+    ACCOUNT_ID,
+    moto_api_backend,
+)
 
 from azul import (
     CatalogName,
@@ -153,11 +157,6 @@ def tearDownClass(cls) -> None:
         super().tearDownClass()
 
     def setUp(self) -> None:
-        # FIXME: remove local import and update moto
-        # https://github.com/DataBiosphere/azul/issues/1718
-        from moto.core import (
-            moto_api_backend,
-        )
         super().setUp()
         # Moto backends are reset to ensure no resources are left over if a test
         # fails to clean up after itself.
@@ -204,12 +203,7 @@ def _mock_aws_account_id(cls):
         # Set AZUL_AWS_ACCOUNT_ID to what Moto is using. This circumvents
         # assertion errors in azul.deployment.aws.account.
         cls._aws_account_id = os.environ['AZUL_AWS_ACCOUNT_ID']
-        # The fake Moto account ID is defined as a constant in a Moto module
-        # but we cannot import any Moto modules since doing so has a bad side
-        # effect: https://github.com/spulec/moto/issues/2058.
- # FIXME: Switch to overriding MOTO_ACCOUNT_ID as part of - # https://github.com/DataBiosphere/azul/issues/1718 - os.environ['AZUL_AWS_ACCOUNT_ID'] = '123456789012' + os.environ['AZUL_AWS_ACCOUNT_ID'] = ACCOUNT_ID @classmethod def _restore_aws_account(cls): diff --git a/test/health_check_test_case.py b/test/health_check_test_case.py index 2c6276c214..c9fb7e76e8 100644 --- a/test/health_check_test_case.py +++ b/test/health_check_test_case.py @@ -2,9 +2,6 @@ ABCMeta, abstractmethod, ) -from contextlib import ( - contextmanager, -) import os import time from typing import ( @@ -40,17 +37,14 @@ from azul.modules import ( load_app_module, ) -from azul.service.storage_service import ( - StorageService, -) from azul.types import ( JSON, ) from es_test_case import ( ElasticsearchTestCase, ) -from retorts import ( - ResponsesHelper, +from service import ( + StorageServiceTestCase, ) @@ -61,7 +55,10 @@ def load_tests(loader, tests, pattern): return suite -class HealthCheckTestCase(LocalAppTestCase, ElasticsearchTestCase, metaclass=ABCMeta): +class HealthCheckTestCase(LocalAppTestCase, + ElasticsearchTestCase, + StorageServiceTestCase, + metaclass=ABCMeta): endpoints = ( '/index/files?size=1', '/index/projects?size=1', @@ -115,23 +112,20 @@ def test_health_endpoint_keys(self): self._create_mock_queues() for keys, expected_response in expected.items(): with self.subTest(msg=keys): - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: self._mock_other_lambdas(helper, up=True) - with self._mock_service_endpoints(helper, endpoint_states): - response = requests.get(self.base_url + '/health/' + keys) - self.assertEqual(200, response.status_code) - self.assertEqual(expected_response, response.json()) + self._mock_service_endpoints(helper, endpoint_states) + response = requests.get(self.base_url + '/health/' + keys) + self.assertEqual(200, response.status_code) + self.assertEqual(expected_response, response.json()) @mock_s3 @mock_sts @mock_sqs def test_cached_health(self): - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() # No health object is available in S3 bucket, yielding an error - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: response = requests.get(self.base_url + '/health/cached') self.assertEqual(500, response.status_code) self.assertEqual('ChaliceViewError: Cached health object does not exist', response.json()['Message']) @@ -140,27 +134,23 @@ def test_cached_health(self): self._create_mock_queues() endpoint_states = self._endpoint_states() app = load_app_module(self.lambda_name()) - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) - with self._mock_service_endpoints(helper, endpoint_states): - app.update_health_cache(MagicMock(), MagicMock()) - response = requests.get(self.base_url + '/health/cached') - self.assertEqual(200, response.status_code) + with self.helper() as helper: + self._mock_service_endpoints(helper, endpoint_states) + app.update_health_cache(MagicMock(), MagicMock()) + response = requests.get(self.base_url + '/health/cached') + self.assertEqual(200, response.status_code) # Another failure is observed when the cache health object is older than 2 minutes future_time = time.time() + 3 * 60 - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: with patch('time.time', new=lambda: future_time): response = requests.get(self.base_url + 
                                        '/health/cached')
                self.assertEqual(500, response.status_code)
                self.assertEqual('ChaliceViewError: Cached health object is stale', response.json()['Message'])
 
-    @responses.activate
     def test_laziness(self):
         # Note the absence of moto decorators on this test.
-        with ResponsesHelper() as helper:
-            helper.add_passthru(self.base_url)
+        with self.helper() as helper:
             self._mock_other_lambdas(helper, up=True)
             # If Health weren't lazy, it would fail due to the lack of mocks for SQS.
             response = requests.get(self.base_url + '/health/other_lambdas')
@@ -198,7 +188,9 @@ def _expected_queues(self, up: bool) -> JSON:
                 'delayed': 0, 'invisible': 0, 'queued': 0
             }
         } if up else {
-            'up': False, 'error': 'The specified queue does not exist for this wsdl version.'
+            'up': False,
+            'error': f'The specified queue {queue_name} does not exist for'
+                     f' this wsdl version.'
         } for queue_name in config.all_queue_names
     })
@@ -259,22 +251,32 @@ def _expected_elasticsearch(self, up: bool) -> JSON:
         }
 
     def _test(self, endpoint_states: Mapping[str, bool], lambdas_up: bool, path: str = '/health/fast'):
-        with ResponsesHelper() as helper:
-            helper.add_passthru(self.base_url)
+        with self.helper() as helper:
             self._mock_other_lambdas(helper, lambdas_up)
-            with self._mock_service_endpoints(helper, endpoint_states):
-                return requests.get(self.base_url + path)
+            self._mock_service_endpoints(helper, endpoint_states)
+            return requests.get(self.base_url + path)
+
+    def helper(self):
+        helper = responses.RequestsMock()
+        helper.add_passthru(self.base_url)
+        # We originally shared the Requests mock with Moto, which had this set
+        # to False. Because of that, and without noticing, we ended up mocking
+        # more responses than necessary for some of the tests. Instead of
+        # rewriting the tests to only mock what is actually used, we simply
+        # disable the assertion, just like Moto did.
+ helper.assert_all_requests_are_fired = False + return helper - @contextmanager - def _mock_service_endpoints(self, helper: ResponsesHelper, endpoint_states: Mapping[str, bool]) -> None: + def _mock_service_endpoints(self, + helper: responses.RequestsMock, + endpoint_states: Mapping[str, bool]) -> None: for endpoint, endpoint_up in endpoint_states.items(): helper.add(responses.Response(method='HEAD', url=config.service_endpoint() + endpoint, status=200 if endpoint_up else 503, json={})) - yield - def _mock_other_lambdas(self, helper: ResponsesHelper, up: bool): + def _mock_other_lambdas(self, helper: responses.RequestsMock, up: bool): for lambda_name in self._other_lambda_names(): helper.add(responses.Response(method='GET', url=config.lambda_endpoint(lambda_name) + '/health/basic', diff --git a/test/indexer/test_hca_indexer.py b/test/indexer/test_hca_indexer.py index 5e4d5a427c..8da58afc7a 100644 --- a/test/indexer/test_hca_indexer.py +++ b/test/indexer/test_hca_indexer.py @@ -100,9 +100,6 @@ from indexer import ( IndexerTestCase, ) -from retorts import ( - ResponsesHelper, -) logger = logging.getLogger(__name__) @@ -1470,16 +1467,14 @@ def test_invalid_auth_for_notification_request(self): self.assertEqual(401, response.status_code) def _test(self, body: JSON, delete: bool, valid_auth: bool) -> requests.Response: - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) - hmac_creds = {'key': b'good key', 'key_id': 'the id'} - with patch('azul.deployment.aws.get_hmac_key_and_id', return_value=hmac_creds): - if valid_auth: - auth = hmac.prepare() - else: - auth = HTTPSignatureAuth(key=b'bad key', key_id='the id') - url = furl(self.base_url, path=(self.catalog, 'delete' if delete else 'add')) - return requests.post(url.url, json=body, auth=auth) + hmac_creds = {'key': b'good key', 'key_id': 'the id'} + with patch('azul.deployment.aws.get_hmac_key_and_id', return_value=hmac_creds): + if valid_auth: + auth = hmac.prepare() + else: + auth = HTTPSignatureAuth(key=b'bad key', key_id='the id') + url = furl(self.base_url, path=(self.catalog, 'delete' if delete else 'add')) + return requests.post(url.url, json=body, auth=auth) @staticmethod def _create_mock_notifications_queue(): diff --git a/test/indexer/test_indexer_controller.py b/test/indexer/test_indexer_controller.py index 1181c7ef48..b4096c928e 100644 --- a/test/indexer/test_indexer_controller.py +++ b/test/indexer/test_indexer_controller.py @@ -1,10 +1,12 @@ import json import logging +from unittest import ( + mock, +) from chalice.app import ( SQSRecord, ) -import mock from more_itertools import ( one, ) diff --git a/test/service/__init__.py b/test/service/__init__.py index 5c095971de..3da79c26a1 100644 --- a/test/service/__init__.py +++ b/test/service/__init__.py @@ -20,11 +20,15 @@ LocalAppTestCase, ) from azul import ( + cached_property, config, ) from azul.indexer import ( SourcedBundleFQID, ) +from azul.service.storage_service import ( + StorageService, +) from indexer import ( IndexerTestCase, ) @@ -120,3 +124,13 @@ def setUpClass(cls): def tearDownClass(cls): cls._dss_mock.stop() super().tearDownClass() + + +class StorageServiceTestCase(TestCase): + """ + A mixin for test cases that utilize StorageService. 
+ """ + + @cached_property + def storage_service(self) -> StorageService: + return StorageService() diff --git a/test/service/test_collection_data_access.py b/test/service/test_collection_data_access.py index 49899a7841..cbc781709d 100644 --- a/test/service/test_collection_data_access.py +++ b/test/service/test_collection_data_access.py @@ -18,9 +18,6 @@ UnauthorizedClientAccessError, UpdateError, ) -from retorts import ( - ResponsesHelper, -) @skipIf(config.dss_endpoint is None, @@ -31,12 +28,11 @@ def setUp(self): fake_access_token = 'fake_access_token' self.cda = CollectionDataAccess(fake_access_token) - @responses.activate def test_get_ok(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' fake_collection = {'hello': 'world'} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.Response(responses.GET, self.cda.endpoint_url('collections', test_collection_uuid), json=fake_collection)) @@ -46,11 +42,10 @@ def test_get_ok(self): version=test_collection_version, collection=fake_collection)) - @responses.activate def test_get_raises_retrival_error(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.GET, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(567, '{}'), @@ -58,12 +53,11 @@ def test_get_raises_retrival_error(self): with self.assertRaises(RetrievalError): self.cda.get(test_collection_uuid, test_collection_version) - @responses.activate def test_create_ok(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' expected_collection = dict(uuid=test_collection_uuid, version=test_collection_version) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PUT, self.cda.endpoint_url('collections'), callback=RequestCallback(201, json.dumps(expected_collection)), @@ -71,12 +65,11 @@ def test_create_ok(self): collection = self.cda.create(test_collection_uuid, 'foo bar', 'bar', test_collection_version, []) self.assertEqual(collection, expected_collection) - @responses.activate def test_create_raises_creation_error(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' fake_dss_response = {"code": "unknown"} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PUT, self.cda.endpoint_url('collections'), callback=RequestCallback(500, json.dumps(fake_dss_response)), @@ -84,12 +77,11 @@ def test_create_raises_creation_error(self): with self.assertRaises(CreationError): self.cda.create(test_collection_uuid, 'foo bar', 'bar', test_collection_version, []) - @responses.activate def test_append_with_no_items_successful(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' expected_collection = dict(uuid=test_collection_uuid, version=test_collection_version) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PATCH, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(200, json.dumps(expected_collection)), @@ -97,12 +89,11 @@ def test_append_with_no_items_successful(self): collection = self.cda.append(test_collection_uuid, test_collection_version, []) self.assertEqual(collection, expected_collection) - 
@responses.activate def test_append_with_some_items_successful(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' expected_collection = dict(uuid=test_collection_uuid, version=test_collection_version) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PATCH, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(200, json.dumps(expected_collection)), @@ -114,11 +105,10 @@ def test_append_with_some_items_successful(self): dict(type='foo_n', uuid='bar_n', version='baz_n')]) self.assertEqual(collection, expected_collection) - @responses.activate def test_append_raises_update_error(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PATCH, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(405, '{}'), @@ -126,11 +116,10 @@ def test_append_raises_update_error(self): with self.assertRaises(UpdateError): self.cda.append(test_collection_uuid, test_collection_version, []) - @responses.activate def test_send_request_successful_with_auto_retry_on_http_504_timeout(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'hello_world'} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) helper.add(responses.CallbackResponse(responses.GET, url, @@ -141,7 +130,6 @@ def test_send_request_successful_with_auto_retry_on_http_504_timeout(self): response = self.cda.send_request(test_collection_uuid, 'get', url, {}) self.assertEqual(response.json(), expected_response) - @responses.activate def test_send_request_successful_with_auto_retry_on_http_502(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'hello_world'} @@ -154,7 +142,7 @@ def test_send_request_successful_with_auto_retry_on_http_502(self): def mock_request_handler(_request): return mock_response_sequence.pop(0) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) helper.add(responses.CallbackResponse(responses.GET, url, @@ -163,17 +151,15 @@ def mock_request_handler(_request): response = self.cda.send_request(test_collection_uuid, 'get', url, {}) self.assertEqual(response.json(), expected_response) - @responses.activate def test_send_request_fails_after_too_many_retries(self): test_collection_uuid = 'abcdef123456' with self.assertRaises(ServerTimeoutError): self.cda.send_request(test_collection_uuid, 'get', 'fake_url', {}, delay=64) - @responses.activate def test_send_request_with_unexpected_response_code_raises_client_error(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'hello_world'} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) helper.add(responses.CallbackResponse(responses.GET, url, @@ -182,11 +168,10 @@ def test_send_request_with_unexpected_response_code_raises_client_error(self): with self.assertRaises(ClientError): self.cda.send_request(test_collection_uuid, 'get', url, {}, expected_status_code=200) - @responses.activate def test_send_request_with_unexpected_response_code_raises_unauthorized_client_access_error(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'mock_error'} - with 
ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) helper.add(responses.CallbackResponse(responses.GET, url, diff --git a/test/service/test_drs.py b/test/service/test_drs.py index dbdaf51fb5..4666e6fbbd 100644 --- a/test/service/test_drs.py +++ b/test/service/test_drs.py @@ -30,9 +30,6 @@ dss_drs_object_uri, dss_drs_object_url, ) -from retorts import ( - ResponsesHelper, -) from service import ( DSSUnitTestCase, WebServiceTestCase, @@ -56,9 +53,8 @@ def tearDownClass(cls): def chalice_config(self): return ChaliceConfig.create(lambda_timeout=15) - @responses.activate def _get_data_object(self, file_uuid, file_version): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) drs_url = dss_dos_object_url(file_uuid=file_uuid, catalog=self.catalog, @@ -133,7 +129,6 @@ class DRSTest(WebServiceTestCase, DSSUnitTestCase): signed_url = 'https://org-hca-dss-checkout-prod.s3.amazonaws.com/blobs/307.a72.eb6?foo=bar&et=cetera' gs_url = 'gs://important-bucket/object/path' - @responses.activate def test_drs(self): """ Mocks the DSS backend, then uses the DRS endpoints as a client is @@ -143,7 +138,7 @@ def test_drs(self): file_version = '2018-11-02T113344.698028Z' for redirects in (0, 1, 2, 6): with self.subTest(redirects=redirects): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) self._mock_responses(helper, redirects, file_uuid, file_version=file_version) # Make first client request @@ -271,11 +266,10 @@ def _mock_responses(self, helper, redirects, file_uuid, file_version=None): helper.add(self._dss_response(file_uuid, file_version, 'aws', initial=False, _301=False)) helper.add(self._dss_response(file_uuid, file_version, 'gcp', initial=False, _301=False)) - @responses.activate def test_data_object_not_found(self): file_uuid = 'NOT_A_GOOD_IDEA' error_body = 'DRS should just proxy the DSS for error responses' - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) url = f'{config.dss_endpoint}/files/{file_uuid}' helper.add(responses.Response(method=responses.GET, diff --git a/test/service/test_manifest.py b/test/service/test_manifest.py index 1d4ad36d01..ac2b05c34e 100644 --- a/test/service/test_manifest.py +++ b/test/service/test_manifest.py @@ -82,20 +82,15 @@ ManifestGenerator, ManifestService, ) -from azul.service.storage_service import ( - StorageService, -) from azul.types import ( JSON, ) from azul_test_case import ( AzulUnitTestCase, ) -from retorts import ( - ResponsesHelper, -) from service import ( DSSUnitTestCase, + StorageServiceTestCase, WebServiceTestCase, ) @@ -107,7 +102,8 @@ def setUpModule(): configure_test_logging(logger) -class ManifestTestCase(WebServiceTestCase): +@mock_s3 +class ManifestTestCase(WebServiceTestCase, StorageServiceTestCase): def setUp(self): super().setUp() @@ -137,7 +133,7 @@ def _get_manifest(self, format_: ManifestFormat, filters: Filters, stream=False) return requests.get(manifest.location, stream=stream) def _get_manifest_object(self, format_: ManifestFormat, filters: JSON) -> Manifest: - service = ManifestService(StorageService()) + service = ManifestService(self.storage_service) return service.get_manifest(format_=format_, catalog=self.catalog, filters=filters) @@ -151,14 +147,8 @@ def manifest_test(test): @mock_sts @mock_s3 def wrapper(self, *args, **kwargs): - with ResponsesHelper() as helper: - # moto will 
mock the requests.get call so we can't hit localhost; - # add_passthru let's us hit the server. - # See this GitHub issue and comment: https://github.com/spulec/moto/issues/1026#issuecomment-380054270 - helper.add_passthru(self.base_url) - storage_service = StorageService() - storage_service.create_bucket() - return test(self, *args, **kwargs) + self.storage_service.create_bucket() + return test(self, *args, **kwargs) return wrapper @@ -1595,7 +1585,7 @@ def test_hash_validity(self): self._index_canned_bundle(original_fqid) filters = {'project': {'is': ['Single of human pancreas']}} old_object_keys = {} - service = ManifestService(StorageService()) + service = ManifestService(self.storage_service) for format_ in ManifestFormat: with self.subTest(msg='indexing new bundle', format_=format_): # When a new bundle is indexed and its full manifest cached, @@ -1668,7 +1658,7 @@ def test_fetch_manifest(self, get_cached_manifest): formats with a mocked return value from `get_cached_manifest`. """ manifest_url = 'https://url.to.manifest?foo=bar' - service = ManifestService(StorageService()) + service = ManifestService(self.storage_service) for format_ in ManifestFormat: with self.subTest(format_=format_): # Mock get_cached_manifest.return_value with a dummy 'location' diff --git a/test/service/test_manifest_async.py b/test/service/test_manifest_async.py index 6901735c74..1260c9ff32 100644 --- a/test/service/test_manifest_async.py +++ b/test/service/test_manifest_async.py @@ -15,6 +15,7 @@ mock_sts, ) import requests +import responses from app_test_case import ( LocalAppTestCase, @@ -44,9 +45,6 @@ from azul_test_case import ( AzulUnitTestCase, ) -from retorts import ( - ResponsesHelper, -) # noinspection PyPep8Naming @@ -175,7 +173,7 @@ def test(self, mock_uuid, mock_helper): service = load_app_module('service') # In a LocalAppTestCase we need the actual state machine name state_machine_name = config.state_machine_name(service.generate_manifest.name) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) for fetch in (True, False): with self.subTest(fetch=fetch): diff --git a/test/service/test_portal_service.py b/test/service/test_portal_service.py index 99ce7d506c..fa06c0b773 100644 --- a/test/service/test_portal_service.py +++ b/test/service/test_portal_service.py @@ -96,28 +96,16 @@ def setUp(self): self.portal_service = PortalService() self.s3_client = aws.client('s3') - self.s3_client.create_bucket(Bucket=self.portal_service.bucket) + self.s3_client.create_bucket(Bucket=self.portal_service.bucket, + CreateBucketConfiguration={ + 'LocationConstraint': config.region + }) self.s3_client.put_bucket_versioning(Bucket=self.portal_service.bucket, VersioningConfiguration={ 'Status': 'Enabled', 'MFADelete': 'Disabled' }) - def tearDown(self): - super().tearDown() - - # To ensure that the bucket is cleared between tests, all versions - # must be deleted. The most convenient way to do this is just to - # disabling versioning and perform a single delete. 
- self.s3_client.put_bucket_versioning(Bucket=self.portal_service.bucket, - VersioningConfiguration={ - 'Status': 'Disabled', - 'MFADelete': 'Disabled' - }) - self.s3_client.delete_object(Bucket=self.portal_service.bucket, - Key=self.portal_service.object_key) - self.s3_client.delete_bucket(Bucket=self.portal_service.bucket) - def download_db(self) -> JSONs: response = self.s3_client.get_object(Bucket=self.portal_service.bucket, Key=self.portal_service.object_key) @@ -143,7 +131,12 @@ def test_internal_crud(self): with self.subTest('read'): read_db = self.portal_service._read_db(version) self.assertEqual(read_db, download_db) - self.assertRaises(NoSuchObjectVersion, self.portal_service._read_db, 'fake_version') + # The version identifier below is syntactically correct, but does + # not refer to any real version of any S3 object. + # See also https://github.com/spulec/moto/issues/3884 + self.assertRaises(NoSuchObjectVersion, + self.portal_service._read_db, + 'VWVT9JkWTreQ95JbRmQt6T3LWrljLpRZ') with self.subTest('update'): version = self.portal_service._write_db(self.dummy_db, version) diff --git a/test/service/test_query_shortener.py b/test/service/test_query_shortener.py index 22c46393d9..a2344389ca 100644 --- a/test/service/test_query_shortener.py +++ b/test/service/test_query_shortener.py @@ -17,11 +17,8 @@ from azul.logging import ( configure_test_logging, ) -from azul.service.storage_service import ( - StorageService, -) -from retorts import ( - ResponsesHelper, +from service import ( + StorageServiceTestCase, ) @@ -30,21 +27,19 @@ def setUpModule(): configure_test_logging() -class TestQueryShortener(LocalAppTestCase): +class TestQueryShortener(LocalAppTestCase, StorageServiceTestCase): @classmethod def lambda_name(cls) -> str: return 'service' def _shorten_query_url(self, url, expect_status=None): - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) - response = requests.post(self.base_url + '/url', json={'url': url}) - if expect_status is None: - response.raise_for_status() - else: - self.assertEqual(response.status_code, expect_status) - return response.json() + response = requests.post(self.base_url + '/url', json={'url': url}) + if expect_status is None: + response.raise_for_status() + else: + self.assertEqual(response.status_code, expect_status) + return response.json() @mock_sts @mock_s3 @@ -63,7 +58,7 @@ def test_valid_url(self, storage_service_get, storage_service_put): # exception classes are tied to the session and different threads use # different sessions. 
https://github.com/boto/botocore/issues/2238 def side_effect(_): - raise StorageService().client.exceptions.NoSuchKey({}, "") + raise self.storage_service.client.exceptions.NoSuchKey({}, "") storage_service_get.side_effect = side_effect response = self._shorten_query_url( @@ -87,7 +82,7 @@ def test_whitelisting(self): 'https://subdomain.singlecell.gi.ucsc.edu/', 'https://sub.subdomain.singlecell.gi.ucsc.edu/abc/def' ] - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) for url in urls: with self.subTest(url=url): self._shorten_query_url(url) @@ -105,7 +100,7 @@ def test_invalid_url(self): 'http://singlecell.gi.xyz.edu', 'singlecell.gi.ucsc.edu' ] - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) for url in urls: with self.subTest(url=url): self._shorten_query_url(url, expect_status=400) @@ -117,7 +112,7 @@ def test_shortened_url_matching(self): URL shortener should return the same response URL for identical input URLs """ url = 'https://singlecell.gi.ucsc.edu' - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) shortened_url1 = self._shorten_query_url(url) shortened_url2 = self._shorten_query_url(url) self.assertEqual(shortened_url1, shortened_url2) @@ -131,7 +126,7 @@ def test_shortened_url_collision(self): """ with mock.patch.object(self.app_module, 'hash_url') as hash_url: hash_url.return_value = 'abcde' - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) self.assertEqual(self._shorten_query_url('https://singlecell.gi.ucsc.edu')['url'], f'http://{config.url_redirect_full_domain_name}/abc') diff --git a/test/service/test_repository_proxy.py b/test/service/test_repository_proxy.py index a83c73d8b5..8beb7cd556 100644 --- a/test/service/test_repository_proxy.py +++ b/test/service/test_repository_proxy.py @@ -56,9 +56,6 @@ from azul.terra import ( TerraClient, ) -from retorts import ( - ResponsesHelper, -) from service import ( DSSUnitTestCase, ) @@ -202,7 +199,10 @@ def test_repository_files_proxy(self, dss_direct_access_role): "847325b6") bucket_name = 'org-humancellatlas-dss-checkout-staging' s3 = aws.client('s3') - s3.create_bucket(Bucket=bucket_name) + s3.create_bucket(Bucket=bucket_name, + CreateBucketConfiguration={ + 'LocationConstraint': config.region + }) s3.upload_fileobj(Bucket=bucket_name, Fileobj=io.BytesIO(b'foo'), Key=key) file_uuid = '701c9a63-23da-4978-946b-7576b6ad088a' file_version = '2018-09-12T121154.054628Z' @@ -230,7 +230,7 @@ def test_repository_files_proxy(self, dss_direct_access_role): ('foo bar.txt', 'grbM6udwp0n/QE/L/RYfjtQCS/U='), ('foo&bar.txt', 'r4C8YxpJ4nXTZh+agBsfhZ2e7fI=')]: with self.subTest(fetch=fetch, file_name=file_name, wait=wait): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) fixed_time = 1547691253.07010 expires = str(round(fixed_time + 3600)) diff --git a/test/service/test_storage_service.py b/test/service/test_storage_service.py index 9725a11924..36d1e7f607 100644 --- a/test/service/test_storage_service.py +++ b/test/service/test_storage_service.py @@ -16,11 +16,13 @@ from azul.service.storage_service import ( MultipartUploadError, MultipartUploadHandler, - StorageService, ) from azul_test_case import ( 
AzulUnitTestCase, ) +from service import ( + StorageServiceTestCase, +) # noinspection PyPep8Naming @@ -28,27 +30,29 @@ def setUpModule(): configure_test_logging() -class StorageServiceTest(AzulUnitTestCase): +class StorageServiceTest(AzulUnitTestCase, StorageServiceTestCase): """ Functional Test for Storage Service """ - storage_service: StorageService @mock_s3 @mock_sts def test_upload_tags(self): - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() object_key = 'test_file' with tempfile.NamedTemporaryFile('w') as f: f.write('some contents') for tags in (None, {}, {'Name': 'foo', 'game': 'bar'}): with self.subTest(tags=tags): - storage_service.upload(file_path=f.name, object_key=object_key, tagging=tags) + self.storage_service.upload(file_path=f.name, + object_key=object_key, + tagging=tags) if tags is None: tags = {} - self.assertEqual(tags, storage_service.get_object_tagging(object_key)) + upload_tags = self.storage_service.get_object_tagging(object_key) + self.assertEqual(tags, + upload_tags) @mock_s3 @mock_sts @@ -56,27 +60,25 @@ def test_simple_get_put(self): sample_key = 'foo-simple' sample_content = b'bar' - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() # NOTE: Ensure that the key does not exist before writing. - with self.assertRaises(storage_service.client.exceptions.NoSuchKey): - storage_service.get(sample_key) + with self.assertRaises(self.storage_service.client.exceptions.NoSuchKey): + self.storage_service.get(sample_key) - storage_service.put(sample_key, sample_content) + self.storage_service.put(sample_key, sample_content) - self.assertEqual(sample_content, storage_service.get(sample_key)) + self.assertEqual(sample_content, self.storage_service.get(sample_key)) @mock_s3 @mock_sts def test_simple_get_unknown_item(self): sample_key = 'foo-simple' - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() - with self.assertRaises(storage_service.client.exceptions.NoSuchKey): - storage_service.get(sample_key) + with self.assertRaises(self.storage_service.client.exceptions.NoSuchKey): + self.storage_service.get(sample_key) @mock_s3 @mock_sts @@ -84,13 +86,13 @@ def test_presigned_url(self): sample_key = 'foo-presigned-url' sample_content = json.dumps({"a": 1}) - storage_service = StorageService() - storage_service.create_bucket() - storage_service.put(sample_key, sample_content.encode()) + self.storage_service.create_bucket() + self.storage_service.put(sample_key, sample_content.encode()) for file_name in None, 'foo.json': with self.subTest(file_name=file_name): - presigned_url = storage_service.get_presigned_url(sample_key, file_name=file_name) + presigned_url = self.storage_service.get_presigned_url(sample_key, + file_name=file_name) response = requests.get(presigned_url) if file_name is None: self.assertNotIn('Content-Disposition', response.headers) @@ -112,13 +114,12 @@ def test_multipart_upload_ok_with_one_part(self): ] expected_content = b"".join(sample_content_parts) - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with MultipartUploadHandler(sample_key) as upload: for part in sample_content_parts: upload.push(part) - self.assertEqual(expected_content, storage_service.get(sample_key)) + self.assertEqual(expected_content, self.storage_service.get(sample_key)) @mock_s3 @mock_sts @@ -131,13 +132,13 @@ def 
test_multipart_upload_ok_with_n_parts(self): ] expected_content = b''.join(sample_content_parts) - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with MultipartUploadHandler(sample_key) as upload: for part in sample_content_parts: upload.push(part) - self.assertEqual(expected_content, storage_service.get(sample_key)) + self.assertEqual(expected_content, + self.storage_service.get(sample_key)) @mock_s3 @mock_sts @@ -149,8 +150,7 @@ def test_multipart_upload_error_with_out_of_bound_part(self): b'c' * 1024 ] - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with self.assertRaises(MultipartUploadError): with MultipartUploadHandler(sample_key) as upload: @@ -168,8 +168,7 @@ def test_multipart_upload_error_inside_context_with_nothing_pushed(self): b'c' * 1024 ] - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with self.assertRaises(MultipartUploadError): with MultipartUploadHandler(sample_key) as upload: for part in sample_content_parts: @@ -184,8 +183,7 @@ def test_multipart_upload_error_inside_thread_with_nothing_pushed(self): b'b' * 5242880 ] - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with patch.object(MultipartUploadHandler, '_upload_part', side_effect=RuntimeError('test')): with self.assertRaises(MultipartUploadError): with MultipartUploadHandler(sample_key) as upload: diff --git a/test/test_doctests.py b/test/test_doctests.py index ba19e82175..4ef0d6d399 100644 --- a/test/test_doctests.py +++ b/test/test_doctests.py @@ -41,7 +41,6 @@ import azul.types import azul.uuids import azul.vendored.frozendict -import retorts # noinspection PyPep8Naming @@ -84,7 +83,6 @@ def load_tests(_loader, tests, _ignore): azul.types, azul.uuids, azul.vendored.frozendict, - retorts, load_app_module('service'), load_script('check_branch'), load_script('envhook'), diff --git a/test/version_table_test_case.py b/test/version_table_test_case.py index c52a456cdd..a75b27e732 100644 --- a/test/version_table_test_case.py +++ b/test/version_table_test_case.py @@ -30,7 +30,6 @@ def setUp(self): self.ddb_client.create_table(TableName=config.dynamo_object_version_table_name, AttributeDefinitions=[ dict(AttributeName=VersionService.key_name, AttributeType='S'), - dict(AttributeName=VersionService.value_name, AttributeType='S') ], KeySchema=[ dict(AttributeName=VersionService.key_name, KeyType='HASH'),