From c411f758c0a5bc345976dbba234ab96257559c23 Mon Sep 17 00:00:00 2001 From: amar jandu Date: Wed, 10 Mar 2021 14:10:38 -0800 Subject: [PATCH 1/9] [R 1/9] Update moto to 2.0.6 (#1718) Update requirements Add FIXME (#3035) --- requirements.dev.trans.txt | 23 +++++++++++------------ requirements.dev.txt | 6 ++++-- requirements.trans.txt | 18 +++++++++--------- requirements.txt | 4 ++-- test/indexer/test_indexer_controller.py | 4 +++- 5 files changed, 29 insertions(+), 26 deletions(-) diff --git a/requirements.dev.trans.txt b/requirements.dev.trans.txt index 67b41616ff..27054993a0 100644 --- a/requirements.dev.trans.txt +++ b/requirements.dev.trans.txt @@ -1,28 +1,26 @@ argh==0.26.2 arrow==0.12.1 aws-sam-translator==1.35.0 -aws-xray-sdk==2.7.0 -cfn-lint==0.48.2 +aws-xray-sdk==2.8.0 +cfn-lint==0.49.2 click==7.1.2 -colorama==0.4.1 -configargparse==1.4 -datetime==4.3 +colorama==0.4.3 +configargparse==1.4.1 decorator==4.4.2 docker-pycreds==0.4.0 ecdsa==0.14.1 enum-compat==0.0.3 flask-basicauth==0.2.0 -flask==1.1.2 +flask==2.0.0 geventhttpclient==1.4.4 google-auth-httplib2==0.1.0 -greenlet==1.0.0 +greenlet==1.1.0 httplib2==0.19.1 -itsdangerous==1.1.0 -jsondiff==1.1.2 +itsdangerous==2.0.1 +jsondiff==1.3.0 jsonpatch==1.32 junit-xml==1.9 mccabe==0.6.1 -mock==4.0.3 msgpack==1.0.2 mypy-extensions==0.4.3 networkx==2.5.1 @@ -39,7 +37,8 @@ smmap2==3.0.1 smmap==4.0.0 sshpubkeys==3.3.1 uritemplate==3.0.1 -websocket-client==0.58.0 +websocket-client==1.0.0 xmltodict==0.12.0 +zipp==3.4.1 zope.event==4.5.0 -zope.interface==5.3.0 +zope.interface==5.4.0 diff --git a/requirements.dev.txt b/requirements.dev.txt index e6235e0c99..33a945e815 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -1,4 +1,4 @@ -awscli==1.16.283 +awscli==1.19.33 git+git://github.com/DataBiosphere/azul-chalice@1.18.0+5#egg=chalice coverage==5.1 docker==3.4.1 @@ -9,7 +9,9 @@ gitpython==2.1.11 google-api-python-client==1.7.8 jsonschema==3.2.0 locust==1.2.3 -moto==1.3.13 # 1.3.14 does not work, possibly 
related to https://github.com/spulec/moto/issues/2567 +# FIXME: Update Moto when decorator ordering issue is resolved +# https://github.com/DataBiosphere/azul/issues/3035 +moto[all]==2.0.6 openapi-spec-validator==0.2.8 pygithub==1.43.5 pytest==3.2.3 diff --git a/requirements.trans.txt b/requirements.trans.txt index 5348b231cd..ae87e9d757 100644 --- a/requirements.trans.txt +++ b/requirements.trans.txt @@ -1,28 +1,28 @@ -argcomplete==1.12.2 +argcomplete==1.12.3 atomicwrites==1.4.0 bagit==1.7.0 -cachetools==4.2.1 +cachetools==4.2.2 certifi==2020.12.5 chardet==3.0.4 commonmark==0.9.1 crc32c==2.2 dcplib==2.1.2 docutils==0.15.2 -google-api-core==1.26.3 +google-api-core==1.28.0 google-auth-oauthlib==0.4.4 google-cloud-core==1.6.0 google-resumable-media==0.5.1 googleapis-common-protos==1.53.0 -idna==2.8 -jinja2==2.11.3 +idna==2.10 +jinja2==3.0.1 jsonpointer==1.14 jsonschema==3.2.0 -markupsafe==1.1.1 +markupsafe==2.0.1 oauthlib==3.1.0 orderedmultidict==1.0.1 packaging==20.9 proto-plus==1.18.1 -protobuf==3.15.8 +protobuf==3.17.0 puremagic==1.4 pycparser==2.20 pyopenssl==20.0.1 @@ -30,8 +30,8 @@ pyparsing==2.4.7 python-dateutil==2.8.1 pytz==2021.1 requests-oauthlib==1.3.0 -s3transfer==0.2.1 -six==1.15.0 +s3transfer==0.3.7 +six==1.16.0 tenacity==5.0.4 text-unidecode==1.3 tqdm==4.60.0 diff --git a/requirements.txt b/requirements.txt index 3db3aff028..5aba31afd6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,9 +3,9 @@ attrs==19.3.0 aws-requests-auth==0.4.2 bdbag==1.5.1 boltons==20.2.0 -boto3==1.10.19 +boto3==1.17.29 boto==2.49.0 -botocore==1.13.19 +botocore==1.20.33 cffi==1.14.0 cryptography==2.3.1 deprecated==1.2.9 diff --git a/test/indexer/test_indexer_controller.py b/test/indexer/test_indexer_controller.py index 1181c7ef48..b4096c928e 100644 --- a/test/indexer/test_indexer_controller.py +++ b/test/indexer/test_indexer_controller.py @@ -1,10 +1,12 @@ import json import logging +from unittest import ( + mock, +) from chalice.app import ( SQSRecord, ) -import mock 
from more_itertools import ( one, ) From 77c2cb4c73ca577ae48f792f96b36cd47018b3e7 Mon Sep 17 00:00:00 2001 From: amar jandu Date: Wed, 10 Mar 2021 14:15:07 -0800 Subject: [PATCH 2/9] [R 2/9] Update moto to 2.0.6 (#1718) Remove workarounds used in moto 1.3.13 --- test/azul_test_case.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/test/azul_test_case.py b/test/azul_test_case.py index c4313b84c2..472fbb3c80 100644 --- a/test/azul_test_case.py +++ b/test/azul_test_case.py @@ -18,6 +18,10 @@ Credentials, ) import botocore.session +from moto.core.models import ( + ACCOUNT_ID, + moto_api_backend, +) from azul import ( CatalogName, @@ -153,11 +157,6 @@ def tearDownClass(cls) -> None: super().tearDownClass() def setUp(self) -> None: - # FIXME: remove local import and update moto - # https://github.com/DataBiosphere/azul/issues/1718 - from moto.core import ( - moto_api_backend, - ) super().setUp() # Moto backends are reset to ensure no resources are left over if a test # fails to clean up after itself. @@ -204,12 +203,7 @@ def _mock_aws_account_id(cls): # Set AZUL_AWS_ACCOUNT_ID to what the Moto is using. This circumvents # assertion errors in azul.deployment.aws.account. cls._aws_account_id = os.environ['AZUL_AWS_ACCOUNT_ID'] - # The fake Moto account ID is defined as a constant in a Moto module - # but we cannot import any Moto modules since doing so has a bad side - # effect: https://github.com/spulec/moto/issues/2058. 
- # FIXME: Switch to overriding MOTO_ACCOUNT_ID as part of - # https://github.com/DataBiosphere/azul/issues/1718 - os.environ['AZUL_AWS_ACCOUNT_ID'] = '123456789012' + os.environ['AZUL_AWS_ACCOUNT_ID'] = ACCOUNT_ID @classmethod def _restore_aws_account(cls): From 5492dd9ba05d46ec53ffe3648fbcae9d747af0f5 Mon Sep 17 00:00:00 2001 From: amar jandu Date: Thu, 25 Mar 2021 13:49:08 -0700 Subject: [PATCH 3/9] [R 3/9] Update moto to 2.0.6 (#1718) Add StorageServiceTestCase --- src/azul/service/storage_service.py | 7 ++- test/health_check_test_case.py | 14 +++--- test/service/__init__.py | 14 ++++++ test/service/test_manifest.py | 16 +++---- test/service/test_query_shortener.py | 16 +++---- test/service/test_storage_service.py | 64 ++++++++++++++-------------- 6 files changed, 73 insertions(+), 58 deletions(-) diff --git a/src/azul/service/storage_service.py b/src/azul/service/storage_service.py index 9af2238143..07a90021ca 100644 --- a/src/azul/service/storage_service.py +++ b/src/azul/service/storage_service.py @@ -116,8 +116,11 @@ def get_presigned_url(self, key: str, file_name: Optional[str] = None) -> str: **({} if file_name is None else {'ResponseContentDisposition': f'attachment;filename="{file_name}"'}) }) - def create_bucket(self, bucket_name: str = None): - self.client.create_bucket(Bucket=(bucket_name or self.bucket_name)) + def create_bucket(self, bucket_name: Optional[str] = None): + self.client.create_bucket(Bucket=(bucket_name or self.bucket_name), + CreateBucketConfiguration={ + 'LocationConstraint': config.region + }) def put_object_tagging(self, object_key: str, tagging: Tagging = None): deadline = time.time() + 60 diff --git a/test/health_check_test_case.py b/test/health_check_test_case.py index 2c6276c214..45ac91fa3e 100644 --- a/test/health_check_test_case.py +++ b/test/health_check_test_case.py @@ -40,9 +40,6 @@ from azul.modules import ( load_app_module, ) -from azul.service.storage_service import ( - StorageService, -) from azul.types import ( JSON, 
) @@ -52,6 +49,9 @@ from retorts import ( ResponsesHelper, ) +from service import ( + StorageServiceTestCase, +) # FIXME: This is inelegant: https://github.com/DataBiosphere/azul/issues/652 @@ -61,7 +61,10 @@ def load_tests(loader, tests, pattern): return suite -class HealthCheckTestCase(LocalAppTestCase, ElasticsearchTestCase, metaclass=ABCMeta): +class HealthCheckTestCase(LocalAppTestCase, + ElasticsearchTestCase, + StorageServiceTestCase, + metaclass=ABCMeta): endpoints = ( '/index/files?size=1', '/index/projects?size=1', @@ -127,8 +130,7 @@ def test_health_endpoint_keys(self): @mock_sts @mock_sqs def test_cached_health(self): - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() # No health object is available in S3 bucket, yielding an error with ResponsesHelper() as helper: helper.add_passthru(self.base_url) diff --git a/test/service/__init__.py b/test/service/__init__.py index 5c095971de..3da79c26a1 100644 --- a/test/service/__init__.py +++ b/test/service/__init__.py @@ -20,11 +20,15 @@ LocalAppTestCase, ) from azul import ( + cached_property, config, ) from azul.indexer import ( SourcedBundleFQID, ) +from azul.service.storage_service import ( + StorageService, +) from indexer import ( IndexerTestCase, ) @@ -120,3 +124,13 @@ def setUpClass(cls): def tearDownClass(cls): cls._dss_mock.stop() super().tearDownClass() + + +class StorageServiceTestCase(TestCase): + """ + A mixin for test cases that utilize StorageService. 
+ """ + + @cached_property + def storage_service(self) -> StorageService: + return StorageService() diff --git a/test/service/test_manifest.py b/test/service/test_manifest.py index 1d4ad36d01..5829c403ff 100644 --- a/test/service/test_manifest.py +++ b/test/service/test_manifest.py @@ -82,9 +82,6 @@ ManifestGenerator, ManifestService, ) -from azul.service.storage_service import ( - StorageService, -) from azul.types import ( JSON, ) @@ -96,6 +93,7 @@ ) from service import ( DSSUnitTestCase, + StorageServiceTestCase, WebServiceTestCase, ) @@ -107,7 +105,8 @@ def setUpModule(): configure_test_logging(logger) -class ManifestTestCase(WebServiceTestCase): +@mock_s3 +class ManifestTestCase(WebServiceTestCase, StorageServiceTestCase): def setUp(self): super().setUp() @@ -137,7 +136,7 @@ def _get_manifest(self, format_: ManifestFormat, filters: Filters, stream=False) return requests.get(manifest.location, stream=stream) def _get_manifest_object(self, format_: ManifestFormat, filters: JSON) -> Manifest: - service = ManifestService(StorageService()) + service = ManifestService(self.storage_service) return service.get_manifest(format_=format_, catalog=self.catalog, filters=filters) @@ -156,8 +155,7 @@ def wrapper(self, *args, **kwargs): # add_passthru let's us hit the server. 
# See this GitHub issue and comment: https://github.com/spulec/moto/issues/1026#issuecomment-380054270 helper.add_passthru(self.base_url) - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() return test(self, *args, **kwargs) return wrapper @@ -1595,7 +1593,7 @@ def test_hash_validity(self): self._index_canned_bundle(original_fqid) filters = {'project': {'is': ['Single of human pancreas']}} old_object_keys = {} - service = ManifestService(StorageService()) + service = ManifestService(self.storage_service) for format_ in ManifestFormat: with self.subTest(msg='indexing new bundle', format_=format_): # When a new bundle is indexed and its full manifest cached, @@ -1668,7 +1666,7 @@ def test_fetch_manifest(self, get_cached_manifest): formats with a mocked return value from `get_cached_manifest`. """ manifest_url = 'https://url.to.manifest?foo=bar' - service = ManifestService(StorageService()) + service = ManifestService(self.storage_service) for format_ in ManifestFormat: with self.subTest(format_=format_): # Mock get_cached_manifest.return_value with a dummy 'location' diff --git a/test/service/test_query_shortener.py b/test/service/test_query_shortener.py index 22c46393d9..7ee5652163 100644 --- a/test/service/test_query_shortener.py +++ b/test/service/test_query_shortener.py @@ -17,8 +17,8 @@ from azul.logging import ( configure_test_logging, ) -from azul.service.storage_service import ( - StorageService, +from service import ( + StorageServiceTestCase, ) from retorts import ( ResponsesHelper, @@ -30,7 +30,7 @@ def setUpModule(): configure_test_logging() -class TestQueryShortener(LocalAppTestCase): +class TestQueryShortener(LocalAppTestCase, StorageServiceTestCase): @classmethod def lambda_name(cls) -> str: @@ -63,7 +63,7 @@ def test_valid_url(self, storage_service_get, storage_service_put): # exception classes are tied to the session and different threads use # different sessions. 
https://github.com/boto/botocore/issues/2238 def side_effect(_): - raise StorageService().client.exceptions.NoSuchKey({}, "") + raise self.storage_service.client.exceptions.NoSuchKey({}, "") storage_service_get.side_effect = side_effect response = self._shorten_query_url( @@ -87,7 +87,7 @@ def test_whitelisting(self): 'https://subdomain.singlecell.gi.ucsc.edu/', 'https://sub.subdomain.singlecell.gi.ucsc.edu/abc/def' ] - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) for url in urls: with self.subTest(url=url): self._shorten_query_url(url) @@ -105,7 +105,7 @@ def test_invalid_url(self): 'http://singlecell.gi.xyz.edu', 'singlecell.gi.ucsc.edu' ] - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) for url in urls: with self.subTest(url=url): self._shorten_query_url(url, expect_status=400) @@ -117,7 +117,7 @@ def test_shortened_url_matching(self): URL shortener should return the same response URL for identical input URLs """ url = 'https://singlecell.gi.ucsc.edu' - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) shortened_url1 = self._shorten_query_url(url) shortened_url2 = self._shorten_query_url(url) self.assertEqual(shortened_url1, shortened_url2) @@ -131,7 +131,7 @@ def test_shortened_url_collision(self): """ with mock.patch.object(self.app_module, 'hash_url') as hash_url: hash_url.return_value = 'abcde' - StorageService().create_bucket(config.url_redirect_full_domain_name) + self.storage_service.create_bucket(config.url_redirect_full_domain_name) self.assertEqual(self._shorten_query_url('https://singlecell.gi.ucsc.edu')['url'], f'http://{config.url_redirect_full_domain_name}/abc') diff --git a/test/service/test_storage_service.py b/test/service/test_storage_service.py index 
9725a11924..36d1e7f607 100644 --- a/test/service/test_storage_service.py +++ b/test/service/test_storage_service.py @@ -16,11 +16,13 @@ from azul.service.storage_service import ( MultipartUploadError, MultipartUploadHandler, - StorageService, ) from azul_test_case import ( AzulUnitTestCase, ) +from service import ( + StorageServiceTestCase, +) # noinspection PyPep8Naming @@ -28,27 +30,29 @@ def setUpModule(): configure_test_logging() -class StorageServiceTest(AzulUnitTestCase): +class StorageServiceTest(AzulUnitTestCase, StorageServiceTestCase): """ Functional Test for Storage Service """ - storage_service: StorageService @mock_s3 @mock_sts def test_upload_tags(self): - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() object_key = 'test_file' with tempfile.NamedTemporaryFile('w') as f: f.write('some contents') for tags in (None, {}, {'Name': 'foo', 'game': 'bar'}): with self.subTest(tags=tags): - storage_service.upload(file_path=f.name, object_key=object_key, tagging=tags) + self.storage_service.upload(file_path=f.name, + object_key=object_key, + tagging=tags) if tags is None: tags = {} - self.assertEqual(tags, storage_service.get_object_tagging(object_key)) + upload_tags = self.storage_service.get_object_tagging(object_key) + self.assertEqual(tags, + upload_tags) @mock_s3 @mock_sts @@ -56,27 +60,25 @@ def test_simple_get_put(self): sample_key = 'foo-simple' sample_content = b'bar' - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() # NOTE: Ensure that the key does not exist before writing. 
- with self.assertRaises(storage_service.client.exceptions.NoSuchKey): - storage_service.get(sample_key) + with self.assertRaises(self.storage_service.client.exceptions.NoSuchKey): + self.storage_service.get(sample_key) - storage_service.put(sample_key, sample_content) + self.storage_service.put(sample_key, sample_content) - self.assertEqual(sample_content, storage_service.get(sample_key)) + self.assertEqual(sample_content, self.storage_service.get(sample_key)) @mock_s3 @mock_sts def test_simple_get_unknown_item(self): sample_key = 'foo-simple' - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() - with self.assertRaises(storage_service.client.exceptions.NoSuchKey): - storage_service.get(sample_key) + with self.assertRaises(self.storage_service.client.exceptions.NoSuchKey): + self.storage_service.get(sample_key) @mock_s3 @mock_sts @@ -84,13 +86,13 @@ def test_presigned_url(self): sample_key = 'foo-presigned-url' sample_content = json.dumps({"a": 1}) - storage_service = StorageService() - storage_service.create_bucket() - storage_service.put(sample_key, sample_content.encode()) + self.storage_service.create_bucket() + self.storage_service.put(sample_key, sample_content.encode()) for file_name in None, 'foo.json': with self.subTest(file_name=file_name): - presigned_url = storage_service.get_presigned_url(sample_key, file_name=file_name) + presigned_url = self.storage_service.get_presigned_url(sample_key, + file_name=file_name) response = requests.get(presigned_url) if file_name is None: self.assertNotIn('Content-Disposition', response.headers) @@ -112,13 +114,12 @@ def test_multipart_upload_ok_with_one_part(self): ] expected_content = b"".join(sample_content_parts) - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with MultipartUploadHandler(sample_key) as upload: for part in sample_content_parts: upload.push(part) - self.assertEqual(expected_content, 
storage_service.get(sample_key)) + self.assertEqual(expected_content, self.storage_service.get(sample_key)) @mock_s3 @mock_sts @@ -131,13 +132,13 @@ def test_multipart_upload_ok_with_n_parts(self): ] expected_content = b''.join(sample_content_parts) - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with MultipartUploadHandler(sample_key) as upload: for part in sample_content_parts: upload.push(part) - self.assertEqual(expected_content, storage_service.get(sample_key)) + self.assertEqual(expected_content, + self.storage_service.get(sample_key)) @mock_s3 @mock_sts @@ -149,8 +150,7 @@ def test_multipart_upload_error_with_out_of_bound_part(self): b'c' * 1024 ] - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with self.assertRaises(MultipartUploadError): with MultipartUploadHandler(sample_key) as upload: @@ -168,8 +168,7 @@ def test_multipart_upload_error_inside_context_with_nothing_pushed(self): b'c' * 1024 ] - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with self.assertRaises(MultipartUploadError): with MultipartUploadHandler(sample_key) as upload: for part in sample_content_parts: @@ -184,8 +183,7 @@ def test_multipart_upload_error_inside_thread_with_nothing_pushed(self): b'b' * 5242880 ] - storage_service = StorageService() - storage_service.create_bucket() + self.storage_service.create_bucket() with patch.object(MultipartUploadHandler, '_upload_part', side_effect=RuntimeError('test')): with self.assertRaises(MultipartUploadError): with MultipartUploadHandler(sample_key) as upload: From 63cf7221415586c45e367d5247367c8566cbb3f5 Mon Sep 17 00:00:00 2001 From: amar jandu Date: Wed, 17 Mar 2021 15:05:32 -0700 Subject: [PATCH 4/9] [R 4/9] Update moto to 2.0.6 (#1718) Remove ResponseHelper usage in tests --- attic/cart/test_cart_export_service.py | 12 ++----- {test => attic/test}/retorts.py 
| 0 test/health_check_test_case.py | 34 +++++++++++-------- test/indexer/test_hca_indexer.py | 21 +++++------- test/service/test_collection_data_access.py | 37 ++++++--------------- test/service/test_drs.py | 12 ++----- test/service/test_manifest.py | 12 ++----- test/service/test_manifest_async.py | 6 ++-- test/service/test_query_shortener.py | 17 ++++------ test/service/test_repository_proxy.py | 5 +-- test/test_doctests.py | 2 -- 11 files changed, 57 insertions(+), 101 deletions(-) rename {test => attic/test}/retorts.py (100%) diff --git a/attic/cart/test_cart_export_service.py b/attic/cart/test_cart_export_service.py index 72f60209fd..d43f9368d9 100644 --- a/attic/cart/test_cart_export_service.py +++ b/attic/cart/test_cart_export_service.py @@ -18,9 +18,6 @@ from azul.service.collection_data_access import ( CollectionDataAccess, ) -from retorts import ( - ResponsesHelper, -) @skipIf(config.dss_endpoint is None, @@ -89,7 +86,6 @@ def mock_get_paginable_cart_items(**kwargs): self.assertEqual(2, len(content_items)) self.assertIn(expected_content_item_1, content_items) - @responses.activate @patch('azul.deployment.aws.dynamo') def test_export_create_new_collection(self, _dynamodb_client): expected_collection = dict(uuid='abc', version='123') @@ -98,7 +94,7 @@ def test_export_create_new_collection(self, _dynamodb_client): service = CartExportService() with patch.object(service.cart_item_manager, 'get_cart', side_effect=[dict(CartName='abc123')]): with patch.object(service, 'get_content', side_effect=[expected_get_content_result]): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.Response(responses.PUT, CollectionDataAccess.endpoint_url('collections'), status=201, @@ -114,7 +110,6 @@ def test_export_create_new_collection(self, _dynamodb_client): self.assertEqual(expected_get_content_result['resume_token'], result['resume_token']) self.assertEqual(len(expected_get_content_result['items']), 
result['exported_item_count']) - @responses.activate @patch('azul.deployment.aws.dynamo') def test_export_append_items_to_collection_ok(self, _dynamodb_client): expected_collection = dict(uuid='abc', version='123') @@ -122,7 +117,7 @@ def test_export_append_items_to_collection_ok(self, _dynamodb_client): items=[1, 2, 3, 4]) # NOTE: This is just for the test. service = CartExportService() with patch.object(service, 'get_content', side_effect=[expected_get_content_result]): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.Response( responses.PATCH, CollectionDataAccess.endpoint_url('collections', expected_collection['uuid']), @@ -139,7 +134,6 @@ def test_export_append_items_to_collection_ok(self, _dynamodb_client): self.assertEqual(expected_get_content_result['resume_token'], result['resume_token']) self.assertEqual(len(expected_get_content_result['items']), result['exported_item_count']) - @responses.activate @patch('azul.deployment.aws.dynamo') def test_export_append_items_to_collection_raises_expired_access_token_error(self, _dynamodb_client): expected_collection = dict(uuid='abc', version='123') @@ -148,7 +142,7 @@ def test_export_append_items_to_collection_raises_expired_access_token_error(sel service = CartExportService() with self.assertRaises(ExpiredAccessTokenError): with patch.object(service, 'get_content', side_effect=[expected_get_content_result]): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = CollectionDataAccess.endpoint_url('collections', expected_collection['uuid']) helper.add(responses.Response(responses.PATCH, url, status=401, json=dict(code='abc'))) service.export(export_id='export1', diff --git a/test/retorts.py b/attic/test/retorts.py similarity index 100% rename from test/retorts.py rename to attic/test/retorts.py diff --git a/test/health_check_test_case.py b/test/health_check_test_case.py index 45ac91fa3e..ae5b76c186 100644 --- 
a/test/health_check_test_case.py +++ b/test/health_check_test_case.py @@ -46,9 +46,6 @@ from es_test_case import ( ElasticsearchTestCase, ) -from retorts import ( - ResponsesHelper, -) from service import ( StorageServiceTestCase, ) @@ -118,7 +115,7 @@ def test_health_endpoint_keys(self): self._create_mock_queues() for keys, expected_response in expected.items(): with self.subTest(msg=keys): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) self._mock_other_lambdas(helper, up=True) with self._mock_service_endpoints(helper, endpoint_states): @@ -132,7 +129,7 @@ def test_health_endpoint_keys(self): def test_cached_health(self): self.storage_service.create_bucket() # No health object is available in S3 bucket, yielding an error - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) response = requests.get(self.base_url + '/health/cached') self.assertEqual(500, response.status_code) @@ -142,7 +139,7 @@ def test_cached_health(self): self._create_mock_queues() endpoint_states = self._endpoint_states() app = load_app_module(self.lambda_name()) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) with self._mock_service_endpoints(helper, endpoint_states): app.update_health_cache(MagicMock(), MagicMock()) @@ -151,17 +148,16 @@ def test_cached_health(self): # Another failure is observed when the cache health object is older than 2 minutes future_time = time.time() + 3 * 60 - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) with patch('time.time', new=lambda: future_time): response = requests.get(self.base_url + '/health/cached') self.assertEqual(500, response.status_code) self.assertEqual('ChaliceViewError: Cached health object is stale', response.json()['Message']) - @responses.activate def test_laziness(self): # Note the absence 
of moto decorators on this test. - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) self._mock_other_lambdas(helper, up=True) # If Health weren't lazy, it would fail due the lack of mocks for SQS. @@ -200,7 +196,9 @@ def _expected_queues(self, up: bool) -> JSON: 'delayed': 0, 'invisible': 0, 'queued': 0 } } if up else { - 'up': False, 'error': 'The specified queue does not exist for this wsdl version.' + 'up': False, + 'error': f'The specified queue {queue_name} does not exist for' + f' this wsdl version.' } for queue_name in config.all_queue_names }) @@ -261,14 +259,20 @@ def _expected_elasticsearch(self, up: bool) -> JSON: } def _test(self, endpoint_states: Mapping[str, bool], lambdas_up: bool, path: str = '/health/fast'): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) self._mock_other_lambdas(helper, lambdas_up) with self._mock_service_endpoints(helper, endpoint_states): return requests.get(self.base_url + path) @contextmanager - def _mock_service_endpoints(self, helper: ResponsesHelper, endpoint_states: Mapping[str, bool]) -> None: + def _mock_service_endpoints(self, + helper: responses.RequestsMock, + endpoint_states: Mapping[str, bool]) -> None: + # Without the following attribute set to `False` the test case that + # calls this method is required to send a request to every mocked + # endpoint. 
+ helper.assert_all_requests_are_fired = False for endpoint, endpoint_up in endpoint_states.items(): helper.add(responses.Response(method='HEAD', url=config.service_endpoint() + endpoint, @@ -276,7 +280,11 @@ def _mock_service_endpoints(self, helper: ResponsesHelper, endpoint_states: Mapp json={})) yield - def _mock_other_lambdas(self, helper: ResponsesHelper, up: bool): + def _mock_other_lambdas(self, helper: responses.RequestsMock, up: bool): + # Without the following attribute set to `False` the test case that + # calls this method is required to send a request to every mocked + # endpoint. + helper.assert_all_requests_are_fired = False for lambda_name in self._other_lambda_names(): helper.add(responses.Response(method='GET', url=config.lambda_endpoint(lambda_name) + '/health/basic', diff --git a/test/indexer/test_hca_indexer.py b/test/indexer/test_hca_indexer.py index 5e4d5a427c..8da58afc7a 100644 --- a/test/indexer/test_hca_indexer.py +++ b/test/indexer/test_hca_indexer.py @@ -100,9 +100,6 @@ from indexer import ( IndexerTestCase, ) -from retorts import ( - ResponsesHelper, -) logger = logging.getLogger(__name__) @@ -1470,16 +1467,14 @@ def test_invalid_auth_for_notification_request(self): self.assertEqual(401, response.status_code) def _test(self, body: JSON, delete: bool, valid_auth: bool) -> requests.Response: - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) - hmac_creds = {'key': b'good key', 'key_id': 'the id'} - with patch('azul.deployment.aws.get_hmac_key_and_id', return_value=hmac_creds): - if valid_auth: - auth = hmac.prepare() - else: - auth = HTTPSignatureAuth(key=b'bad key', key_id='the id') - url = furl(self.base_url, path=(self.catalog, 'delete' if delete else 'add')) - return requests.post(url.url, json=body, auth=auth) + hmac_creds = {'key': b'good key', 'key_id': 'the id'} + with patch('azul.deployment.aws.get_hmac_key_and_id', return_value=hmac_creds): + if valid_auth: + auth = hmac.prepare() + else: + auth = 
HTTPSignatureAuth(key=b'bad key', key_id='the id') + url = furl(self.base_url, path=(self.catalog, 'delete' if delete else 'add')) + return requests.post(url.url, json=body, auth=auth) @staticmethod def _create_mock_notifications_queue(): diff --git a/test/service/test_collection_data_access.py b/test/service/test_collection_data_access.py index 49899a7841..cbc781709d 100644 --- a/test/service/test_collection_data_access.py +++ b/test/service/test_collection_data_access.py @@ -18,9 +18,6 @@ UnauthorizedClientAccessError, UpdateError, ) -from retorts import ( - ResponsesHelper, -) @skipIf(config.dss_endpoint is None, @@ -31,12 +28,11 @@ def setUp(self): fake_access_token = 'fake_access_token' self.cda = CollectionDataAccess(fake_access_token) - @responses.activate def test_get_ok(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' fake_collection = {'hello': 'world'} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.Response(responses.GET, self.cda.endpoint_url('collections', test_collection_uuid), json=fake_collection)) @@ -46,11 +42,10 @@ def test_get_ok(self): version=test_collection_version, collection=fake_collection)) - @responses.activate def test_get_raises_retrival_error(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.GET, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(567, '{}'), @@ -58,12 +53,11 @@ def test_get_raises_retrival_error(self): with self.assertRaises(RetrievalError): self.cda.get(test_collection_uuid, test_collection_version) - @responses.activate def test_create_ok(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' expected_collection = dict(uuid=test_collection_uuid, version=test_collection_version) - with ResponsesHelper() as 
helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PUT, self.cda.endpoint_url('collections'), callback=RequestCallback(201, json.dumps(expected_collection)), @@ -71,12 +65,11 @@ def test_create_ok(self): collection = self.cda.create(test_collection_uuid, 'foo bar', 'bar', test_collection_version, []) self.assertEqual(collection, expected_collection) - @responses.activate def test_create_raises_creation_error(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' fake_dss_response = {"code": "unknown"} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PUT, self.cda.endpoint_url('collections'), callback=RequestCallback(500, json.dumps(fake_dss_response)), @@ -84,12 +77,11 @@ def test_create_raises_creation_error(self): with self.assertRaises(CreationError): self.cda.create(test_collection_uuid, 'foo bar', 'bar', test_collection_version, []) - @responses.activate def test_append_with_no_items_successful(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' expected_collection = dict(uuid=test_collection_uuid, version=test_collection_version) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PATCH, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(200, json.dumps(expected_collection)), @@ -97,12 +89,11 @@ def test_append_with_no_items_successful(self): collection = self.cda.append(test_collection_uuid, test_collection_version, []) self.assertEqual(collection, expected_collection) - @responses.activate def test_append_with_some_items_successful(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' expected_collection = dict(uuid=test_collection_uuid, version=test_collection_version) - with ResponsesHelper() as helper: + with responses.RequestsMock() 
as helper: helper.add(responses.CallbackResponse(responses.PATCH, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(200, json.dumps(expected_collection)), @@ -114,11 +105,10 @@ def test_append_with_some_items_successful(self): dict(type='foo_n', uuid='bar_n', version='baz_n')]) self.assertEqual(collection, expected_collection) - @responses.activate def test_append_raises_update_error(self): test_collection_uuid = 'abcdef123456' test_collection_version = '1980-01-01' - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add(responses.CallbackResponse(responses.PATCH, self.cda.endpoint_url('collections', test_collection_uuid), callback=RequestCallback(405, '{}'), @@ -126,11 +116,10 @@ def test_append_raises_update_error(self): with self.assertRaises(UpdateError): self.cda.append(test_collection_uuid, test_collection_version, []) - @responses.activate def test_send_request_successful_with_auto_retry_on_http_504_timeout(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'hello_world'} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) helper.add(responses.CallbackResponse(responses.GET, url, @@ -141,7 +130,6 @@ def test_send_request_successful_with_auto_retry_on_http_504_timeout(self): response = self.cda.send_request(test_collection_uuid, 'get', url, {}) self.assertEqual(response.json(), expected_response) - @responses.activate def test_send_request_successful_with_auto_retry_on_http_502(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'hello_world'} @@ -154,7 +142,7 @@ def test_send_request_successful_with_auto_retry_on_http_502(self): def mock_request_handler(_request): return mock_response_sequence.pop(0) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) 
helper.add(responses.CallbackResponse(responses.GET, url, @@ -163,17 +151,15 @@ def mock_request_handler(_request): response = self.cda.send_request(test_collection_uuid, 'get', url, {}) self.assertEqual(response.json(), expected_response) - @responses.activate def test_send_request_fails_after_too_many_retries(self): test_collection_uuid = 'abcdef123456' with self.assertRaises(ServerTimeoutError): self.cda.send_request(test_collection_uuid, 'get', 'fake_url', {}, delay=64) - @responses.activate def test_send_request_with_unexpected_response_code_raises_client_error(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'hello_world'} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) helper.add(responses.CallbackResponse(responses.GET, url, @@ -182,11 +168,10 @@ def test_send_request_with_unexpected_response_code_raises_client_error(self): with self.assertRaises(ClientError): self.cda.send_request(test_collection_uuid, 'get', url, {}, expected_status_code=200) - @responses.activate def test_send_request_with_unexpected_response_code_raises_unauthorized_client_access_error(self): test_collection_uuid = 'abcdef123456' expected_response = {'code': 'mock_error'} - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: url = self.cda.endpoint_url(test_collection_uuid) helper.add(responses.CallbackResponse(responses.GET, url, diff --git a/test/service/test_drs.py b/test/service/test_drs.py index dbdaf51fb5..4666e6fbbd 100644 --- a/test/service/test_drs.py +++ b/test/service/test_drs.py @@ -30,9 +30,6 @@ dss_drs_object_uri, dss_drs_object_url, ) -from retorts import ( - ResponsesHelper, -) from service import ( DSSUnitTestCase, WebServiceTestCase, @@ -56,9 +53,8 @@ def tearDownClass(cls): def chalice_config(self): return ChaliceConfig.create(lambda_timeout=15) - @responses.activate def _get_data_object(self, file_uuid, file_version): - with 
ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) drs_url = dss_dos_object_url(file_uuid=file_uuid, catalog=self.catalog, @@ -133,7 +129,6 @@ class DRSTest(WebServiceTestCase, DSSUnitTestCase): signed_url = 'https://org-hca-dss-checkout-prod.s3.amazonaws.com/blobs/307.a72.eb6?foo=bar&et=cetera' gs_url = 'gs://important-bucket/object/path' - @responses.activate def test_drs(self): """ Mocks the DSS backend, then uses the DRS endpoints as a client is @@ -143,7 +138,7 @@ def test_drs(self): file_version = '2018-11-02T113344.698028Z' for redirects in (0, 1, 2, 6): with self.subTest(redirects=redirects): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) self._mock_responses(helper, redirects, file_uuid, file_version=file_version) # Make first client request @@ -271,11 +266,10 @@ def _mock_responses(self, helper, redirects, file_uuid, file_version=None): helper.add(self._dss_response(file_uuid, file_version, 'aws', initial=False, _301=False)) helper.add(self._dss_response(file_uuid, file_version, 'gcp', initial=False, _301=False)) - @responses.activate def test_data_object_not_found(self): file_uuid = 'NOT_A_GOOD_IDEA' error_body = 'DRS should just proxy the DSS for error responses' - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) url = f'{config.dss_endpoint}/files/{file_uuid}' helper.add(responses.Response(method=responses.GET, diff --git a/test/service/test_manifest.py b/test/service/test_manifest.py index 5829c403ff..ac2b05c34e 100644 --- a/test/service/test_manifest.py +++ b/test/service/test_manifest.py @@ -88,9 +88,6 @@ from azul_test_case import ( AzulUnitTestCase, ) -from retorts import ( - ResponsesHelper, -) from service import ( DSSUnitTestCase, StorageServiceTestCase, @@ -150,13 +147,8 @@ def manifest_test(test): @mock_sts @mock_s3 def wrapper(self, *args, **kwargs): - with 
ResponsesHelper() as helper: - # moto will mock the requests.get call so we can't hit localhost; - # add_passthru let's us hit the server. - # See this GitHub issue and comment: https://github.com/spulec/moto/issues/1026#issuecomment-380054270 - helper.add_passthru(self.base_url) - self.storage_service.create_bucket() - return test(self, *args, **kwargs) + self.storage_service.create_bucket() + return test(self, *args, **kwargs) return wrapper diff --git a/test/service/test_manifest_async.py b/test/service/test_manifest_async.py index 6901735c74..1260c9ff32 100644 --- a/test/service/test_manifest_async.py +++ b/test/service/test_manifest_async.py @@ -15,6 +15,7 @@ mock_sts, ) import requests +import responses from app_test_case import ( LocalAppTestCase, @@ -44,9 +45,6 @@ from azul_test_case import ( AzulUnitTestCase, ) -from retorts import ( - ResponsesHelper, -) # noinspection PyPep8Naming @@ -175,7 +173,7 @@ def test(self, mock_uuid, mock_helper): service = load_app_module('service') # In a LocalAppTestCase we need the actual state machine name state_machine_name = config.state_machine_name(service.generate_manifest.name) - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) for fetch in (True, False): with self.subTest(fetch=fetch): diff --git a/test/service/test_query_shortener.py b/test/service/test_query_shortener.py index 7ee5652163..a2344389ca 100644 --- a/test/service/test_query_shortener.py +++ b/test/service/test_query_shortener.py @@ -20,9 +20,6 @@ from service import ( StorageServiceTestCase, ) -from retorts import ( - ResponsesHelper, -) # noinspection PyPep8Naming @@ -37,14 +34,12 @@ def lambda_name(cls) -> str: return 'service' def _shorten_query_url(self, url, expect_status=None): - with ResponsesHelper() as helper: - helper.add_passthru(self.base_url) - response = requests.post(self.base_url + '/url', json={'url': url}) - if expect_status is None: - response.raise_for_status() - else: - 
self.assertEqual(response.status_code, expect_status) - return response.json() + response = requests.post(self.base_url + '/url', json={'url': url}) + if expect_status is None: + response.raise_for_status() + else: + self.assertEqual(response.status_code, expect_status) + return response.json() @mock_sts @mock_s3 diff --git a/test/service/test_repository_proxy.py b/test/service/test_repository_proxy.py index a83c73d8b5..084753f210 100644 --- a/test/service/test_repository_proxy.py +++ b/test/service/test_repository_proxy.py @@ -56,9 +56,6 @@ from azul.terra import ( TerraClient, ) -from retorts import ( - ResponsesHelper, -) from service import ( DSSUnitTestCase, ) @@ -230,7 +227,7 @@ def test_repository_files_proxy(self, dss_direct_access_role): ('foo bar.txt', 'grbM6udwp0n/QE/L/RYfjtQCS/U='), ('foo&bar.txt', 'r4C8YxpJ4nXTZh+agBsfhZ2e7fI=')]: with self.subTest(fetch=fetch, file_name=file_name, wait=wait): - with ResponsesHelper() as helper: + with responses.RequestsMock() as helper: helper.add_passthru(self.base_url) fixed_time = 1547691253.07010 expires = str(round(fixed_time + 3600)) diff --git a/test/test_doctests.py b/test/test_doctests.py index ba19e82175..4ef0d6d399 100644 --- a/test/test_doctests.py +++ b/test/test_doctests.py @@ -41,7 +41,6 @@ import azul.types import azul.uuids import azul.vendored.frozendict -import retorts # noinspection PyPep8Naming @@ -84,7 +83,6 @@ def load_tests(_loader, tests, _ignore): azul.types, azul.uuids, azul.vendored.frozendict, - retorts, load_app_module('service'), load_script('check_branch'), load_script('envhook'), From 94ebeef519359d9d0e8c935d5c630854a2d9e1c7 Mon Sep 17 00:00:00 2001 From: amar jandu Date: Thu, 18 Mar 2021 15:13:23 -0700 Subject: [PATCH 5/9] [R 5/9] Update moto to 2.0.6 (#1718) Fix create_table in version_table_test_case --- test/version_table_test_case.py | 1 - 1 file changed, 1 deletion(-) diff --git a/test/version_table_test_case.py b/test/version_table_test_case.py index c52a456cdd..a75b27e732 
100644 --- a/test/version_table_test_case.py +++ b/test/version_table_test_case.py @@ -30,7 +30,6 @@ def setUp(self): self.ddb_client.create_table(TableName=config.dynamo_object_version_table_name, AttributeDefinitions=[ dict(AttributeName=VersionService.key_name, AttributeType='S'), - dict(AttributeName=VersionService.value_name, AttributeType='S') ], KeySchema=[ dict(AttributeName=VersionService.key_name, KeyType='HASH'), From e28196bf2652930ade01395b7a5bf7f36e6d45af Mon Sep 17 00:00:00 2001 From: amar jandu Date: Thu, 18 Mar 2021 15:28:53 -0700 Subject: [PATCH 6/9] [R 6/9] Update moto to 2.0.6 (#1718) Update botocore exception for portal_service --- src/azul/portal_service.py | 14 ++++++++++++++ test/service/test_portal_service.py | 7 ++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/src/azul/portal_service.py b/src/azul/portal_service.py index 1e394d45dd..e0055f79ed 100644 --- a/src/azul/portal_service.py +++ b/src/azul/portal_service.py @@ -14,6 +14,10 @@ cast, ) +from botocore.exceptions import ( + ClientError, +) + from azul import ( cached_property, config, @@ -197,7 +201,17 @@ def _read_db(self, version: str) -> JSONs: Key=self.object_key, VersionId=version) except self.client.exceptions.NoSuchKey: + # We hypothesize that when S3 was only eventually consistent, + # NoSuchKey would have been raised when an object had been + # created but hadn't materialized yet … raise NoSuchObjectVersion(version) + except ClientError as e: + # … and that NoSuchVersion would have been raised when the object had + # been overwritten but the overwrite hadn't materialized yet. 
+ if e.response['Error']['Code'] == 'NoSuchVersion': + raise NoSuchObjectVersion(version) + else: + raise else: json_bytes = response['Body'].read() return json.loads(json_bytes.decode()) diff --git a/test/service/test_portal_service.py b/test/service/test_portal_service.py index 99ce7d506c..7a056f26c5 100644 --- a/test/service/test_portal_service.py +++ b/test/service/test_portal_service.py @@ -143,7 +143,12 @@ def test_internal_crud(self): with self.subTest('read'): read_db = self.portal_service._read_db(version) self.assertEqual(read_db, download_db) - self.assertRaises(NoSuchObjectVersion, self.portal_service._read_db, 'fake_version') + # The version identifier below is syntactically correct, but does + # not refer to any real version of any S3 object. + # See also https://github.com/spulec/moto/issues/3884 + self.assertRaises(NoSuchObjectVersion, + self.portal_service._read_db, + 'VWVT9JkWTreQ95JbRmQt6T3LWrljLpRZ') with self.subTest('update'): version = self.portal_service._write_db(self.dummy_db, version) From 4136dab427c9628742c4da4d7e6e6fa0ba589330 Mon Sep 17 00:00:00 2001 From: amar jandu Date: Mon, 29 Mar 2021 11:31:13 -0700 Subject: [PATCH 7/9] [R 7/9] Update moto to 2.0.6 (#1718) Add LocationConstraint to create_bucket in tests --- test/service/test_portal_service.py | 5 ++++- test/service/test_repository_proxy.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/test/service/test_portal_service.py b/test/service/test_portal_service.py index 7a056f26c5..55926617ca 100644 --- a/test/service/test_portal_service.py +++ b/test/service/test_portal_service.py @@ -96,7 +96,10 @@ def setUp(self): self.portal_service = PortalService() self.s3_client = aws.client('s3') - self.s3_client.create_bucket(Bucket=self.portal_service.bucket) + self.s3_client.create_bucket(Bucket=self.portal_service.bucket, + CreateBucketConfiguration={ + 'LocationConstraint': config.region + }) self.s3_client.put_bucket_versioning(Bucket=self.portal_service.bucket, 
VersioningConfiguration={ 'Status': 'Enabled', diff --git a/test/service/test_repository_proxy.py b/test/service/test_repository_proxy.py index 084753f210..8beb7cd556 100644 --- a/test/service/test_repository_proxy.py +++ b/test/service/test_repository_proxy.py @@ -199,7 +199,10 @@ def test_repository_files_proxy(self, dss_direct_access_role): "847325b6") bucket_name = 'org-humancellatlas-dss-checkout-staging' s3 = aws.client('s3') - s3.create_bucket(Bucket=bucket_name) + s3.create_bucket(Bucket=bucket_name, + CreateBucketConfiguration={ + 'LocationConstraint': config.region + }) s3.upload_fileobj(Bucket=bucket_name, Fileobj=io.BytesIO(b'foo'), Key=key) file_uuid = '701c9a63-23da-4978-946b-7576b6ad088a' file_version = '2018-09-12T121154.054628Z' From a9e5dd545927ab79b32738a1374f9d66e31a3863 Mon Sep 17 00:00:00 2001 From: amar jandu Date: Mon, 29 Mar 2021 17:18:23 -0700 Subject: [PATCH 8/9] [R 8/9] Update moto to 2.0.6 (#1718) Remove TestPortalService infra teardown --- test/service/test_portal_service.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/test/service/test_portal_service.py b/test/service/test_portal_service.py index 55926617ca..fa06c0b773 100644 --- a/test/service/test_portal_service.py +++ b/test/service/test_portal_service.py @@ -106,21 +106,6 @@ def setUp(self): 'MFADelete': 'Disabled' }) - def tearDown(self): - super().tearDown() - - # To ensure that the bucket is cleared between tests, all versions - # must be deleted. The most convenient way to do this is just to - # disabling versioning and perform a single delete. 
- self.s3_client.put_bucket_versioning(Bucket=self.portal_service.bucket, - VersioningConfiguration={ - 'Status': 'Disabled', - 'MFADelete': 'Disabled' - }) - self.s3_client.delete_object(Bucket=self.portal_service.bucket, - Key=self.portal_service.object_key) - self.s3_client.delete_bucket(Bucket=self.portal_service.bucket) - def download_db(self) -> JSONs: response = self.s3_client.get_object(Bucket=self.portal_service.bucket, Key=self.portal_service.object_key) From 0df9bc7f1073366862496bd2bb55ceea437a3402 Mon Sep 17 00:00:00 2001 From: Hannes Schmidt Date: Wed, 19 May 2021 23:02:34 -0700 Subject: [PATCH 9/9] [R 9/9] Update moto to 2.0.6 (#1718) Refactor response mock usage in health check tests --- test/health_check_test_case.py | 62 +++++++++++++++------------------- 1 file changed, 27 insertions(+), 35 deletions(-) diff --git a/test/health_check_test_case.py b/test/health_check_test_case.py index ae5b76c186..c9fb7e76e8 100644 --- a/test/health_check_test_case.py +++ b/test/health_check_test_case.py @@ -2,9 +2,6 @@ ABCMeta, abstractmethod, ) -from contextlib import ( - contextmanager, -) import os import time from typing import ( @@ -115,13 +112,12 @@ def test_health_endpoint_keys(self): self._create_mock_queues() for keys, expected_response in expected.items(): with self.subTest(msg=keys): - with responses.RequestsMock() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: self._mock_other_lambdas(helper, up=True) - with self._mock_service_endpoints(helper, endpoint_states): - response = requests.get(self.base_url + '/health/' + keys) - self.assertEqual(200, response.status_code) - self.assertEqual(expected_response, response.json()) + self._mock_service_endpoints(helper, endpoint_states) + response = requests.get(self.base_url + '/health/' + keys) + self.assertEqual(200, response.status_code) + self.assertEqual(expected_response, response.json()) @mock_s3 @mock_sts @@ -129,8 +125,7 @@ def test_health_endpoint_keys(self): def 
test_cached_health(self): self.storage_service.create_bucket() # No health object is available in S3 bucket, yielding an error - with responses.RequestsMock() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: response = requests.get(self.base_url + '/health/cached') self.assertEqual(500, response.status_code) self.assertEqual('ChaliceViewError: Cached health object does not exist', response.json()['Message']) @@ -139,17 +134,15 @@ def test_cached_health(self): self._create_mock_queues() endpoint_states = self._endpoint_states() app = load_app_module(self.lambda_name()) - with responses.RequestsMock() as helper: - helper.add_passthru(self.base_url) - with self._mock_service_endpoints(helper, endpoint_states): - app.update_health_cache(MagicMock(), MagicMock()) - response = requests.get(self.base_url + '/health/cached') - self.assertEqual(200, response.status_code) + with self.helper() as helper: + self._mock_service_endpoints(helper, endpoint_states) + app.update_health_cache(MagicMock(), MagicMock()) + response = requests.get(self.base_url + '/health/cached') + self.assertEqual(200, response.status_code) # Another failure is observed when the cache health object is older than 2 minutes future_time = time.time() + 3 * 60 - with responses.RequestsMock() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: with patch('time.time', new=lambda: future_time): response = requests.get(self.base_url + '/health/cached') self.assertEqual(500, response.status_code) @@ -157,8 +150,7 @@ def test_cached_health(self): def test_laziness(self): # Note the absence of moto decorators on this test. - with responses.RequestsMock() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: self._mock_other_lambdas(helper, up=True) # If Health weren't lazy, it would fail due the lack of mocks for SQS. 
response = requests.get(self.base_url + '/health/other_lambdas') @@ -259,32 +251,32 @@ def _expected_elasticsearch(self, up: bool) -> JSON: } def _test(self, endpoint_states: Mapping[str, bool], lambdas_up: bool, path: str = '/health/fast'): - with responses.RequestsMock() as helper: - helper.add_passthru(self.base_url) + with self.helper() as helper: self._mock_other_lambdas(helper, lambdas_up) - with self._mock_service_endpoints(helper, endpoint_states): - return requests.get(self.base_url + path) + self._mock_service_endpoints(helper, endpoint_states) + return requests.get(self.base_url + path) + + def helper(self): + helper = responses.RequestsMock() + helper.add_passthru(self.base_url) + # We originally shared the Requests mock with Moto which had this set + # to False. Because of that, and without noticing, we ended up mocking + # more responses than necessary for some of the tests. Instead of + # rewriting the tests to only mock what is actually used, we simply + # disable the assertion, just like Moto did. + helper.assert_all_requests_are_fired = False + return helper - @contextmanager def _mock_service_endpoints(self, helper: responses.RequestsMock, endpoint_states: Mapping[str, bool]) -> None: - # Without the following attribute set to `False` the test case that - # calls this method is required to send a request to every mocked - # endpoint. - helper.assert_all_requests_are_fired = False for endpoint, endpoint_up in endpoint_states.items(): helper.add(responses.Response(method='HEAD', url=config.service_endpoint() + endpoint, status=200 if endpoint_up else 503, json={})) - yield def _mock_other_lambdas(self, helper: responses.RequestsMock, up: bool): - # Without the following attribute set to `False` the test case that - # calls this method is required to send a request to every mocked - # endpoint. 
- helper.assert_all_requests_are_fired = False for lambda_name in self._other_lambda_names(): helper.add(responses.Response(method='GET', url=config.lambda_endpoint(lambda_name) + '/health/basic',