From d50664880eb387343e40e25acf0f4eda3487576d Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 24 Jun 2021 13:34:28 -0700 Subject: [PATCH 01/24] chore: Increase awareness of same file warning during package (#2946) * chore: increase awareness of same file warning during package * fix formatting & grammar Co-authored-by: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> --- samcli/lib/package/s3_uploader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 4a64a983d0..34ac666b86 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -85,7 +85,7 @@ def upload(self, file_name: str, remote_path: str) -> str: # Check if a file with same data exists if not self.force_upload and self.file_exists(remote_path): - LOG.debug("File with same data is already exists at %s. " "Skipping upload", remote_path) + LOG.info("File with same data already exists at %s, skipping upload", remote_path) return self.make_url(remote_path) try: From 698de67035967eff345a72fb3859bf7a06378c6b Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Date: Thu, 24 Jun 2021 16:07:13 -0700 Subject: [PATCH 02/24] fix: Allow the base64Encoded field in REST Api, skip validation of unknown fields and validate missing statusCode for Http Api (#2941) * fix API Gateway emulator: - skip validating the non allowed fields for Http Api Gateway, as it always skip the unknown fields - add base64Encoded as an allowed field for Rest Api gateway - base64 decoding will be always done for Http API gateway if the lambda response isBase64Encoded is true regardless the content-type - validate if statusCode is missing in case of Http API, and payload version 1.0 * - accept "true", "True", "false", "False" as valid isBase64Encoded values. - Validate on other isBase64Encoded Values - add more integration && unit test cases * fix lint && black issues * use smaller image to test Base64 response --- samcli/local/apigw/local_apigw_service.py | 71 ++- .../local/start_api/test_start_api.py | 49 +- .../testdata/start_api/binarydata.gif | Bin 1951 -> 49 bytes .../start_api/image_package_type/main.py | 2 +- tests/integration/testdata/start_api/main.py | 36 +- .../testdata/start_api/swagger-template.yaml | 48 ++ .../local/apigw/test_local_apigw_service.py | 441 ++++++++++++++++-- 7 files changed, 577 insertions(+), 70 deletions(-) diff --git a/samcli/local/apigw/local_apigw_service.py b/samcli/local/apigw/local_apigw_service.py index cc2684c200..5a6d397d54 100644 --- a/samcli/local/apigw/local_apigw_service.py +++ b/samcli/local/apigw/local_apigw_service.py @@ -333,7 +333,7 @@ def _request_handler(self, **kwargs): ) else: (status_code, headers, body) = self._parse_v1_payload_format_lambda_output( - lambda_response, self.api.binary_media_types, request + lambda_response, self.api.binary_media_types, request, route.event_type ) except LambdaResponseParseException as ex: LOG.error("Invalid lambda response received: %s", ex) @@ -379,13 +379,14 @@ def get_request_methods_endpoints(flask_request): # Consider moving this out to its own class. 
Logic is started to get dense and looks messy @jfuss @staticmethod - def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, flask_request): + def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, flask_request, event_type): """ Parses the output from the Lambda Container :param str lambda_output: Output from Lambda Invoke :param binary_types: list of binary types :param flask_request: flash request object + :param event_type: determines the route event type :return: Tuple(int, dict, str, bool) """ # pylint: disable-msg=too-many-statements @@ -397,6 +398,9 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla if not isinstance(json_output, dict): raise LambdaResponseParseException(f"Lambda returned {type(json_output)} instead of dict") + if event_type == Route.HTTP and json_output.get("statusCode") is None: + raise LambdaResponseParseException(f"Invalid API Gateway Response Key: statusCode is not in {json_output}") + status_code = json_output.get("statusCode") or 200 headers = LocalApigwService._merge_response_headers( json_output.get("headers") or {}, json_output.get("multiValueHeaders") or {} @@ -405,7 +409,8 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla body = json_output.get("body") if body is None: LOG.warning("Lambda returned empty body!") - is_base_64_encoded = json_output.get("isBase64Encoded") or False + + is_base_64_encoded = LocalApigwService.get_base_64_encoded(event_type, json_output) try: status_code = int(status_code) @@ -422,8 +427,10 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla f"Non null response bodies should be able to convert to string: {body}" ) from ex - invalid_keys = LocalApigwService._invalid_apig_response_keys(json_output) - if invalid_keys: + invalid_keys = LocalApigwService._invalid_apig_response_keys(json_output, event_type) + # HTTP API Gateway just skip the non allowed lambda response fields, but Rest API gateway fail on + # the non allowed fields + if event_type == Route.API and invalid_keys: raise LambdaResponseParseException(f"Invalid API Gateway Response Keys: {invalid_keys} in {json_output}") # If the customer doesn't define Content-Type default to application/json @@ -432,17 +439,51 @@ def _parse_v1_payload_format_lambda_output(lambda_output: str, binary_types, fla headers["Content-Type"] = "application/json" try: - if LocalApigwService._should_base64_decode_body(binary_types, flask_request, headers, is_base_64_encoded): + # HTTP API Gateway always decode the lambda response only if isBase64Encoded field in response is True + # regardless the response content-type + # Rest API Gateway depends on the response content-type and the API configured BinaryMediaTypes to decide + # if it will decode the response or not + if (event_type == Route.HTTP and is_base_64_encoded) or ( + event_type == Route.API + and LocalApigwService._should_base64_decode_body( + binary_types, flask_request, headers, is_base_64_encoded + ) + ): body = base64.b64decode(body) except ValueError as ex: LambdaResponseParseException(str(ex)) return status_code, headers, body + @staticmethod + def get_base_64_encoded(event_type, json_output): + # The following behaviour is undocumented behaviour, and based on some trials + # Http API gateway checks lambda response for isBase64Encoded field, and ignore base64Encoded + # Rest API gateway checks first the field base64Encoded field, if not exist, it checks isBase64Encoded field + + if event_type 
== Route.API and json_output.get("base64Encoded") is not None: + is_base_64_encoded = json_output.get("base64Encoded") + field_name = "base64Encoded" + elif json_output.get("isBase64Encoded") is not None: + is_base_64_encoded = json_output.get("isBase64Encoded") + field_name = "isBase64Encoded" + else: + is_base_64_encoded = False + field_name = "isBase64Encoded" + + if isinstance(is_base_64_encoded, str) and is_base_64_encoded in ["true", "True", "false", "False"]: + is_base_64_encoded = is_base_64_encoded in ["true", "True"] + elif not isinstance(is_base_64_encoded, bool): + raise LambdaResponseParseException( + f"Invalid API Gateway Response Key: {is_base_64_encoded} is not a valid" f"{field_name}" + ) + + return is_base_64_encoded + @staticmethod def _parse_v2_payload_format_lambda_output(lambda_output: str, binary_types, flask_request): """ - Parses the output from the Lambda Container + Parses the output from the Lambda Container. V2 Payload Format means that the event_type is only HTTP :param str lambda_output: Output from Lambda Invoke :param binary_types: list of binary types @@ -487,21 +528,15 @@ def _parse_v2_payload_format_lambda_output(lambda_output: str, binary_types, fla f"Non null response bodies should be able to convert to string: {body}" ) from ex - # API Gateway only accepts statusCode, body, headers, and isBase64Encoded in - # a response shape. - # Don't check the response keys when inferring a response, see - # https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.v2. - invalid_keys = LocalApigwService._invalid_apig_response_keys(json_output) - if "statusCode" in json_output and invalid_keys: - raise LambdaResponseParseException(f"Invalid API Gateway Response Keys: {invalid_keys} in {json_output}") - # If the customer doesn't define Content-Type default to application/json if "Content-Type" not in headers: LOG.info("No Content-Type given. Defaulting to 'application/json'.") headers["Content-Type"] = "application/json" try: - if LocalApigwService._should_base64_decode_body(binary_types, flask_request, headers, is_base_64_encoded): + # HTTP API Gateway always decode the lambda response only if isBase64Encoded field in response is True + # regardless the response content-type + if is_base_64_encoded: # Note(xinhol): here in this method we change the type of the variable body multiple times # and confused mypy, we might want to avoid this and use multiple variables here. 
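                # Note: base64.b64decode raises binascii.Error (a ValueError
                # subclass) on malformed input, which is what the
                # except ValueError handler below is catching.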
body = base64.b64decode(body) # type: ignore @@ -511,8 +546,10 @@ def _parse_v2_payload_format_lambda_output(lambda_output: str, binary_types, fla return status_code, headers, body @staticmethod - def _invalid_apig_response_keys(output): + def _invalid_apig_response_keys(output, event_type): allowable = {"statusCode", "body", "headers", "multiValueHeaders", "isBase64Encoded", "cookies"} + if event_type == Route.API: + allowable.add("base64Encoded") invalid_keys = output.keys() - allowable return invalid_keys diff --git a/tests/integration/local/start_api/test_start_api.py b/tests/integration/local/start_api/test_start_api.py index e7e5ad59a1..0ddb8d5a31 100644 --- a/tests/integration/local/start_api/test_start_api.py +++ b/tests/integration/local/start_api/test_start_api.py @@ -1,3 +1,4 @@ +import base64 import uuid import random @@ -382,14 +383,14 @@ def test_valid_v2_lambda_integer_response(self): @pytest.mark.flaky(reruns=3) @pytest.mark.timeout(timeout=600, method="thread") - def test_invalid_v2_lambda_response(self): + def test_v2_lambda_response_skip_unexpected_fields(self): """ Patch Request to a path that was defined as ANY in SAM through AWS::Serverless::Function Events """ response = requests.get(self.url + "/invalidv2response", timeout=300) - self.assertEqual(response.status_code, 502) - self.assertEqual(response.json(), {"message": "Internal server error"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) @pytest.mark.flaky(reruns=3) @pytest.mark.timeout(timeout=600, method="thread") @@ -538,6 +539,48 @@ def test_binary_response(self): self.assertEqual(response.headers.get("Content-Type"), "image/gif") self.assertEqual(response.content, expected) + @pytest.mark.flaky(reruns=3) + @pytest.mark.timeout(timeout=600, method="thread") + def test_non_decoded_binary_response(self): + """ + Binary data is returned correctly + """ + expected = base64.b64encode(self.get_binary_data(self.binary_data_file)) + + response = requests.get(self.url + "/nondecodedbase64response", timeout=300) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) + + @pytest.mark.flaky(reruns=3) + @pytest.mark.timeout(timeout=600, method="thread") + def test_decoded_binary_response_base64encoded_field(self): + """ + Binary data is returned correctly + """ + expected = self.get_binary_data(self.binary_data_file) + + response = requests.get(self.url + "/decodedbase64responsebas64encoded", timeout=300) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) + + @pytest.mark.flaky(reruns=3) + @pytest.mark.timeout(timeout=600, method="thread") + def test_decoded_binary_response_base64encoded_field_is_priority(self): + """ + Binary data is returned correctly + """ + expected = base64.b64encode(self.get_binary_data(self.binary_data_file)) + + response = requests.get(self.url + "/decodedbase64responsebas64encodedpriority", timeout=300) + + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) + class TestStartApiWithSwaggerHttpApis(StartApiIntegBaseClass): template_path = "/testdata/start_api/swagger-template-http-api.yaml" diff --git a/tests/integration/testdata/start_api/binarydata.gif b/tests/integration/testdata/start_api/binarydata.gif index 
855b4041793a49335cf6d1b66d8c1e5059daf60f..3f40c2073daf9743db59e7bec58cf90e8f6d3fbc 100644 GIT binary patch literal 49 ucmZ?wbh9u|WMp7un8*ME|Ns97(+r9~SvVOOm>6_GT#!5i6O#)ggEau*tOpVR literal 1951 zcmd^8{a2EC9{qyKi?V`(uBZf}LzuRr)~uT8Lm4uyCYqdPgu-KB=kujfc^aqiu{cg<9!rLi^agVZ!h$> zz$jFYCx9m=a9GUthWTNxaL66B_yG8`|NMEqD=_-|!(y?4Ox6(zT1Eb5Tpax2!)j>g z9D`wwi17OyOxvr~evTZfkA%)-l?nh52n16s);tl!9S)F6rSD>5a!+u_16c$D;k!RV z8z^9aphxB6?Ck7zH^wN%$>8HdrBaOm%sPT8MZT5_$V9@snVFY?feeaMT*1Yfni{yP zYl=wJvRKDC&;WxLLUH=Ev;;wrvQ`0)jFj+ry}i8?C%A>V;#vl6P%OS(F3#ieBw}%I zRMgDOjM;3C2xN_Psj^wjHyRCWGQl%56)fhTSj>l#)E0pt^Gx0w4#!|Hm`tW51`Vds zioBT+i`i9Md0mo9H5#AxTv^r+J`nPvqM{lLQpI91ES19b^>aeuV`YiA#-Ihi-RIc= zfRmGdO0^)oZ= zZWZ=Ns_vx4T>8W5pr(&^R&b56w||iby+!f97hc}8&w3#9sT>ueJmu1_ZYwy=d`7lwzjCN37$e6$bVS~u7^ zlh8$yv&lx#oXPtkVMuiR^wG<3A+4zJoA1*ia<6L`0aCBq!@iw(uT+} zEE$l+3r))KFXy@J(i0HEqvzS{?!MWe z3~{=xc(t$lS_~WEMw-)ET;RAo2T!X1ymw&I9vzQ$SPxnn%&N|~$)t!1dr0NdOJ2oW zqr>eZKiztPxEHg-4&R;7(e11?mu0k$ShT;HqjbbRt{Btj@R0Yrx4oK&qPS~wE^)CD z)zsqF@7B`k^E%(G|6E*TqgLAvU_GNX?4u3|>Jh(GHDb;?$gqohqKwA?dG`Vb> zhPF}+J$2m=?_jFmm*S`XA?!)VE5!ReSCVjIYU-HX>a&Vd(4)B;#RdZ7PRNsb=#`^; zAB2)%Xksz5X4%p+iPeYoERx$FWmsAv^-ePhTUu zK00t9)3*ublF_u!DTukt+JTW9Gsq6qMZcvv@j<*XSgs06_24GDrW{UCJtth7ycJ-V zB8o;M&~5~4yOR>yIrDXe<}899OFM)JR{#5+fG-gr*>&TgDU3Wl^>@qn7&|G1eCst` zgmqivjoONao^O|{DyyT}D8(ppXCqwZx^KLAYl^y=`0drOt2p!{7roUzg_b8glt(HR zqIXaFXeS|T_F;r)BN(=w{f7s=PGw$yc_qM3$`8=Qr>F;U8^7rQ944|pm9TE6vyM$Omm*tOa#_@eN#A Date: Mon, 28 Jun 2021 18:50:33 -0700 Subject: [PATCH 03/24] fix: pass copy of environment variables for keeping cache valid (#2943) * fix: pass copy of environment variables for keeping cache valid * add integ tests * update docs * make black happy Co-authored-by: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> --- samcli/lib/build/build_strategy.py | 7 ++++- tests/integration/buildcmd/test_build_cmd.py | 30 +++++++++++++++++++ .../lib/build_module/test_build_strategy.py | 9 ++++-- 3 files changed, 43 insertions(+), 3 deletions(-) diff --git a/samcli/lib/build/build_strategy.py b/samcli/lib/build/build_strategy.py index ecded3a743..258101ba2d 100644 --- a/samcli/lib/build/build_strategy.py +++ b/samcli/lib/build/build_strategy.py @@ -5,6 +5,7 @@ import pathlib import shutil from abc import abstractmethod, ABC +from copy import deepcopy from typing import Callable, Dict, List, Any, Optional, cast from samcli.commands.build.exceptions import MissingBuildMethodException @@ -114,6 +115,10 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini LOG.debug("Building to following folder %s", single_build_dir) + # we should create a copy and pass it down, otherwise additional env vars like LAMBDA_BUILDERS_LOG_LEVEL + # will make cache invalid all the time + container_env_vars = deepcopy(build_definition.env_vars) + # when a function is passed here, it is ZIP function, codeuri and runtime are not None result = self._build_function( build_definition.get_function_name(), @@ -123,7 +128,7 @@ def build_single_function_definition(self, build_definition: FunctionBuildDefini build_definition.get_handler_name(), single_build_dir, build_definition.metadata, - build_definition.env_vars, + container_env_vars, ) function_build_results[single_full_path] = result diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index ba25849672..4681a4f2eb 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -1346,6 +1346,36 @@ def test_cache_build(self, 
use_container, code_uri, function1_handler, function2 expected_messages, command_result, self._make_parameter_override_arg(overrides) ) + @skipIf(SKIP_DOCKER_TESTS, SKIP_DOCKER_MESSAGE) + def test_cached_build_with_env_vars(self): + """ + Build 2 times to verify that second time hits the cached build + """ + overrides = { + "FunctionCodeUri": "Python", + "Function1Handler": "main.first_function_handler", + "Function2Handler": "main.second_function_handler", + "FunctionRuntime": "python3.8", + } + cmdlist = self.get_command_list( + use_container=True, parameter_overrides=overrides, cached=True, container_env_var="FOO=BAR" + ) + + LOG.info("Running Command (cache should be invalid): %s", cmdlist) + command_result = run_command(cmdlist, cwd=self.working_dir) + self.assertTrue( + "Cache is invalid, running build and copying resources to function build definition" + in command_result.stderr.decode("utf-8") + ) + + LOG.info("Re-Running Command (valid cache should exist): %s", cmdlist) + command_result_with_cache = run_command(cmdlist, cwd=self.working_dir) + + self.assertTrue( + "Valid cache found, copying previously built resources from function build definition" + in command_result_with_cache.stderr.decode("utf-8") + ) + @skipIf( ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), diff --git a/tests/unit/lib/build_module/test_build_strategy.py b/tests/unit/lib/build_module/test_build_strategy.py index 7e9902a172..1fae5b7962 100644 --- a/tests/unit/lib/build_module/test_build_strategy.py +++ b/tests/unit/lib/build_module/test_build_strategy.py @@ -1,3 +1,4 @@ +from copy import deepcopy from unittest import TestCase from unittest.mock import Mock, patch, MagicMock, call, ANY @@ -218,11 +219,15 @@ def test_build_single_function_definition_image_functions_with_same_metadata(sel function2.name = "Function2" function2.full_path = "Function2" function2.packagetype = IMAGE - build_definition = FunctionBuildDefinition("3.7", "codeuri", IMAGE, {}) + build_definition = FunctionBuildDefinition("3.7", "codeuri", IMAGE, {}, env_vars={"FOO": "BAR"}) # since they have the same metadata, they are put into the same build_definition. 
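        # the strategy under test is expected to deep-copy env_vars before invoking
        # the builder; patching deepcopy below (with wraps=deepcopy) lets the test
        # assert that the copy happens without changing deepcopy's behavior.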
build_definition.functions = [function1, function2] - result = default_build_strategy.build_single_function_definition(build_definition) + with patch("samcli.lib.build.build_strategy.deepcopy", wraps=deepcopy) as patched_deepcopy: + result = default_build_strategy.build_single_function_definition(build_definition) + + patched_deepcopy.assert_called_with(build_definition.env_vars) + # both of the function name should show up in results self.assertEqual(result, {"Function": built_image, "Function2": built_image}) From 6fc1b991777ad362e88e0a4064a4736a9429e37c Mon Sep 17 00:00:00 2001 From: Alexis Facques Date: Tue, 29 Jun 2021 22:56:20 +0200 Subject: [PATCH 04/24] fix: Skip build of Docker image if ImageUri is a valid ECR URL (#2934) (#2935) --- samcli/commands/deploy/guided_context.py | 2 +- samcli/lib/package/ecr_utils.py | 2 +- samcli/lib/providers/sam_base_provider.py | 28 +++ samcli/lib/providers/sam_function_provider.py | 17 +- .../local/lib/test_sam_function_provider.py | 163 ++++++++++++++---- 5 files changed, 178 insertions(+), 34 deletions(-) diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index b4bb65a0d6..dafdf0a331 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -316,7 +316,7 @@ def prompt_image_repository(self, stacks: List[Stack]): if isinstance(self.image_repositories, dict) else "" or self.image_repository, ) - if not is_ecr_url(image_repositories.get(resource_id)): + if resource_id not in image_repositories or not is_ecr_url(str(image_repositories[resource_id])): raise GuidedDeployFailedError( f"Invalid Image Repository ECR URI: {image_repositories.get(resource_id)}" ) diff --git a/samcli/lib/package/ecr_utils.py b/samcli/lib/package/ecr_utils.py index 6186d24099..f4bedc4a27 100644 --- a/samcli/lib/package/ecr_utils.py +++ b/samcli/lib/package/ecr_utils.py @@ -6,5 +6,5 @@ from samcli.lib.package.regexpr import ECR_URL -def is_ecr_url(url): +def is_ecr_url(url: str) -> bool: return bool(re.match(ECR_URL, url)) if url else False diff --git a/samcli/lib/providers/sam_base_provider.py b/samcli/lib/providers/sam_base_provider.py index c059284eb8..7a75c70cc8 100644 --- a/samcli/lib/providers/sam_base_provider.py +++ b/samcli/lib/providers/sam_base_provider.py @@ -10,6 +10,8 @@ from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable from samcli.lib.samlib.resource_metadata_normalizer import ResourceMetadataNormalizer from samcli.lib.samlib.wrapper import SamTranslatorWrapper +from samcli.lib.package.ecr_utils import is_ecr_url + LOG = logging.getLogger(__name__) @@ -34,6 +36,11 @@ class SamBaseProvider: SERVERLESS_LAYER: "ContentUri", } + IMAGE_PROPERTY_KEYS = { + LAMBDA_FUNCTION: "Code", + SERVERLESS_FUNCTION: "ImageUri", + } + def get(self, name: str) -> Optional[Any]: """ Given name of the function, this method must return the Function object @@ -88,6 +95,17 @@ def _is_s3_location(location: Optional[Union[str, Dict]]) -> bool: isinstance(location, str) and location.startswith("s3://") ) + @staticmethod + def _is_ecr_uri(location: Optional[Union[str, Dict]]) -> bool: + """ + the input could be: + - ImageUri of Serverless::Function + - Code of Lambda::Function + """ + return location is not None and is_ecr_url( + str(location.get("ImageUri", "")) if isinstance(location, dict) else location + ) + @staticmethod def _warn_code_extraction(resource_type: str, resource_name: str, code_property: str) -> None: LOG.warning( @@ -98,6 +116,16 @@ def 
_warn_code_extraction(resource_type: str, resource_name: str, code_property: code_property, ) + @staticmethod + def _warn_imageuri_extraction(resource_type: str, resource_name: str, image_property: str) -> None: + LOG.warning( + "The resource %s '%s' has specified ECR registry image for %s. " + "It will not be built and SAM CLI does not support invoking it locally.", + resource_type, + resource_name, + image_property, + ) + @staticmethod def _extract_lambda_function_imageuri(resource_properties: Dict, code_property_key: str) -> Optional[str]: """ diff --git a/samcli/lib/providers/sam_function_provider.py b/samcli/lib/providers/sam_function_provider.py index 7bc231f929..6bffc4bf75 100644 --- a/samcli/lib/providers/sam_function_provider.py +++ b/samcli/lib/providers/sam_function_provider.py @@ -130,13 +130,28 @@ def _extract_functions( resource_properties["Metadata"] = resource_metadata if resource_type in [SamFunctionProvider.SERVERLESS_FUNCTION, SamFunctionProvider.LAMBDA_FUNCTION]: + resource_package_type = resource_properties.get("PackageType", ZIP) + code_property_key = SamBaseProvider.CODE_PROPERTY_KEYS[resource_type] - if SamBaseProvider._is_s3_location(resource_properties.get(code_property_key)): + image_property_key = SamBaseProvider.IMAGE_PROPERTY_KEYS[resource_type] + + if resource_package_type == ZIP and SamBaseProvider._is_s3_location( + resource_properties.get(code_property_key) + ): + # CodeUri can be a dictionary of S3 Bucket/Key or a S3 URI, neither of which are supported if not ignore_code_extraction_warnings: SamFunctionProvider._warn_code_extraction(resource_type, name, code_property_key) continue + if resource_package_type == IMAGE and SamBaseProvider._is_ecr_uri( + resource_properties.get(image_property_key) + ): + # ImageUri can be an ECR uri, which is not supported + if not ignore_code_extraction_warnings: + SamFunctionProvider._warn_imageuri_extraction(resource_type, name, image_property_key) + continue + if resource_type == SamFunctionProvider.SERVERLESS_FUNCTION: layers = SamFunctionProvider._parse_layer_info( stack, diff --git a/tests/unit/commands/local/lib/test_sam_function_provider.py b/tests/unit/commands/local/lib/test_sam_function_provider.py index 3d33f1a312..9daf92abc0 100644 --- a/tests/unit/commands/local/lib/test_sam_function_provider.py +++ b/tests/unit/commands/local/lib/test_sam_function_provider.py @@ -63,10 +63,6 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, - "SamFunc4": { - "Type": "AWS::Serverless::Function", - "Properties": {"ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", "PackageType": IMAGE}, - }, "SamFuncWithFunctionNameOverride": { "Type": "AWS::Serverless::Function", "Properties": { @@ -76,6 +72,29 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, + "SamFuncWithImage1": { + "Type": "AWS::Serverless::Function", + "Properties": { + "PackageType": IMAGE, + }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, + }, + "SamFuncWithImage2": { + "Type": "AWS::Serverless::Function", + "Properties": { + "ImageUri": "image:tag", + "PackageType": IMAGE, + }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, + }, + "SamFuncWithImage3": { + # ImageUri is unsupported ECR location + "Type": "AWS::Serverless::Function", + "Properties": { + "ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo:myimage", + "PackageType": IMAGE, + }, + }, "LambdaFunc1": { "Type": 
"AWS::Lambda::Function", "Properties": { @@ -84,21 +103,37 @@ class TestSamFunctionProviderEndToEnd(TestCase): "Handler": "index.handler", }, }, - "LambdaFuncWithInlineCode": { + "LambdaFuncWithImage1": { "Type": "AWS::Lambda::Function", "Properties": { - "Code": {"ZipFile": "testcode"}, - "Runtime": "nodejs4.3", - "Handler": "index.handler", + "PackageType": IMAGE, + }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, + }, + "LambdaFuncWithImage2": { + "Type": "AWS::Lambda::Function", + "Properties": { + "Code": {"ImageUri": "image:tag"}, + "PackageType": IMAGE, }, + "Metadata": {"DockerTag": "tag", "DockerContext": "./image", "Dockerfile": "Dockerfile"}, }, - "LambdaFunc2": { + "LambdaFuncWithImage3": { + # ImageUri is unsupported ECR location "Type": "AWS::Lambda::Function", "Properties": { "Code": {"ImageUri": "123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo"}, "PackageType": IMAGE, }, }, + "LambdaFuncWithInlineCode": { + "Type": "AWS::Lambda::Function", + "Properties": { + "Code": {"ZipFile": "testcode"}, + "Runtime": "nodejs4.3", + "Handler": "index.handler", + }, + }, "LambdaFuncWithLocalPath": { "Type": "AWS::Lambda::Function", "Properties": {"Code": "./some/path/to/code", "Runtime": "nodejs4.3", "Handler": "index.handler"}, @@ -248,10 +283,10 @@ def setUp(self): ("SamFunc2", None), # codeuri is a s3 location, ignored ("SamFunc3", None), # codeuri is a s3 location, ignored ( - "SamFunc4", + "SamFuncWithImage1", Function( - name="SamFunc4", - functionname="SamFunc4", + name="SamFuncWithImage1", + functionname="SamFuncWithImage1", runtime=None, handler=None, codeuri=".", @@ -262,14 +297,46 @@ def setUp(self): layers=[], events=None, inlinecode=None, - imageuri="123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", + imageuri=None, imageconfig=None, packagetype=IMAGE, - metadata=None, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, codesign_config_arn=None, stack_path="", ), ), + ( + "SamFuncWithImage2", + Function( + name="SamFuncWithImage2", + functionname="SamFuncWithImage2", + runtime=None, + handler=None, + codeuri=".", + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + events=None, + inlinecode=None, + imageuri="image:tag", + imageconfig=None, + packagetype=IMAGE, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, + codesign_config_arn=None, + stack_path="", + ), + ), + ("SamFuncWithImage3", None), # imageuri is ecr location, ignored ( "SamFuncWithFunctionNameOverride-x", Function( @@ -295,33 +362,37 @@ def setUp(self): ), ("LambdaFunc1", None), # codeuri is a s3 location, ignored ( - "LambdaFuncWithInlineCode", + "LambdaFuncWithImage1", Function( - name="LambdaFuncWithInlineCode", - functionname="LambdaFuncWithInlineCode", - runtime="nodejs4.3", - handler="index.handler", - codeuri=None, + name="LambdaFuncWithImage1", + functionname="LambdaFuncWithImage1", + runtime=None, + handler=None, + codeuri=".", memory=None, timeout=None, environment=None, rolearn=None, layers=[], events=None, - metadata=None, - inlinecode="testcode", - codesign_config_arn=None, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, + inlinecode=None, imageuri=None, imageconfig=None, - packagetype=ZIP, + packagetype=IMAGE, + codesign_config_arn=None, stack_path="", ), ), ( - "LambdaFunc2", + "LambdaFuncWithImage2", Function( - name="LambdaFunc2", - 
functionname="LambdaFunc2", + name="LambdaFuncWithImage2", + functionname="LambdaFuncWithImage2", runtime=None, handler=None, codeuri=".", @@ -331,15 +402,43 @@ def setUp(self): rolearn=None, layers=[], events=None, - metadata=None, + metadata={ + "DockerTag": "tag", + "DockerContext": os.path.join("image"), + "Dockerfile": "Dockerfile", + }, inlinecode=None, - imageuri="123456789012.dkr.ecr.us-east-1.amazonaws.com/myrepo", + imageuri="image:tag", imageconfig=None, packagetype=IMAGE, codesign_config_arn=None, stack_path="", ), ), + ("LambdaFuncWithImage3", None), # imageuri is a ecr location, ignored + ( + "LambdaFuncWithInlineCode", + Function( + name="LambdaFuncWithInlineCode", + functionname="LambdaFuncWithInlineCode", + runtime="nodejs4.3", + handler="index.handler", + codeuri=None, + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + events=None, + metadata=None, + inlinecode="testcode", + codesign_config_arn=None, + imageuri=None, + imageconfig=None, + packagetype=ZIP, + stack_path="", + ), + ), ( "LambdaFuncWithLocalPath", Function( @@ -494,11 +593,13 @@ def test_get_all_must_return_all_functions(self): result = {posixpath.join(f.stack_path, f.name) for f in self.provider.get_all()} expected = { "SamFunctions", + "SamFuncWithImage1", + "SamFuncWithImage2", "SamFuncWithInlineCode", - "SamFunc4", "SamFuncWithFunctionNameOverride", + "LambdaFuncWithImage1", + "LambdaFuncWithImage2", "LambdaFuncWithInlineCode", - "LambdaFunc2", "LambdaFuncWithLocalPath", "LambdaFuncWithFunctionNameOverride", "LambdaFuncWithCodeSignConfig", From 0d58f89f563def47a74301cf0c92c87beb2cf706 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Wed, 30 Jun 2021 15:53:08 -0700 Subject: [PATCH 05/24] Add condition to managed bucket policy (#2999) --- samcli/lib/bootstrap/bootstrap.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index 81c30c7748..eaed58d630 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -73,6 +73,9 @@ def _get_stack_template(): - "/*" Principal: Service: serverlessrepo.amazonaws.com + Condition: + StringEquals: + aws:SourceAccount: !Ref AWS::AccountId Outputs: SourceBucket: From 50ac3cbd72c983a430e0d7a2c5271f5d7af85701 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Wed, 30 Jun 2021 20:16:23 -0700 Subject: [PATCH 06/24] Update appveyor.yml to do docker login on both dockerhub and Public ECR (#3005) (#3006) Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> --- appveyor.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 606fe62d5f..1833c78b1f 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -170,7 +170,11 @@ for: # Runs only in Linux, logging docker hub when running canary and docker cred is available - sh: " if [[ -n $BY_CANARY ]] && [[ -n $DOCKER_USER ]] && [[ -n $DOCKER_PASS ]]; - then echo Logging in Docker Hub; echo $DOCKER_PASS | docker login --username $DOCKER_USER --password-stdin; + then echo Logging in Docker Hub; echo $DOCKER_PASS | docker login --username $DOCKER_USER --password-stdin registry-1.docker.io; + fi" + - sh: " + if [[ -n $BY_CANARY ]]; + then echo Logging in Public ECR; aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws; fi" - sh: "pytest -vv tests/integration" - sh: "pytest -vv tests/regression" From 
f1e05695802f0c1c9f9b1b5c42cda413dac91596 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Wed, 30 Jun 2021 20:17:45 -0700 Subject: [PATCH 07/24] chore: bump version to 1.25.0 (#3007) Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index 3fe00ac134..d49ef0cf7e 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.24.1" +__version__ = "1.25.0" From 224156330fa6e862b7d7f2b3567aaf53842e7528 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Date: Wed, 30 Jun 2021 20:40:16 -0700 Subject: [PATCH 08/24] temp: reduce python testing matrix (#3008) --- appveyor.yml | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 1833c78b1f..b96017d0d3 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -9,26 +9,26 @@ environment: matrix: - - PYTHON_HOME: "C:\\Python36-x64" - PYTHON_VERSION: '3.6' - PYTHON_ARCH: '64' - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_37_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_36' - AWS_ECR: 'AWS_ECR_36' - APPVEYOR_CONSOLE_DISABLE_PTY: true - - - PYTHON_HOME: "C:\\Python37-x64" - PYTHON_VERSION: '3.7' - PYTHON_ARCH: '64' - RUN_SMOKE: 1 - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_36_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_37' - AWS_ECR: 'AWS_ECR_37' - APPVEYOR_CONSOLE_DISABLE_PTY: true + # - PYTHON_HOME: "C:\\Python36-x64" + # PYTHON_VERSION: '3.6' + # PYTHON_ARCH: '64' + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_37_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_36' + # AWS_ECR: 'AWS_ECR_36' + # APPVEYOR_CONSOLE_DISABLE_PTY: true + + # - PYTHON_HOME: "C:\\Python37-x64" + # PYTHON_VERSION: '3.7' + # PYTHON_ARCH: '64' + # RUN_SMOKE: 1 + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_36_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_37' + # AWS_ECR: 'AWS_ECR_37' + # APPVEYOR_CONSOLE_DISABLE_PTY: true - PYTHON_HOME: "C:\\Python38-x64" PYTHON_VERSION: '3.8' @@ -159,7 +159,7 @@ for: # Pre-dev Tests - "pip install -e \".[pre-dev]\"" - "pylint --rcfile .pylintrc samcli" - + # Dev Tests - "pip install -e \".[dev]\"" - "pytest --cov samcli --cov-report term-missing --cov-fail-under 94 tests/unit" From fe832185be09acb199b2a09ad73bf59e1553d131 Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Date: Wed, 30 Jun 2021 22:28:02 -0700 Subject: [PATCH 09/24] temp: disable testing against python 3.8, and enabled 3.7 (#3009) * temp: disable testing against python 3.8, and enabled 3.7 * temp: disable testing against python 3.8, and enabled 3.7 & 3.6 --- appveyor.yml | 52 ++++++++++++++++++++++++++-------------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index b96017d0d3..e47ea65e81 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -9,38 +9,38 @@ environment: matrix: - # - PYTHON_HOME: "C:\\Python36-x64" - # PYTHON_VERSION: '3.6' - # PYTHON_ARCH: '64' - # NOSE_PARAMETERIZED_NO_WARN: 1 - # INSTALL_PY_37_PIP: 1 - # INSTALL_PY_38_PIP: 1 - # AWS_S3: 'AWS_S3_36' - # AWS_ECR: 'AWS_ECR_36' - # APPVEYOR_CONSOLE_DISABLE_PTY: true - - # - PYTHON_HOME: "C:\\Python37-x64" - # PYTHON_VERSION: '3.7' - # PYTHON_ARCH: '64' - # RUN_SMOKE: 1 - # NOSE_PARAMETERIZED_NO_WARN: 1 - # 
INSTALL_PY_36_PIP: 1 - # INSTALL_PY_38_PIP: 1 - # AWS_S3: 'AWS_S3_37' - # AWS_ECR: 'AWS_ECR_37' - # APPVEYOR_CONSOLE_DISABLE_PTY: true - - - PYTHON_HOME: "C:\\Python38-x64" - PYTHON_VERSION: '3.8' + - PYTHON_HOME: "C:\\Python36-x64" + PYTHON_VERSION: '3.6' + PYTHON_ARCH: '64' + NOSE_PARAMETERIZED_NO_WARN: 1 + INSTALL_PY_37_PIP: 1 + INSTALL_PY_38_PIP: 1 + AWS_S3: 'AWS_S3_36' + AWS_ECR: 'AWS_ECR_36' + APPVEYOR_CONSOLE_DISABLE_PTY: true + + - PYTHON_HOME: "C:\\Python37-x64" + PYTHON_VERSION: '3.7' PYTHON_ARCH: '64' RUN_SMOKE: 1 NOSE_PARAMETERIZED_NO_WARN: 1 INSTALL_PY_36_PIP: 1 - INSTALL_PY_37_PIP: 1 - AWS_S3: 'AWS_S3_38' - AWS_ECR: 'AWS_ECR_38' + INSTALL_PY_38_PIP: 1 + AWS_S3: 'AWS_S3_37' + AWS_ECR: 'AWS_ECR_37' APPVEYOR_CONSOLE_DISABLE_PTY: true + # - PYTHON_HOME: "C:\\Python38-x64" + # PYTHON_VERSION: '3.8' + # PYTHON_ARCH: '64' + # RUN_SMOKE: 1 + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_36_PIP: 1 + # INSTALL_PY_37_PIP: 1 + # AWS_S3: 'AWS_S3_38' + # AWS_ECR: 'AWS_ECR_38' + # APPVEYOR_CONSOLE_DISABLE_PTY: true + for: - matrix: From cc806a28968bae5b0e63845a767307383082458b Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Thu, 1 Jul 2021 00:23:18 -0700 Subject: [PATCH 10/24] fix: enable all runtimes in python testing matrix (#3011) * revert: enable all runtimes in python testing matrix * fix indentation for yml --- appveyor.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index e47ea65e81..ed730e0a24 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -30,16 +30,16 @@ environment: AWS_ECR: 'AWS_ECR_37' APPVEYOR_CONSOLE_DISABLE_PTY: true - # - PYTHON_HOME: "C:\\Python38-x64" - # PYTHON_VERSION: '3.8' - # PYTHON_ARCH: '64' - # RUN_SMOKE: 1 - # NOSE_PARAMETERIZED_NO_WARN: 1 - # INSTALL_PY_36_PIP: 1 - # INSTALL_PY_37_PIP: 1 - # AWS_S3: 'AWS_S3_38' - # AWS_ECR: 'AWS_ECR_38' - # APPVEYOR_CONSOLE_DISABLE_PTY: true + - PYTHON_HOME: "C:\\Python38-x64" + PYTHON_VERSION: '3.8' + PYTHON_ARCH: '64' + RUN_SMOKE: 1 + NOSE_PARAMETERIZED_NO_WARN: 1 + INSTALL_PY_36_PIP: 1 + INSTALL_PY_37_PIP: 1 + AWS_S3: 'AWS_S3_38' + AWS_ECR: 'AWS_ECR_38' + APPVEYOR_CONSOLE_DISABLE_PTY: true for: - From ac4e485b0f3eaa915262caed8cd570529a19adf1 Mon Sep 17 00:00:00 2001 From: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Date: Tue, 6 Jul 2021 09:37:06 -0700 Subject: [PATCH 11/24] chore: update to aws-sam-translator 1.37.0 (#3019) --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 18 +++++++++--------- .../models/function_with_mq_virtual_host.yaml | 19 +++++++++++++++++++ 3 files changed, 29 insertions(+), 10 deletions(-) create mode 100644 tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml diff --git a/requirements/base.txt b/requirements/base.txt index ab432ff159..25efa93b05 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ boto3~=1.14 jmespath~=0.10.0 PyYAML~=5.3 cookiecutter~=1.7.2 -aws-sam-translator==1.36.0 +aws-sam-translator==1.37.0 #docker minor version updates can include breaking changes. Auto update micro version only. 
docker~=4.2.0 dateparser~=0.7 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index a7a92e25a4..a2f725e5fd 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile +# This file is autogenerated by pip-compile with python 3.7 # To update, run: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/reproducible-linux.txt @@ -17,10 +17,10 @@ aws-lambda-builders==1.4.0 \ --hash=sha256:5d4e4ecb3d3290f0eec1f62b7b0d9d6b91160ae71447d95899eede392d05f75f \ --hash=sha256:d32f79cf67b189a7598793f69797f284b2eb9a9fada562175b1e854187f95aed # via aws-sam-cli (setup.py) -aws-sam-translator==1.36.0 \ - --hash=sha256:4195ae8196f04803e7f0384a2b5ccd8c2b06ce0d8dc408aa1f1ce96c23bcf39d \ - --hash=sha256:f7d51b661fe1f5613a882f4733d1c92eff4dac36a076eafd18031d209b178695 \ - --hash=sha256:fa1b990d9329d19052e7b91cf0b19371ed9d31a529054b616005884cd662b584 +aws-sam-translator==1.37.0 \ + --hash=sha256:12cbf4af9e95acf73dabfbc44af990dc1e880f35697bb8c04f31b3bb90ab5526 \ + --hash=sha256:26e4866627e4284afc367bee2bd04d3cf23cecc8ff879b419457715a738395a9 \ + --hash=sha256:6884d942a815450637bac48e297996df2dacc27077d25ced09d8e9ce1f6a585c # via aws-sam-cli (setup.py) binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ @@ -88,10 +88,6 @@ itsdangerous==1.1.0 \ --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 # via flask -jinja2-time==0.2.0 \ - --hash=sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40 \ - --hash=sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa - # via cookiecutter jinja2==2.11.3 \ --hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \ --hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6 @@ -99,6 +95,10 @@ jinja2==2.11.3 \ # cookiecutter # flask # jinja2-time +jinja2-time==0.2.0 \ + --hash=sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40 \ + --hash=sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa + # via cookiecutter jmespath==0.10.0 \ --hash=sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9 \ --hash=sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f diff --git a/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml b/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml new file mode 100644 index 0000000000..b5d2c62085 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/function_with_mq_virtual_host.yaml @@ -0,0 +1,19 @@ +Resources: + MQFunction: + Type: 'AWS::Serverless::Function' + Properties: + CodeUri: s3://sam-demo-bucket/queues.zip + Handler: queue.mq_handler + Runtime: python2.7 + Events: + MyMQQueue: + Type: MQ + Properties: + Broker: arn:aws:mq:us-east-2:123456789012:broker:MyBroker:b-1234a5b6-78cd-901e-2fgh-3i45j6k178l9 + Queues: + - "Queue1" + SourceAccessConfigurations: + - Type: BASIC_AUTH + URI: arn:aws:secretsmanager:us-west-2:123456789012:secret:my-path/my-secret-name-1a2b3c + - Type: VIRTUAL_HOST + URI: vhost_name \ No newline at end of file From a1e5c92c1d47ab7b11158b3ef2d7efdb67300cbb Mon Sep 17 00:00:00 2001 From: Mathieu Grandis <73313235+mgrandis@users.noreply.github.com> Date: Tue, 6 Jul 2021 09:46:47 -0700 Subject: [PATCH 12/24] chore: 
bump version to 1.26.0 (#3020) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index d49ef0cf7e..1c484dccfd 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.25.0" +__version__ = "1.26.0" From 59c85768356089edb265c2ea7f53bce2412f9e19 Mon Sep 17 00:00:00 2001 From: Qingchuan Ma <69653965+qingchm@users.noreply.github.com> Date: Thu, 8 Jul 2021 15:48:08 -0700 Subject: [PATCH 13/24] chore: Improved --resolve-s3 option documentation and deployment without s3 error messages (#2983) * Improve documentation on --resolve-s3 option and improve s3 failure messages * Changed indentation for integration test on s3 error message * Fixed a typo in description * Improve spacing on help text for resolve-s3 option --- samcli/commands/deploy/command.py | 4 +++- samcli/commands/package/command.py | 4 +++- samcli/commands/package/exceptions.py | 3 ++- tests/integration/deploy/test_deploy_command.py | 3 ++- 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index 371dc61c4d..5b7744b89d 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -153,7 +153,9 @@ "--resolve-s3", required=False, is_flag=True, - help="Automatically resolve s3 bucket for non-guided deployments." + help="Automatically resolve s3 bucket for non-guided deployments. " + "Enabling this option will also create a managed default s3 bucket for you. " + "If you do not provide a --s3-bucket value, the managed bucket will be used. " "Do not use --s3-guided parameter with this option.", ) @metadata_override_option diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index cab68b6d88..cc0dc35c5d 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -121,7 +121,9 @@ def resources_and_properties_help_string(): exc_set=PackageResolveS3AndS3SetError, exc_not_set=PackageResolveS3AndS3NotSetError, ), - help="Automatically resolve s3 bucket for non-guided deployments." + help="Automatically resolve s3 bucket for non-guided deployments. " + "Enabling this option will also create a managed default s3 bucket for you. " + "If you do not provide a --s3-bucket value, the managed bucket will be used. 
" "Do not use --s3-guided parameter with this option.", ) @metadata_override_option diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index a650f62843..af549058e9 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -124,7 +124,8 @@ class BucketNotSpecifiedError(UserException): def __init__(self, **kwargs): self.kwargs = kwargs - message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided" + message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name, or use --resolve-s3 \ +to create a managed default bucket, or run sam deploy --guided" super().__init__(message=message_fmt.format(**self.kwargs)) diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index 3e4bd53f87..893799e157 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -313,7 +313,8 @@ def test_deploy_without_s3_bucket(self, template_file): self.assertEqual(deploy_process_execute.process.returncode, 1) self.assertIn( bytes( - f"S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided", + f"S3 Bucket not specified, use --s3-bucket to specify a bucket name, or use --resolve-s3 \ +to create a managed default bucket, or run sam deploy --guided", encoding="utf-8", ), deploy_process_execute.stderr, From 0626f9463f565acebc5b733d16007fd4f666413f Mon Sep 17 00:00:00 2001 From: _sam <3804518+aahung@users.noreply.github.com> Date: Tue, 20 Jul 2021 14:02:07 -0700 Subject: [PATCH 14/24] feat: Add SAM Pipeline commands (#3085) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * sam pipeline bootstrap (#2811) * two-stages-pipeline plugin * typos * add docstring * make mypy happy * removing swap file * delete the two_stages_pipeline plugin as the pipeline-bootstrap command took over its responsibility * remove 'get_template_function_runtimes' function as the decision is made to not process the SAM template during pipeline init which was the only place we use the function * sam pipeline bootstrap command * move the pipelineconfig.toml file to .aws-sam * UX - rewriting Co-authored-by: Chris Rehn * UX improvements * make black happy * apply review comments * UX - rewriting Co-authored-by: Chris Rehn * refactor * Apply review comments * use python way of array elements assignments * Update samcli/lib/pipeline/bootstrap/stage.py Co-authored-by: _sam <3804518+aahung@users.noreply.github.com> * apply review comments * typo * read using utf-8 * create and user a safe version of the save_config method * apply review comments * rename _get_command_name to _get_command_names * don't save generated ARNs for now, will save during init * Revert "don't save generated ARNs for now, will save during init" This reverts commit d184e164022d9560131c62a826436edbc93da189. 
* Notify the user to rotate periodically rotate the IAM credentials * typo * Use AES instead of KMS for S3 SSE * rename Ecr to ECR and Iam to IAM * Grant lambda service explicit permissions to thhe ECR instead of relying on giving this permissions on ad-hoc while creating the container images Co-authored-by: Chris Rehn Co-authored-by: _sam <3804518+aahung@users.noreply.github.com> * sam pipeline init command (#2831) * sam pipeline init command * apply review comments * apply review comments * display a message that we have successfully created the pipeline configuration file(s). * doc typo * Let 'sam pipeline init' prefills pipeline's infrastructure resources… (#2894) * Let 'sam pipeline init' prefills pipeline's infrastructure resources' values from 'sam pipeline bootstrap' results. * save bootstrapped sateg region * make black happy * exclude non-dict keys from samconfig.get_env_names method. * Rename the pipeline 'Stage' concept to 'Environment' (#2908) * Rename the pipeline 'Stage' concept to 'Environment' * typo * Rename --environment-name argument to --environment * Sam pipelines ux rename ecr repo to image repository (#2910) * Rename ecr-repo to image-repository * UT Fixes * typo * typo * feat: Support creating pipeline files directly into . without hooks (#2911) * feat: Support creating pipeline files directly into . without hooks * Integration test for pipeline init and pipeline bootstrap (#2841) * Expose Environment._get_stack_name for integ test to predict stack name * Add integ test for pipeline bootstrap * Add init integ test * small UX improvements: (#2914) * small UX improvements: 1. show a message when the user cancels a bootstrapping command. 2. Don't prompt for CI/CD provider or provider templates if there is only one choice. 3. Make PipelineFileAlreadyExistsError a UserError. 4. use the Colored class instead of fg='color' when prompting a colored message. 5. Fix a bug where we were not allowing empty response for not required questions. * Fix Integration Test: We now don't ask the user to select a provider's pipeline template if there is only one * Add docs for PipelineFileAlreadyExistsError * make black happy * Sam pipelines s3 security (#2975) * Deny non https requests for the artifacts S3 bucket * enable bucket serverside logging * add integration tests for artifacts bucket SSL-only requests and access logging * typo * Ensure the ArtifactsLoggingBucket denies non ssl requests (#2976) * Sam pipelines ux round 3 (#2979) * rename customer facing message 'CI/CD provider' to 'CI/CD system' * add a note about what 'Environment Name' is during the pipeline bootstrap guided context * Apply suggestions from code review typo Co-authored-by: Chris Rehn Co-authored-by: Chris Rehn * let pipeline IAM user assume only IAM roles tagged with Role=pipeline-execution-role (#2982) * Adding AWS_ prefix to displayed out. 
(#2993) Co-authored-by: Tarun Mall * Add region to pipeline bootstrap interactive flow (#2997) * Ask AWS region in bootstrap interactive flow * Read default region from boto session first * Fix a unit test * Inform write to pipelineconfig.toml at the end of bootstrap (#3002) * Print info about pipelineconfig.toml after resources are bootstrapped * Update samcli/commands/pipeline/bootstrap/cli.py Co-authored-by: Chris Rehn Co-authored-by: Chris Rehn * List detected env names in pipeline init when prompt to input the env name (#3000) * Allow question.question can be resolved using key path * Pass the list of env names message (environment_names_message) into pipeline init interactive flow context * Update samcli/commands/pipeline/init/interactive_init_flow.py Co-authored-by: Chris Rehn * Fix unit test (trigger pr builds) * Fix integ test * Fix integ test Co-authored-by: Chris Rehn * Adding account id to bootstrap message. (#2998) * Adding account id to bootstrap message. * adding docstring * Addressing PR comments. * Adding unit tests. * Fixing unit tests. Co-authored-by: Tarun Mall * Cfn creds fix (#3014) * Removing pipeline user creds from cfn output. This maintains same user exp. Co-authored-by: Tarun Mall * Ux bootstrap revamp 20210706 (#3021) * Add intro paragraph to bootstrap * Add switch account prompt * Revamp stage definition prompt * Revamp existing resources prompt * Revamp security prompt * Allow answers to be changed later * Add exit message for bootstrap * Add exit message for bootstrap (1) * Add indentation to review values * Add "Below is the summary of the answers:" * Sweep pylint errors * Update unit tests * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/guided_context.py Co-authored-by: Chris Rehn * Update samcli/commands/pipeline/bootstrap/cli.py Co-authored-by: Chris Rehn * Update unit tests * Add bold to other literals Co-authored-by: Chris Rehn * Adding account condition for CFN execution role. (#3027) Co-authored-by: Tarun Mall * pipeline UX revamp 20210707 (#3031) * Allow running bootstrap inside pipeline init * Select account credential source within bootstrap * Add bootstrap decorations within pipeline init * Removing ip range option from bootstrap. (#3036) * Removing ip range option from bootstrap. * Fixing unit test from UX PR. Co-authored-by: Tarun Mall * Fix toml file incorrect read/write in init --bootstrap (#3037) * Temporarily removing account fix. (#3038) Co-authored-by: Tarun Mall * Rename environment to stage (#3040) * Improve account source selection (#3042) * Fixing various cosmetics UX issues with pipeline workflow. (#3046) * Fixing credential to credentials * Forcing text color to yellow. * Adding new line after stage diagram. * Adding extra line after checking bootstrap message. * Renaming config -> configuration * account source -> credential source * Removing old message. * Fixing indentation in list. * Fixing bunch of indentation. 
* fixing f string Co-authored-by: Tarun Mall * Auto skip questions if stage detected (#3045) * Autofill question if default value is presented * Allow to use index to select stage names (#3051) * Updating message when bootstrap stages are missing. (#3058) * Updating message when bootstrap stages are missing. * Fixing indendation Co-authored-by: Tarun Mall * Fixing bootstrap integ tests. (#3061) * Fixing bootstrap integ tests. * Cleaning up some integ tests. * Using environment variables when running integ test on CI. * Using expression instead of full loop. * Adding instruction to use default profile on local. Co-authored-by: Tarun Mall * Fix bootstrap test region (#3064) * Fix bootstrap region in integ test * Fix regions in non-interactive mode as well * Add more pipeline init integ test (#3065) * Fix existing pipeline init integ test * Add more pipeline init integ tests * Config file bug (#3066) * Validating config file after bootstrap stack creation. * Validating config file after bootstrap. Co-authored-by: Tarun Mall * Fix pipeline init integ test because of pipelineconfig file exists (#3067) * Make stage name randomized to avoid race condition among multi canary runs (#3078) * Load number of stages from pipeline template (#3059) * Load number of stages from templates * Rename variable and add debug log * Add encoding to open() * Allow roles with Tag aws-sam-pipeline-codebuild-service-role to assume PipelineExecutionRole (#2950) * pipeline init UX: Ask to confirm when file exists (#3079) * Ask to confirm overriding if files already exist, or save to another directory * Add doc links (#3087) * Adding accidentally removed tests back. (#3088) Co-authored-by: Tarun Mall Co-authored-by: elbayaaa <72949274+elbayaaa@users.noreply.github.com> Co-authored-by: Chris Rehn Co-authored-by: Ahmed Elbayaa Co-authored-by: Tarun Co-authored-by: Tarun Mall --- mypy.ini | 2 +- samcli/cli/command.py | 1 + samcli/cli/context.py | 4 +- samcli/commands/_utils/template.py | 6 +- samcli/commands/deploy/guided_context.py | 4 +- samcli/commands/exceptions.py | 19 + samcli/commands/pipeline/__init__.py | 0 .../commands/pipeline/bootstrap/__init__.py | 0 samcli/commands/pipeline/bootstrap/cli.py | 238 ++++++++ .../pipeline/bootstrap/guided_context.py | 249 ++++++++ samcli/commands/pipeline/external_links.py | 8 + samcli/commands/pipeline/init/__init__.py | 0 samcli/commands/pipeline/init/cli.py | 51 ++ .../pipeline/init/interactive_init_flow.py | 482 +++++++++++++++ .../init/pipeline_templates_manifest.py | 61 ++ samcli/commands/pipeline/pipeline.py | 21 + samcli/lib/bootstrap/bootstrap.py | 33 +- samcli/lib/config/samconfig.py | 14 +- samcli/lib/cookiecutter/exceptions.py | 4 +- samcli/lib/cookiecutter/interactive_flow.py | 22 +- .../cookiecutter/interactive_flow_creator.py | 6 +- samcli/lib/cookiecutter/processor.py | 2 +- samcli/lib/cookiecutter/question.py | 104 +++- samcli/lib/cookiecutter/template.py | 28 +- samcli/lib/pipeline/__init__.py | 0 samcli/lib/pipeline/bootstrap/__init__.py | 0 samcli/lib/pipeline/bootstrap/resource.py | 138 +++++ samcli/lib/pipeline/bootstrap/stage.py | 330 ++++++++++ .../pipeline/bootstrap/stage_resources.yaml | 358 +++++++++++ samcli/lib/utils/colors.py | 4 + samcli/lib/utils/defaults.py | 8 + samcli/lib/utils/git_repo.py | 2 +- .../lib/utils/managed_cloudformation_stack.py | 94 ++- samcli/lib/utils/profile.py | 10 + samcli/yamlhelper.py | 10 +- tests/integration/pipeline/__init__.py | 0 tests/integration/pipeline/base.py | 154 +++++ .../pipeline/test_bootstrap_command.py | 380 
++++++++++++ .../integration/pipeline/test_init_command.py | 299 +++++++++ .../custom_template/cookiecutter.json | 4 + .../pipeline/custom_template/metadata.json | 3 + .../pipeline/custom_template/questions.json | 7 + .../{{cookiecutter.outputDir}}/weather | 1 + .../testdata/pipeline/expected_jenkinsfile | 177 ++++++ tests/testing_utils.py | 5 + tests/unit/commands/_utils/test_template.py | 8 +- .../commands/deploy/test_guided_context.py | 6 +- tests/unit/commands/pipeline/__init__.py | 0 .../commands/pipeline/bootstrap/__init__.py | 0 .../commands/pipeline/bootstrap/test_cli.py | 276 +++++++++ .../pipeline/bootstrap/test_guided_context.py | 231 +++++++ tests/unit/commands/pipeline/init/__init__.py | 0 tests/unit/commands/pipeline/init/test_cli.py | 22 + .../init/test_initeractive_init_flow.py | 566 ++++++++++++++++++ .../init/test_pipeline_templates_manifest.py | 82 +++ tests/unit/lib/bootstrap/test_bootstrap.py | 34 +- tests/unit/lib/cookiecutter/test_question.py | 17 +- tests/unit/lib/cookiecutter/test_template.py | 19 +- tests/unit/lib/pipeline/__init__.py | 0 tests/unit/lib/pipeline/bootstrap/__init__.py | 0 .../pipeline/bootstrap/test_environment.py | 425 +++++++++++++ .../lib/pipeline/bootstrap/test_resource.py | 81 +++ tests/unit/lib/samconfig/test_samconfig.py | 34 +- .../test_managed_cloudformation_stack.py | 21 +- 64 files changed, 5052 insertions(+), 113 deletions(-) create mode 100644 samcli/commands/pipeline/__init__.py create mode 100644 samcli/commands/pipeline/bootstrap/__init__.py create mode 100644 samcli/commands/pipeline/bootstrap/cli.py create mode 100644 samcli/commands/pipeline/bootstrap/guided_context.py create mode 100644 samcli/commands/pipeline/external_links.py create mode 100644 samcli/commands/pipeline/init/__init__.py create mode 100644 samcli/commands/pipeline/init/cli.py create mode 100644 samcli/commands/pipeline/init/interactive_init_flow.py create mode 100644 samcli/commands/pipeline/init/pipeline_templates_manifest.py create mode 100644 samcli/commands/pipeline/pipeline.py create mode 100644 samcli/lib/pipeline/__init__.py create mode 100644 samcli/lib/pipeline/bootstrap/__init__.py create mode 100644 samcli/lib/pipeline/bootstrap/resource.py create mode 100644 samcli/lib/pipeline/bootstrap/stage.py create mode 100644 samcli/lib/pipeline/bootstrap/stage_resources.yaml create mode 100644 samcli/lib/utils/defaults.py create mode 100644 samcli/lib/utils/profile.py create mode 100644 tests/integration/pipeline/__init__.py create mode 100644 tests/integration/pipeline/base.py create mode 100644 tests/integration/pipeline/test_bootstrap_command.py create mode 100644 tests/integration/pipeline/test_init_command.py create mode 100644 tests/integration/testdata/pipeline/custom_template/cookiecutter.json create mode 100644 tests/integration/testdata/pipeline/custom_template/metadata.json create mode 100644 tests/integration/testdata/pipeline/custom_template/questions.json create mode 100644 tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather create mode 100644 tests/integration/testdata/pipeline/expected_jenkinsfile create mode 100644 tests/unit/commands/pipeline/__init__.py create mode 100644 tests/unit/commands/pipeline/bootstrap/__init__.py create mode 100644 tests/unit/commands/pipeline/bootstrap/test_cli.py create mode 100644 tests/unit/commands/pipeline/bootstrap/test_guided_context.py create mode 100644 tests/unit/commands/pipeline/init/__init__.py create mode 100644 tests/unit/commands/pipeline/init/test_cli.py create 
mode 100644 tests/unit/commands/pipeline/init/test_initeractive_init_flow.py create mode 100644 tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py create mode 100644 tests/unit/lib/pipeline/__init__.py create mode 100644 tests/unit/lib/pipeline/bootstrap/__init__.py create mode 100644 tests/unit/lib/pipeline/bootstrap/test_environment.py create mode 100644 tests/unit/lib/pipeline/bootstrap/test_resource.py diff --git a/mypy.ini b/mypy.ini index 497c022c95..f6915caefe 100644 --- a/mypy.ini +++ b/mypy.ini @@ -59,6 +59,6 @@ ignore_missing_imports=True ignore_missing_imports=True # progressive add typechecks and these modules already complete the process, let's keep them clean -[mypy-samcli.commands.build,samcli.lib.build.*,samcli.commands.local.cli_common.invoke_context,samcli.commands.local.lib.local_lambda,samcli.lib.providers.*,samcli.lib.utils.git_repo.py] +[mypy-samcli.commands.build,samcli.lib.build.*,samcli.commands.local.cli_common.invoke_context,samcli.commands.local.lib.local_lambda,samcli.lib.providers.*,samcli.lib.utils.git_repo.py,samcli.lib.cookiecutter.*,samcli.lib.pipeline.*,samcli.commands.pipeline.*] disallow_untyped_defs=True disallow_incomplete_defs=True \ No newline at end of file diff --git a/samcli/cli/command.py b/samcli/cli/command.py index 384529f78b..c135400586 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -21,6 +21,7 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", + "samcli.commands.pipeline.pipeline", # We intentionally do not expose the `bootstrap` command for now. We might open it up later # "samcli.commands.bootstrap", ] diff --git a/samcli/cli/context.py b/samcli/cli/context.py index a69ebb9ff2..74c35155a1 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -4,7 +4,7 @@ import logging import uuid -from typing import Optional, cast +from typing import Optional, cast, List import boto3 import botocore @@ -186,7 +186,7 @@ def _refresh_session(self): raise CredentialsError(str(ex)) from ex -def get_cmd_names(cmd_name, ctx): +def get_cmd_names(cmd_name, ctx) -> List[str]: """ Given the click core context, return a list representing all the subcommands passed to the CLI diff --git a/samcli/commands/_utils/template.py b/samcli/commands/_utils/template.py index bd9658b55b..08c02836da 100644 --- a/samcli/commands/_utils/template.py +++ b/samcli/commands/_utils/template.py @@ -9,9 +9,6 @@ import yaml from botocore.utils import set_value_from_jmespath -from samcli.commands.exceptions import UserException -from samcli.lib.utils.packagetype import ZIP -from samcli.yamlhelper import yaml_parse, yaml_dump from samcli.commands._utils.resources import ( METADATA_WITH_LOCAL_PATHS, RESOURCES_WITH_LOCAL_PATHS, @@ -19,6 +16,9 @@ AWS_LAMBDA_FUNCTION, get_packageable_resource_paths, ) +from samcli.commands.exceptions import UserException +from samcli.lib.utils.packagetype import ZIP +from samcli.yamlhelper import yaml_parse, yaml_dump class TemplateNotFoundException(UserException): diff --git a/samcli/commands/deploy/guided_context.py b/samcli/commands/deploy/guided_context.py index dafdf0a331..10fd3b6da8 100644 --- a/samcli/commands/deploy/guided_context.py +++ b/samcli/commands/deploy/guided_context.py @@ -6,7 +6,6 @@ from typing import Dict, Any, List import click -from botocore.session import get_session from click import confirm from click import prompt from click.types import FuncParamType @@ -36,6 +35,7 @@ from samcli.lib.providers.sam_function_provider import SamFunctionProvider from 
samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.utils.colors import Colored +from samcli.lib.utils.defaults import get_default_aws_region from samcli.lib.utils.packagetype import IMAGE LOG = logging.getLogger(__name__) @@ -110,7 +110,7 @@ def guided_prompts(self, parameter_override_keys): The keys of parameters to override, for each key, customers will be asked to provide a value """ default_stack_name = self.stack_name or "sam-app" - default_region = self.region or get_session().get_config_variable("region") or "us-east-1" + default_region = self.region or get_default_aws_region() default_capabilities = self.capabilities[0] or ("CAPABILITY_IAM",) default_config_env = self.config_env or DEFAULT_ENV default_config_file = self.config_file or DEFAULT_CONFIG_FILE_NAME diff --git a/samcli/commands/exceptions.py b/samcli/commands/exceptions.py index 7b8f253609..a27f4872cf 100644 --- a/samcli/commands/exceptions.py +++ b/samcli/commands/exceptions.py @@ -59,3 +59,22 @@ class ContainersInitializationException(UserException): """ Exception class when SAM is not able to initialize any of the lambda functions containers """ + + +class PipelineTemplateCloneException(UserException): + """ + Exception class when unable to download pipeline templates from a Git repository during `sam pipeline init` + """ + + +class AppPipelineTemplateManifestException(UserException): + """ + Exception class when SAM is not able to parse the "manifest.yaml" file located in the SAM pipeline templates + Git repo: "github.com/aws/aws-sam-cli-pipeline-init-templates.git" + """ + + +class AppPipelineTemplateMetadataException(UserException): + """ + Exception class when SAM is not able to parse the "metadata.json" file located in the SAM pipeline templates + """ diff --git a/samcli/commands/pipeline/__init__.py b/samcli/commands/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/pipeline/bootstrap/__init__.py b/samcli/commands/pipeline/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py new file mode 100644 index 0000000000..9e8b454992 --- /dev/null +++ b/samcli/commands/pipeline/bootstrap/cli.py @@ -0,0 +1,238 @@ +""" +CLI command for "pipeline bootstrap", which sets up the required pipeline infrastructure resources +""" +import os +from textwrap import dedent +from typing import Any, Dict, List, Optional + +import click + +from samcli.cli.cli_config_file import configuration_option, TomlProvider +from samcli.cli.main import pass_context, common_options, aws_creds_options, print_cmdline_args +from samcli.lib.config.samconfig import SamConfig +from samcli.lib.pipeline.bootstrap.stage import Stage +from samcli.lib.telemetry.metric import track_command +from samcli.lib.utils.colors import Colored +from samcli.lib.utils.version_checker import check_newer_version +from .guided_context import GuidedContext +from ..external_links import CONFIG_AWS_CRED_ON_CICD_URL + +SHORT_HELP = "Generates the necessary AWS resources to connect your CI/CD system." + +HELP_TEXT = """ +SAM Pipeline Bootstrap generates the necessary AWS resources to connect your +CI/CD system.
This step must be completed for each pipeline stage prior to +running sam pipeline init +""" + +PIPELINE_CONFIG_DIR = os.path.join(".aws-sam", "pipeline") +PIPELINE_CONFIG_FILENAME = "pipelineconfig.toml" + + +@click.command("bootstrap", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) +@configuration_option(provider=TomlProvider(section="parameters")) +@click.option( + "--interactive/--no-interactive", + is_flag=True, + default=True, + help="Disable interactive prompting for bootstrap parameters, and fail if any required arguments are missing.", +) +@click.option( + "--stage", + help="The name of the corresponding stage. It is used as a suffix for the created resources.", + required=False, +) +@click.option( + "--pipeline-user", + help="An IAM user generated or referenced by sam pipeline bootstrap in order to " + "allow the connected CI/CD system to connect to the SAM CLI.", + required=False, +) +@click.option( + "--pipeline-execution-role", + help="Execution role that the CI/CD system assumes in order to make changes to resources on your behalf.", + required=False, +) +@click.option( + "--cloudformation-execution-role", + help="Execution role that CloudFormation assumes in order to make changes to resources on your behalf.", + required=False, +) +@click.option( + "--bucket", + help="The name of the S3 bucket where this command uploads your CloudFormation template. This is required for " + "deployments of templates sized greater than 51,200 bytes.", + required=False, +) +@click.option( + "--create-image-repository/--no-create-image-repository", + is_flag=True, + default=False, + help="If set to true and no ECR image repository is provided, this command will create an ECR image repository " + "to hold the container images of Lambda functions having an Image package type.", +) +@click.option( + "--image-repository", + help="ECR repository URI where this command uploads the image artifacts that are referenced in your template.", + required=False, +) +@click.option( + "--confirm-changeset/--no-confirm-changeset", + default=True, + is_flag=True, + help="Prompt to confirm whether the resources are to be deployed by SAM CLI.", +) +@common_options +@aws_creds_options +@pass_context +@track_command +@check_newer_version +@print_cmdline_args +def cli( + ctx: Any, + interactive: bool, + stage: Optional[str], + pipeline_user: Optional[str], + pipeline_execution_role: Optional[str], + cloudformation_execution_role: Optional[str], + bucket: Optional[str], + create_image_repository: bool, + image_repository: Optional[str], + confirm_changeset: bool, + config_file: Optional[str], + config_env: Optional[str], +) -> None: + """ + `sam pipeline bootstrap` command entry point + """ + do_cli( + region=ctx.region, + profile=ctx.profile, + interactive=interactive, + stage_name=stage, + pipeline_user_arn=pipeline_user, + pipeline_execution_role_arn=pipeline_execution_role, + cloudformation_execution_role_arn=cloudformation_execution_role, + artifacts_bucket_arn=bucket, + create_image_repository=create_image_repository, + image_repository_arn=image_repository, + confirm_changeset=confirm_changeset, + config_file=config_file, + config_env=config_env, + ) # pragma: no cover + + +def do_cli( + region: Optional[str], + profile: Optional[str], + interactive: bool, + stage_name: Optional[str], + pipeline_user_arn: Optional[str], + pipeline_execution_role_arn: Optional[str], + cloudformation_execution_role_arn: Optional[str], + artifacts_bucket_arn: Optional[str], + create_image_repository: bool, +
image_repository_arn: Optional[str], + confirm_changeset: bool, + config_file: Optional[str], + config_env: Optional[str], + standalone: bool = True, +) -> None: + """ + implementation of `sam pipeline bootstrap` command + """ + if not pipeline_user_arn: + pipeline_user_arn = _load_saved_pipeline_user_arn() + + if interactive: + if standalone: + click.echo( + dedent( + """\ + + sam pipeline bootstrap generates the necessary AWS resources to connect a stage in + your CI/CD system. We will ask for [1] stage definition, [2] account details, and + [3] references to existing resources in order to bootstrap these pipeline + resources. + """ + ), + ) + + guided_context = GuidedContext( + profile=profile, + stage_name=stage_name, + pipeline_user_arn=pipeline_user_arn, + pipeline_execution_role_arn=pipeline_execution_role_arn, + cloudformation_execution_role_arn=cloudformation_execution_role_arn, + artifacts_bucket_arn=artifacts_bucket_arn, + create_image_repository=create_image_repository, + image_repository_arn=image_repository_arn, + region=region, + ) + guided_context.run() + stage_name = guided_context.stage_name + pipeline_user_arn = guided_context.pipeline_user_arn + pipeline_execution_role_arn = guided_context.pipeline_execution_role_arn + cloudformation_execution_role_arn = guided_context.cloudformation_execution_role_arn + artifacts_bucket_arn = guided_context.artifacts_bucket_arn + create_image_repository = guided_context.create_image_repository + image_repository_arn = guided_context.image_repository_arn + region = guided_context.region + profile = guided_context.profile + + if not stage_name: + raise click.UsageError("Missing required parameter '--stage'") + + environment: Stage = Stage( + name=stage_name, + aws_profile=profile, + aws_region=region, + pipeline_user_arn=pipeline_user_arn, + pipeline_execution_role_arn=pipeline_execution_role_arn, + cloudformation_execution_role_arn=cloudformation_execution_role_arn, + artifacts_bucket_arn=artifacts_bucket_arn, + create_image_repository=create_image_repository, + image_repository_arn=image_repository_arn, + ) + + bootstrapped: bool = environment.bootstrap(confirm_changeset=confirm_changeset) + + if bootstrapped: + environment.print_resources_summary() + + environment.save_config_safe( + config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME, cmd_names=_get_bootstrap_command_names() + ) + + click.secho( + dedent( + f"""\ + View the definition in {os.path.join(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME)}, + run sam pipeline bootstrap to generate another set of resources, or proceed to + sam pipeline init to create your pipeline configuration file. + """ + ) + ) + + if not environment.pipeline_user.is_user_provided: + click.secho( + dedent( + f"""\ + Before running {Colored().bold("sam pipeline init")}, we recommend first setting up AWS credentials + in your CI/CD account. Read more about how to do so with your provider in + {CONFIG_AWS_CRED_ON_CICD_URL}. 
+ """ + ) + ) + + +def _load_saved_pipeline_user_arn() -> Optional[str]: + samconfig: SamConfig = SamConfig(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) + if not samconfig.exists(): + return None + config: Dict[str, str] = samconfig.get_all(cmd_names=_get_bootstrap_command_names(), section="parameters") + return config.get("pipeline_user") + + +def _get_bootstrap_command_names() -> List[str]: + return ["pipeline", "bootstrap"] diff --git a/samcli/commands/pipeline/bootstrap/guided_context.py b/samcli/commands/pipeline/bootstrap/guided_context.py new file mode 100644 index 0000000000..a7f1f89b08 --- /dev/null +++ b/samcli/commands/pipeline/bootstrap/guided_context.py @@ -0,0 +1,249 @@ +""" +An interactive flow that prompt the user for required information to bootstrap the AWS account of an environment +with the required infrastructure +""" +import os +import sys +from textwrap import dedent +from typing import Optional, List, Tuple, Callable + +import click +from botocore.credentials import EnvProvider + +from samcli.commands.exceptions import CredentialsError +from samcli.commands.pipeline.external_links import CONFIG_AWS_CRED_DOC_URL +from samcli.lib.bootstrap.bootstrap import get_current_account_id +from samcli.lib.utils.colors import Colored + +from samcli.lib.utils.defaults import get_default_aws_region +from samcli.lib.utils.profile import list_available_profiles + + +class GuidedContext: + def __init__( + self, + profile: Optional[str] = None, + stage_name: Optional[str] = None, + pipeline_user_arn: Optional[str] = None, + pipeline_execution_role_arn: Optional[str] = None, + cloudformation_execution_role_arn: Optional[str] = None, + artifacts_bucket_arn: Optional[str] = None, + create_image_repository: bool = False, + image_repository_arn: Optional[str] = None, + region: Optional[str] = None, + ) -> None: + self.profile = profile + self.stage_name = stage_name + self.pipeline_user_arn = pipeline_user_arn + self.pipeline_execution_role_arn = pipeline_execution_role_arn + self.cloudformation_execution_role_arn = cloudformation_execution_role_arn + self.artifacts_bucket_arn = artifacts_bucket_arn + self.create_image_repository = create_image_repository + self.image_repository_arn = image_repository_arn + self.region = region + self.color = Colored() + + def _prompt_account_id(self) -> None: + profiles = list_available_profiles() + click.echo("The following AWS credential sources are available to use:") + click.echo( + dedent( + f"""\ + To know more about configuration AWS credentials, visit the link below: + {CONFIG_AWS_CRED_DOC_URL}\ + """ + ) + ) + has_env_creds = os.getenv(EnvProvider.ACCESS_KEY) and os.getenv(EnvProvider.SECRET_KEY) + click.echo(f"\t1 - Environment variables{' (not available)' if not has_env_creds else ''}") + for i, profile in enumerate(profiles): + click.echo(f"\t{i + 2} - {profile} (named profile)") + click.echo("\tq - Quit and configure AWS credentials") + answer = click.prompt( + "Select a credential source to associate with this stage", + show_choices=False, + show_default=False, + type=click.Choice((["1"] if has_env_creds else []) + [str(i + 2) for i in range(len(profiles))] + ["q"]), + ) + if answer == "q": + sys.exit(0) + elif answer == "1": + # by default, env variable has higher precedence + # https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html#envvars-list + self.profile = None + else: + self.profile = profiles[int(answer) - 2] + + try: + account_id = get_current_account_id(self.profile) + 
click.echo(self.color.green(f"Associated account {account_id} with stage {self.stage_name}.")) + except CredentialsError as ex: + click.echo(f"{self.color.red(ex.message)}\n") + self._prompt_account_id() + + def _prompt_stage_name(self) -> None: + click.echo( + "Enter a name for this stage. This will be referenced later when you use the sam pipeline init command:" + ) + self.stage_name = click.prompt( + "Stage name", + default=self.stage_name, + type=click.STRING, + ) + + def _prompt_region_name(self) -> None: + self.region = click.prompt( + "Enter the region in which you want these resources to be created", + type=click.STRING, + default=get_default_aws_region(), + ) + + def _prompt_pipeline_user(self) -> None: + self.pipeline_user_arn = click.prompt( + "Enter the pipeline IAM user ARN if you have previously created one, or we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_pipeline_execution_role(self) -> None: + self.pipeline_execution_role_arn = click.prompt( + "Enter the pipeline execution role ARN if you have previously created one, " + "or we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_cloudformation_execution_role(self) -> None: + self.cloudformation_execution_role_arn = click.prompt( + "Enter the CloudFormation execution role ARN if you have previously created one, " + "or we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_artifacts_bucket(self) -> None: + self.artifacts_bucket_arn = click.prompt( + "Please enter the artifact bucket ARN for your Lambda function. " + "If you do not have a bucket, we will create one for you", + default="", + type=click.STRING, + ) + + def _prompt_image_repository(self) -> None: + if click.confirm("Does your application contain any IMAGE type Lambda functions?"): + self.image_repository_arn = click.prompt( + "Please enter the ECR image repository ARN(s) for your Image type function(s)." 
+ "If you do not yet have a repository, we will create one for you", + default="", + type=click.STRING, + ) + self.create_image_repository = not bool(self.image_repository_arn) + else: + self.create_image_repository = False + + def _get_user_inputs(self) -> List[Tuple[str, Callable[[], None]]]: + return [ + (f"Account: {get_current_account_id(self.profile)}", self._prompt_account_id), + (f"Stage name: {self.stage_name}", self._prompt_stage_name), + (f"Region: {self.region}", self._prompt_region_name), + ( + f"Pipeline user ARN: {self.pipeline_user_arn}" + if self.pipeline_user_arn + else "Pipeline user: [to be created]", + self._prompt_pipeline_user, + ), + ( + f"Pipeline execution role ARN: {self.pipeline_execution_role_arn}" + if self.pipeline_execution_role_arn + else "Pipeline execution role: [to be created]", + self._prompt_pipeline_execution_role, + ), + ( + f"CloudFormation execution role ARN: {self.cloudformation_execution_role_arn}" + if self.cloudformation_execution_role_arn + else "CloudFormation execution role: [to be created]", + self._prompt_cloudformation_execution_role, + ), + ( + f"Artifacts bucket ARN: {self.artifacts_bucket_arn}" + if self.artifacts_bucket_arn + else "Artifacts bucket: [to be created]", + self._prompt_artifacts_bucket, + ), + ( + f"ECR image repository ARN: {self.image_repository_arn}" + if self.image_repository_arn + else f"ECR image repository: [{'to be created' if self.create_image_repository else 'skipped'}]", + self._prompt_image_repository, + ), + ] + + def run(self) -> None: # pylint: disable=too-many-branches + """ + Runs an interactive questionnaire to prompt the user for the ARNs of the AWS resources(infrastructure) required + for the pipeline to work. Users can provide all, none or some resources' ARNs and leave the remaining empty + and it will be created by the bootstrap command + """ + click.secho(self.color.bold("[1] Stage definition")) + if self.stage_name: + click.echo(f"Stage name: {self.stage_name}") + else: + self._prompt_stage_name() + click.echo() + + click.secho(self.color.bold("[2] Account details")) + self._prompt_account_id() + click.echo() + + if not self.region: + self._prompt_region_name() + + if self.pipeline_user_arn: + click.echo(f"Pipeline IAM user ARN: {self.pipeline_user_arn}") + else: + self._prompt_pipeline_user() + click.echo() + + click.secho(self.color.bold("[3] Reference application build resources")) + + if self.pipeline_execution_role_arn: + click.echo(f"Pipeline execution role ARN: {self.pipeline_execution_role_arn}") + else: + self._prompt_pipeline_execution_role() + + if self.cloudformation_execution_role_arn: + click.echo(f"CloudFormation execution role ARN: {self.cloudformation_execution_role_arn}") + else: + self._prompt_cloudformation_execution_role() + + if self.artifacts_bucket_arn: + click.echo(f"Artifacts bucket ARN: {self.cloudformation_execution_role_arn}") + else: + self._prompt_artifacts_bucket() + + if self.image_repository_arn: + click.echo(f"ECR image repository ARN: {self.image_repository_arn}") + else: + self._prompt_image_repository() + click.echo() + + # Ask customers to confirm the inputs + click.secho(self.color.bold("[4] Summary")) + while True: + inputs = self._get_user_inputs() + click.secho("Below is the summary of the answers:") + for i, (text, _) in enumerate(inputs): + click.secho(f"\t{i + 1} - {text}") + edit_input = click.prompt( + text="Press enter to confirm the values above, or select an item to edit the value", + default="0", + show_choices=False, + show_default=False, + 
type=click.Choice(["0"] + [str(i + 1) for i in range(len(inputs))]), + ) + click.echo() + if int(edit_input): + inputs[int(edit_input) - 1][1]() + click.echo() + else: + break diff --git a/samcli/commands/pipeline/external_links.py b/samcli/commands/pipeline/external_links.py new file mode 100644 index 0000000000..77301ebb1b --- /dev/null +++ b/samcli/commands/pipeline/external_links.py @@ -0,0 +1,8 @@ +""" +The module to store external links. Put them in a centralized place so that we can verify their +validity automatically. +""" +CONFIG_AWS_CRED_DOC_URL = "https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html" + +_SAM_DOC_PREFIX = "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide" +CONFIG_AWS_CRED_ON_CICD_URL = _SAM_DOC_PREFIX + "/serverless-generating-example-ci-cd-others.html" diff --git a/samcli/commands/pipeline/init/__init__.py b/samcli/commands/pipeline/init/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/pipeline/init/cli.py b/samcli/commands/pipeline/init/cli.py new file mode 100644 index 0000000000..bcbe205c6a --- /dev/null +++ b/samcli/commands/pipeline/init/cli.py @@ -0,0 +1,51 @@ +""" +CLI command for "pipeline init" command +""" +from typing import Any, Optional + +import click + +from samcli.cli.cli_config_file import configuration_option, TomlProvider +from samcli.cli.main import pass_context, common_options as cli_framework_options +from samcli.commands.pipeline.init.interactive_init_flow import InteractiveInitFlow +from samcli.lib.telemetry.metric import track_command + +SHORT_HELP = "Generates CI/CD pipeline configuration files." +HELP_TEXT = """ +sam pipeline init generates a pipeline configuration file that you can use to connect your +AWS account(s) to your CI/CD system. Before using sam pipeline init, you must +bootstrap the necessary resources for each stage in your pipeline. You can do this +by running sam pipeline init --bootstrap to be guided through the setup and configuration +file generation process, or refer to resources you have previously created with the +sam pipeline bootstrap command. +""" + + +@click.command("init", help=HELP_TEXT, short_help=SHORT_HELP) +@configuration_option(provider=TomlProvider(section="parameters")) +@click.option( + "--bootstrap", + is_flag=True, + default=False, + help="Allow bootstrapping resources.", +) +@cli_framework_options +@pass_context +@track_command # pylint: disable=R0914 +def cli(ctx: Any, config_env: Optional[str], config_file: Optional[str], bootstrap: bool) -> None: + """ + `sam pipeline init` command entry point + """ + + # Currently we support interactive mode only, i.e. the user doesn't provide the required arguments during the call + # so we call do_cli without any arguments. This will change after supporting the non interactive mode. 
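+    # Passing bootstrap=True lets the interactive flow offer to run "sam pipeline bootstrap" inline whenever fewer bootstrapped stages are detected than the chosen pipeline template requires.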
+ do_cli(bootstrap) + + +def do_cli(bootstrap: bool) -> None: + """ + implementation of `sam pipeline init` command + """ + # TODO non-interactive mode + init_flow = InteractiveInitFlow(bootstrap) + init_flow.do_interactive() diff --git a/samcli/commands/pipeline/init/interactive_init_flow.py b/samcli/commands/pipeline/init/interactive_init_flow.py new file mode 100644 index 0000000000..7504f3a66b --- /dev/null +++ b/samcli/commands/pipeline/init/interactive_init_flow.py @@ -0,0 +1,482 @@ +""" +Interactive flow that prompts the user for a pipeline template (cookiecutter template) and uses it to generate +a pipeline configuration file +""" +import json +import logging +import os +from json import JSONDecodeError +from pathlib import Path +from textwrap import dedent +from typing import Dict, List, Tuple + +import click + +from samcli.cli.main import global_cfg +from samcli.commands.exceptions import ( + AppPipelineTemplateMetadataException, + PipelineTemplateCloneException, +) +from samcli.lib.config.samconfig import SamConfig +from samcli.lib.cookiecutter.interactive_flow import InteractiveFlow +from samcli.lib.cookiecutter.interactive_flow_creator import InteractiveFlowCreator +from samcli.lib.cookiecutter.question import Choice +from samcli.lib.cookiecutter.template import Template +from samcli.lib.utils import osutils +from samcli.lib.utils.colors import Colored +from samcli.lib.utils.git_repo import GitRepo, CloneRepoException +from .pipeline_templates_manifest import Provider, PipelineTemplateMetadata, PipelineTemplatesManifest +from ..bootstrap.cli import ( + do_cli as do_bootstrap, + PIPELINE_CONFIG_DIR, + PIPELINE_CONFIG_FILENAME, + _get_bootstrap_command_names, +) + +LOG = logging.getLogger(__name__) +shared_path: Path = global_cfg.config_dir +APP_PIPELINE_TEMPLATES_REPO_URL = "https://github.com/aws/aws-sam-cli-pipeline-init-templates.git" +APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME = "aws-sam-cli-app-pipeline-templates" +CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME = "custom-pipeline-template" +SAM_PIPELINE_TEMPLATE_SOURCE = "AWS Quick Start Pipeline Templates" +CUSTOM_PIPELINE_TEMPLATE_SOURCE = "Custom Pipeline Template Location" + + +class InteractiveInitFlow: + def __init__(self, allow_bootstrap: bool): + self.allow_bootstrap = allow_bootstrap + self.color = Colored() + + def do_interactive(self) -> None: + """ + An interactive flow that prompts the user for a pipeline template (cookiecutter template) location, downloads it, + runs its specific questionnaire, then generates the pipeline config file + based on the template and user's responses + """ + click.echo( + dedent( + """\ + + sam pipeline init generates a pipeline configuration file that you can use to connect your + AWS account(s) to your CI/CD system. We will guide you through the process to + bootstrap resources for each stage, then walk through the details necessary for + creating the pipeline config file. + + Please ensure you are in the root folder of your SAM application before you begin.
+ """ + ) + ) + + click.echo("Select a pipeline structure template to get started:") + pipeline_template_source_question = Choice( + key="pipeline-template-source", + text="Select template", + options=[SAM_PIPELINE_TEMPLATE_SOURCE, CUSTOM_PIPELINE_TEMPLATE_SOURCE], + is_required=True, + ) + source = pipeline_template_source_question.ask() + if source == CUSTOM_PIPELINE_TEMPLATE_SOURCE: + generated_files = self._generate_from_custom_location() + else: + generated_files = self._generate_from_app_pipeline_templates() + click.secho(Colored().green("Successfully created the pipeline configuration file(s):")) + for file in generated_files: + click.secho(Colored().green(f"\t- {file}")) + + def _generate_from_app_pipeline_templates( + self, + ) -> List[str]: + """ + Prompts the user to choose a pipeline template from SAM predefined set of pipeline templates hosted in the git + repository: aws/aws-sam-cli-pipeline-init-templates.git + downloads locally, then generates the pipeline configuration file from the selected pipeline template. + Finally, return the list of generated files. + """ + pipeline_templates_local_dir: Path = _clone_app_pipeline_templates() + pipeline_templates_manifest: PipelineTemplatesManifest = _read_app_pipeline_templates_manifest( + pipeline_templates_local_dir + ) + # The manifest contains multiple pipeline-templates so select one + selected_pipeline_template_metadata: PipelineTemplateMetadata = _prompt_pipeline_template( + pipeline_templates_manifest + ) + selected_pipeline_template_dir: Path = pipeline_templates_local_dir.joinpath( + selected_pipeline_template_metadata.location + ) + return self._generate_from_pipeline_template(selected_pipeline_template_dir) + + def _generate_from_custom_location( + self, + ) -> List[str]: + """ + Prompts the user for a custom pipeline template location, downloads locally, + then generates the pipeline config file and return the list of generated files + """ + pipeline_template_git_location: str = click.prompt("Template Git location") + if os.path.exists(pipeline_template_git_location): + return self._generate_from_pipeline_template(Path(pipeline_template_git_location)) + + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + tempdir_path = Path(tempdir) + pipeline_template_local_dir: Path = _clone_pipeline_templates( + pipeline_template_git_location, tempdir_path, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME + ) + return self._generate_from_pipeline_template(pipeline_template_local_dir) + + def _prompt_run_bootstrap_within_pipeline_init(self, stage_names: List[str], number_of_stages: int) -> bool: + """ + Prompt bootstrap if `--bootstrap` flag is provided. Return True if bootstrap process is executed. + """ + if not stage_names: + click.echo("[!] None detected in this account.") + else: + click.echo( + Colored().yellow( + f"Only {len(stage_names)} stage(s) were detected, " + f"fewer than what the template requires: {number_of_stages}." + ) + ) + click.echo() + + if self.allow_bootstrap: + if click.confirm( + "Do you want to go through stage setup process now? If you choose no, " + "you can still reference other bootstrapped resources." + ): + click.secho( + dedent( + """\ + + For each stage, we will ask for [1] stage definition, [2] account details, and [3] + reference application build resources in order to bootstrap these pipeline + resources. + + We recommend using an individual AWS account profiles for each stage in your + pipeline. You can set these profiles up using [little bit of info on how to do + this/docs]. 
+ """ + ) + ) + + click.echo(Colored().bold(f"\nStage {len(stage_names) + 1} Setup\n")) + do_bootstrap( + region=None, + profile=None, + interactive=True, + stage_name=None, + pipeline_user_arn=None, + pipeline_execution_role_arn=None, + cloudformation_execution_role_arn=None, + artifacts_bucket_arn=None, + create_image_repository=False, + image_repository_arn=None, + confirm_changeset=True, + config_file=None, + config_env=None, + standalone=False, + ) + return True + else: + click.echo( + dedent( + """\ + To set up stage(s), please quit the process using Ctrl+C and use one of the following commands: + sam pipeline init --bootstrap To be guided through the stage and config file creation process. + sam pipeline bootstrap To specify details for an individual stage. + """ + ) + ) + click.prompt( + "To reference stage resources bootstrapped in a different account, press enter to proceed", default="" + ) + return False + + def _generate_from_pipeline_template(self, pipeline_template_dir: Path) -> List[str]: + """ + Generates a pipeline config file from a given pipeline template local location + and return the list of generated files. + """ + pipeline_template: Template = _initialize_pipeline_template(pipeline_template_dir) + number_of_stages = (pipeline_template.metadata or {}).get("number_of_stages") + if not number_of_stages: + LOG.debug("Cannot find number_of_stages from template's metadata, set to default 2.") + number_of_stages = 2 + click.echo(f"You are using the {number_of_stages}-stage pipeline template.") + _draw_stage_diagram(number_of_stages) + while True: + click.echo("Checking for existing stages...\n") + stage_names, bootstrap_context = _load_pipeline_bootstrap_resources() + if len(stage_names) < number_of_stages and self._prompt_run_bootstrap_within_pipeline_init( + stage_names, number_of_stages + ): + # the customers just went through the bootstrap process, + # refresh the pipeline bootstrap resources and see whether bootstrap is still needed + continue + break + + context: Dict = pipeline_template.run_interactive_flows(bootstrap_context) + with osutils.mkdir_temp() as generate_dir: + LOG.debug("Generating pipeline files into %s", generate_dir) + context["outputDir"] = "." # prevent cookiecutter from generating a sub-folder + pipeline_template.generate_project(context, generate_dir) + return _copy_dir_contents_to_cwd(generate_dir) + + +def _load_pipeline_bootstrap_resources() -> Tuple[List[str], Dict[str, str]]: + section = "parameters" + context: Dict = {} + + config = SamConfig(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + if not config.exists(): + context[str(["stage_names_message"])] = "" + return [], context + + # config.get_stage_names() will return the list of + # bootstrapped stage names and "default" which is used to store shared values + # we don't want to include "default" here. 
+ stage_names = [stage_name for stage_name in config.get_stage_names() if stage_name != "default"] + for index, stage in enumerate(stage_names): + for key, value in config.get_all(_get_bootstrap_command_names(), section, stage).items(): + context[str([stage, key])] = value + # create an index alias for each stage name + # so that if customers type "1," it is equivalent to the first stage name + context[str([str(index + 1), key])] = value + + # pre-load the list of stage names detected from pipelineconfig.toml + stage_names_message = ( + "Here are the stage names detected " + + f"in {os.path.join(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME)}:\n" + + "\n".join([f"\t{index + 1} - {stage_name}" for index, stage_name in enumerate(stage_names)]) + ) + context[str(["stage_names_message"])] = stage_names_message + + return stage_names, context + + +def _copy_dir_contents_to_cwd(source_dir: str) -> List[str]: + """ + Copy the contents of source_dir into the current cwd. + If existing files are encountered, ask for confirmation. + If not confirmed, all files will be written to + .aws-sam/pipeline/generated-files/ + """ + file_paths: List[str] = [] + existing_file_paths: List[str] = [] + for root, _, files in os.walk(source_dir): + for filename in files: + file_path = Path(root, filename) + target_file_path = Path(".").joinpath(file_path.relative_to(source_dir)) + LOG.debug("Verify %s does not exist", target_file_path) + if target_file_path.exists(): + existing_file_paths.append(str(target_file_path)) + file_paths.append(str(target_file_path)) + if existing_file_paths: + click.echo("\nThe following files already exist:") + for existing_file_path in existing_file_paths: + click.echo(f"\t- {existing_file_path}") + if not click.confirm("Do you want to override them?"): + target_dir = str(Path(PIPELINE_CONFIG_DIR, "generated-files")) + osutils.copytree(source_dir, target_dir) + click.echo(f"All files are saved to {target_dir}.") + return [str(Path(target_dir, path)) for path in file_paths] + LOG.debug("Copy contents of %s to cwd", source_dir) + osutils.copytree(source_dir, ".") + return file_paths + + +def _clone_app_pipeline_templates() -> Path: + """ + clone aws/aws-sam-cli-pipeline-init-templates.git Git repo to the local machine in SAM shared directory. + Returns: + the local directory path where the repo is cloned. + """ + try: + return _clone_pipeline_templates( + repo_url=APP_PIPELINE_TEMPLATES_REPO_URL, + clone_dir=shared_path, + clone_name=APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME, + ) + except PipelineTemplateCloneException: + # If can't clone app pipeline templates, try using an old clone from a previous run if already exist + expected_previous_clone_local_path: Path = shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME) + if expected_previous_clone_local_path.exists(): + click.echo("Unable to download updated app pipeline templates, using existing ones") + return expected_previous_clone_local_path + raise + + +def _clone_pipeline_templates(repo_url: str, clone_dir: Path, clone_name: str) -> Path: + """ + clone a given pipeline templates' Git repo to the user machine inside the given clone_dir directory + under the given clone name. 
For example, if clone_name is "custom-pipeline-template" then the location to clone + to is "/clone/dir/path/custom-pipeline-template/" + + Parameters: + repo_url: the URL of the Git repo to clone + clone_dir: the local parent directory to clone to + clone_name: The folder name to give to the created clone inside clone_dir + + Returns: + Path to the local clone + """ + try: + repo: GitRepo = GitRepo(repo_url) + clone_path: Path = repo.clone(clone_dir, clone_name, replace_existing=True) + return clone_path + except (OSError, CloneRepoException) as ex: + raise PipelineTemplateCloneException(str(ex)) from ex + + +def _read_app_pipeline_templates_manifest(pipeline_templates_dir: Path) -> PipelineTemplatesManifest: + """ + parse and return the manifest yaml file located in the root directory of the SAM pipeline templates folder: + + Parameters: + pipeline_templates_dir: local directory of SAM pipeline templates + + Raises: + AppPipelineTemplateManifestException if the manifest is not found, ill-formatted or missing required keys + + Returns: + The manifest of the pipeline templates + """ + manifest_path: Path = pipeline_templates_dir.joinpath("manifest.yaml") + return PipelineTemplatesManifest(manifest_path) + + +def _prompt_pipeline_template(pipeline_templates_manifest: PipelineTemplatesManifest) -> PipelineTemplateMetadata: + """ + Prompts the user a list of the available CI/CD systems along with associated app pipeline templates to choose + one of them + + Parameters: + pipeline_templates_manifest: A manifest file lists the available providers and the associated pipeline templates + + Returns: + The manifest (A section in the pipeline_templates_manifest) of the chosen pipeline template; + """ + provider = _prompt_cicd_provider(pipeline_templates_manifest.providers) + provider_pipeline_templates: List[PipelineTemplateMetadata] = [ + t for t in pipeline_templates_manifest.templates if t.provider == provider.id + ] + selected_template_manifest: PipelineTemplateMetadata = _prompt_provider_pipeline_template( + provider_pipeline_templates + ) + return selected_template_manifest + + +def _prompt_cicd_provider(available_providers: List[Provider]) -> Provider: + """ + Prompts the user a list of the available CI/CD systems to choose from + + Parameters: + available_providers: List of available CI/CD systems such as Jenkins, Gitlab and CircleCI + + Returns: + The chosen provider + """ + if len(available_providers) == 1: + return available_providers[0] + + question_to_choose_provider = Choice( + key="provider", text="CI/CD system", options=[p.display_name for p in available_providers], is_required=True + ) + chosen_provider_display_name = question_to_choose_provider.ask() + return next(p for p in available_providers if p.display_name == chosen_provider_display_name) + + +def _prompt_provider_pipeline_template( + provider_available_pipeline_templates_metadata: List[PipelineTemplateMetadata], +) -> PipelineTemplateMetadata: + """ + Prompts the user a list of the available pipeline templates to choose from + + Parameters: + provider_available_pipeline_templates_metadata: List of available pipeline templates manifests + + Returns: + The chosen pipeline template manifest + """ + if len(provider_available_pipeline_templates_metadata) == 1: + return provider_available_pipeline_templates_metadata[0] + question_to_choose_pipeline_template = Choice( + key="pipeline-template", + text="Which pipeline template would you like to use?", + options=[t.display_name for t in 
provider_available_pipeline_templates_metadata], + ) + chosen_pipeline_template_display_name = question_to_choose_pipeline_template.ask() + return next( + t + for t in provider_available_pipeline_templates_metadata + if t.display_name == chosen_pipeline_template_display_name + ) + + +def _initialize_pipeline_template(pipeline_template_dir: Path) -> Template: + """ + Initialize a pipeline template from a given pipeline template (cookiecutter template) location + + Parameters: + pipeline_template_dir: The local location of the pipeline cookiecutter template + + Returns: + The initialized pipeline's cookiecutter template + """ + interactive_flow = _get_pipeline_template_interactive_flow(pipeline_template_dir) + metadata = _get_pipeline_template_metadata(pipeline_template_dir) + return Template(location=str(pipeline_template_dir), interactive_flows=[interactive_flow], metadata=metadata) + + +def _get_pipeline_template_metadata(pipeline_template_dir: Path) -> Dict: + """ + Load the metadata from the file metadata.json located in the template directory, + raise an exception if anything wrong. + """ + metadata_path = Path(pipeline_template_dir, "metadata.json") + if not metadata_path.exists(): + raise AppPipelineTemplateMetadataException(f"Cannot find metadata file {metadata_path}") + try: + with open(metadata_path, "r", encoding="utf-8") as file: + metadata = json.load(file) + if isinstance(metadata, dict): + return metadata + raise AppPipelineTemplateMetadataException(f"Invalid content found in {metadata_path}") + except JSONDecodeError as ex: + raise AppPipelineTemplateMetadataException(f"Invalid JSON found in {metadata_path}") from ex + + +def _get_pipeline_template_interactive_flow(pipeline_template_dir: Path) -> InteractiveFlow: + """ + A pipeline template defines its own interactive flow (questionnaire) in a JSON file named questions.json located + in the root directory of the template. This questionnaire defines a set of questions to prompt to the user and + use the responses as the cookiecutter context + + Parameters: + pipeline_template_dir: The local location of the pipeline cookiecutter template + + Raises: + QuestionsNotFoundException: if the pipeline template is missing questions.json file. + QuestionsFailedParsingException: if questions.json file is ill-formatted or missing required keys. + + Returns: + The interactive flow + """ + flow_definition_path: Path = pipeline_template_dir.joinpath("questions.json") + return InteractiveFlowCreator.create_flow(str(flow_definition_path)) + + +def _lines_for_stage(stage_index: int) -> List[str]: + return [ + " _________ ", + "| |", + f"| Stage {stage_index} |", + "|_________|", + ] + + +def _draw_stage_diagram(number_of_stages: int) -> None: + delimiters = [" ", " ", "->", " "] + stage_lines = [_lines_for_stage(i + 1) for i in range(number_of_stages)] + for i, delimiter in enumerate(delimiters): + click.echo(delimiter.join([stage_lines[stage_i][i] for stage_i in range(number_of_stages)])) + click.echo("") diff --git a/samcli/commands/pipeline/init/pipeline_templates_manifest.py b/samcli/commands/pipeline/init/pipeline_templates_manifest.py new file mode 100644 index 0000000000..8249e14d85 --- /dev/null +++ b/samcli/commands/pipeline/init/pipeline_templates_manifest.py @@ -0,0 +1,61 @@ +""" +Represents a manifest that lists the available SAM pipeline templates. 
+Example: + providers: + - displayName: Jenkins + id: jenkins + - displayName: Gitlab CI/CD + id: gitlab + - displayName: Github Actions + id: github-actions + templates: + - displayName: jenkins-two-environments-pipeline + provider: Jenkins + location: templates/cookiecutter-jenkins-two-environments-pipeline + - displayName: gitlab-two-environments-pipeline + provider: Gitlab + location: templates/cookiecutter-gitlab-two-environments-pipeline + - displayName: Github-Actions-two-environments-pipeline + provider: Github Actions + location: templates/cookiecutter-github-actions-two-environments-pipeline +""" +from pathlib import Path +from typing import Dict, List + +import yaml + +from samcli.commands.exceptions import AppPipelineTemplateManifestException +from samcli.yamlhelper import parse_yaml_file + + +class Provider: + """ CI/CD system such as Jenkins, Gitlab and GitHub-Actions""" + + def __init__(self, manifest: Dict) -> None: + self.id: str = manifest["id"] + self.display_name: str = manifest["displayName"] + + +class PipelineTemplateMetadata: + """ The metadata of a given pipeline template""" + + def __init__(self, manifest: Dict) -> None: + self.display_name: str = manifest["displayName"] + self.provider: str = manifest["provider"] + self.location: str = manifest["location"] + + +class PipelineTemplatesManifest: + """ The metadata of the available CI/CD systems and the pipeline templates""" + + def __init__(self, manifest_path: Path) -> None: + try: + manifest: Dict = parse_yaml_file(file_path=str(manifest_path)) + self.providers: List[Provider] = list(map(Provider, manifest["providers"])) + self.templates: List[PipelineTemplateMetadata] = list(map(PipelineTemplateMetadata, manifest["templates"])) + except (FileNotFoundError, KeyError, TypeError, yaml.YAMLError) as ex: + raise AppPipelineTemplateManifestException( + "SAM pipeline templates manifest file is not found or ill-formatted. This could happen if the file " + f"{manifest_path} got deleted or modified. " + "If you believe this is not the case, please file an issue at https://github.com/aws/aws-sam-cli/issues" + ) from ex diff --git a/samcli/commands/pipeline/pipeline.py b/samcli/commands/pipeline/pipeline.py new file mode 100644 index 0000000000..2d8df4463e --- /dev/null +++ b/samcli/commands/pipeline/pipeline.py @@ -0,0 +1,21 @@ +""" +Command group for "pipeline" suite commands.
It provides common CLI arguments, template parsing capabilities, +setting up stdin/stdout etc +""" + +import click + +from .bootstrap.cli import cli as bootstrap_cli +from .init.cli import cli as init_cli + + +@click.group() +def cli() -> None: + """ + Manage the continuous delivery of the application + """ + + +# Add individual commands under this group +cli.add_command(bootstrap_cli) +cli.add_command(init_cli) diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py index eaed58d630..a9a590dc7f 100644 --- a/samcli/lib/bootstrap/bootstrap.py +++ b/samcli/lib/bootstrap/bootstrap.py @@ -4,32 +4,51 @@ import json import logging +from typing import Optional + +import boto3 +from botocore.exceptions import ClientError + from samcli import __version__ from samcli.cli.global_config import GlobalConfig -from samcli.commands.exceptions import UserException -from samcli.lib.utils.managed_cloudformation_stack import manage_stack as manage_cloudformation_stack +from samcli.commands.exceptions import UserException, CredentialsError +from samcli.lib.utils.managed_cloudformation_stack import StackOutput, manage_stack as manage_cloudformation_stack SAM_CLI_STACK_NAME = "aws-sam-cli-managed-default" LOG = logging.getLogger(__name__) def manage_stack(profile, region): - outputs = manage_cloudformation_stack( + outputs: StackOutput = manage_cloudformation_stack( profile=None, region=region, stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() ) - try: - bucket_name = next(o for o in outputs if o["OutputKey"] == "SourceBucket")["OutputValue"] - except StopIteration as ex: + bucket_name = outputs.get("SourceBucket") + if bucket_name is None: msg = ( "Stack " + SAM_CLI_STACK_NAME + " exists, but is missing the managed source bucket key. " "Failing as this stack was likely not created by the AWS SAM CLI." 
) - raise UserException(msg) from ex + raise UserException(msg) # This bucket name is what we would write to a config file return bucket_name +def get_current_account_id(profile: Optional[str] = None): + """Returns account ID based on used AWS credentials.""" + session = boto3.Session(profile_name=profile) # type: ignore + sts_client = session.client("sts") + try: + caller_identity = sts_client.get_caller_identity() + except ClientError as ex: + if ex.response["Error"]["Code"] == "InvalidClientTokenId": + raise CredentialsError("Cannot identify account due to invalid configured credentials.") from ex + raise CredentialsError("Cannot identify account based on configured credentials.") from ex + if "Account" not in caller_identity: + raise CredentialsError("Cannot identify account based on configured credentials.") + return caller_identity["Account"] + + def _get_stack_template(): gc = GlobalConfig() info = {"version": __version__, "installationId": gc.installation_id if gc.installation_id else "unknown"} diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index 996ac5f648..5af1c0080a 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -41,6 +41,12 @@ def __init__(self, config_dir, filename=None): """ self.filepath = Path(config_dir, filename or DEFAULT_CONFIG_FILE_NAME) + def get_stage_names(self): + self._read() + if isinstance(self.document, dict): + return [stage for stage, value in self.document.items() if isinstance(value, dict)] + return [] + def get_all(self, cmd_names, section, env=DEFAULT_ENV): """ Gets a value from the configuration file for the given environment, command and section @@ -153,6 +159,10 @@ def sanity_check(self): def exists(self): return self.filepath.exists() + def _ensure_exists(self): + self.filepath.parent.mkdir(parents=True, exist_ok=True) + self.filepath.touch() + def path(self): return str(self.filepath) @@ -183,8 +193,8 @@ def _read(self): def _write(self): if not self.document: return - if not self.exists(): - open(self.filepath, "a+").close() + + self._ensure_exists() current_version = self._version() if self._version() else SAM_CONFIG_VERSION try: diff --git a/samcli/lib/cookiecutter/exceptions.py b/samcli/lib/cookiecutter/exceptions.py index af19364811..5d379228d8 100644 --- a/samcli/lib/cookiecutter/exceptions.py +++ b/samcli/lib/cookiecutter/exceptions.py @@ -4,8 +4,8 @@ class CookiecutterErrorException(Exception): fmt = "An unspecified error occurred" - def __init__(self, **kwargs): - msg = self.fmt.format(**kwargs) + def __init__(self, **kwargs): # type: ignore + msg: str = self.fmt.format(**kwargs) Exception.__init__(self, msg) self.kwargs = kwargs diff --git a/samcli/lib/cookiecutter/interactive_flow.py b/samcli/lib/cookiecutter/interactive_flow.py index 996ac89ce3..95ce846dc0 100644 --- a/samcli/lib/cookiecutter/interactive_flow.py +++ b/samcli/lib/cookiecutter/interactive_flow.py @@ -1,7 +1,10 @@ """A flow of questions to be asked to the user in an interactive way.""" -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional, List, Tuple + +import click from .question import Question +from ..utils.colors import Colored class InteractiveFlow: @@ -20,6 +23,7 @@ def __init__(self, questions: Dict[str, Question], first_question_key: str): self._questions: Dict[str, Question] = questions self._first_question_key: str = first_question_key self._current_question: Optional[Question] = None + self._color = Colored() def advance_to_next_question(self, current_answer: 
Optional[Any] = None) -> Optional[Question]: """ @@ -61,9 +65,25 @@ def run( associated to the key of the corresponding question """ context = context.copy() + answers: List[Tuple[str, Any]] = [] + question = self.advance_to_next_question() while question: answer = question.ask(context=context) context[question.key] = answer + answers.append((question.key, answer)) question = self.advance_to_next_question(answer) + + # print summary + click.echo(self._color.bold("SUMMARY")) + click.echo("We will generate a pipeline config file based on the following information:") + + for question_key, answer in answers: + if answer is None: + # ignore unanswered questions + continue + + question = self._questions[question_key] + click.echo(f"\t{question.text}: {self._color.underline(str(answer))}") + return context diff --git a/samcli/lib/cookiecutter/interactive_flow_creator.py b/samcli/lib/cookiecutter/interactive_flow_creator.py index d861174951..b3552d4065 100644 --- a/samcli/lib/cookiecutter/interactive_flow_creator.py +++ b/samcli/lib/cookiecutter/interactive_flow_creator.py @@ -17,7 +17,7 @@ class QuestionsFailedParsingException(UserException): class InteractiveFlowCreator: @staticmethod - def create_flow(flow_definition_path: str, extra_context: Optional[Dict] = None): + def create_flow(flow_definition_path: str, extra_context: Optional[Dict] = None) -> InteractiveFlow: """ This method parses the given json/yaml file to create an InteractiveFLow. It expects the file to define a list of questions. It parses the questions and add it to the flow in the same order they are defined @@ -77,7 +77,7 @@ def _load_questions( questions_definition = InteractiveFlowCreator._parse_questions_definition(flow_definition_path, extra_context) try: - for question in questions_definition.get("questions"): + for question in questions_definition.get("questions", []): q = QuestionFactory.create_question_from_json(question) if not first_question_key: first_question_key = q.key @@ -90,7 +90,7 @@ def _load_questions( raise QuestionsFailedParsingException(f"Failed to parse questions: {str(ex)}") from ex @staticmethod - def _parse_questions_definition(file_path, extra_context: Optional[Dict] = None): + def _parse_questions_definition(file_path: str, extra_context: Optional[Dict] = None) -> Dict: """ Read the questions definition file, do variable substitution, parse it as JSON/YAML diff --git a/samcli/lib/cookiecutter/processor.py b/samcli/lib/cookiecutter/processor.py index 5994c77949..4f34df06f8 100644 --- a/samcli/lib/cookiecutter/processor.py +++ b/samcli/lib/cookiecutter/processor.py @@ -9,7 +9,7 @@ class Processor(ABC): """ @abstractmethod - def run(self, context: Dict): + def run(self, context: Dict) -> Dict: """ the processing logic of this processor diff --git a/samcli/lib/cookiecutter/question.py b/samcli/lib/cookiecutter/question.py index 786836a400..4fad0ea020 100644 --- a/samcli/lib/cookiecutter/question.py +++ b/samcli/lib/cookiecutter/question.py @@ -1,4 +1,5 @@ """ This module represents the questions to ask to the user to fulfill the cookiecutter context. """ +from abc import ABC, abstractmethod from enum import Enum from typing import Any, Dict, List, Optional, Type, Union @@ -14,7 +15,18 @@ class QuestionKind(Enum): default = "default" -class Question: +class Promptable(ABC): + """ + Abstract class Question, Info, Choice, Confirm implement. + These classes need to implement their own prompt() method to prompt differently. 
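+    For example, the base Question.prompt() below defers to click.prompt(), while a subclass such as Choice can render its options as a numbered list instead.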
+ """ + + @abstractmethod + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + pass + + +class Question(Promptable): """ A question to be prompt to the user in an interactive flow where the response is used to fulfill the cookiecutter context. @@ -53,12 +65,14 @@ def __init__( text: str, default: Optional[Union[str, Dict]] = None, is_required: Optional[bool] = None, + allow_autofill: Optional[bool] = None, next_question_map: Optional[Dict[str, str]] = None, default_next_question_key: Optional[str] = None, ): self._key = key self._text = text self._required = is_required + self._allow_autofill = allow_autofill self._default_answer = default # if it is an optional question, set an empty default answer to prevent click from keep asking for an answer if not self._required and self._default_answer is None: @@ -67,30 +81,30 @@ def __init__( self._default_next_question_key = default_next_question_key @property - def key(self): + def key(self) -> str: return self._key @property - def text(self): + def text(self) -> str: return self._text @property - def default_answer(self): - return self._default_answer + def default_answer(self) -> Optional[Any]: + return self._resolve_default_answer() @property - def required(self): + def required(self) -> Optional[bool]: return self._required @property - def next_question_map(self): + def next_question_map(self) -> Dict[str, str]: return self._next_question_map @property - def default_next_question_key(self): + def default_next_question_key(self) -> Optional[str]: return self._default_next_question_key - def ask(self, context: Dict) -> Any: + def ask(self, context: Optional[Dict] = None) -> Any: """ prompt the user this question @@ -104,7 +118,20 @@ def ask(self, context: Dict) -> Any: The user provided answer. """ resolved_default_answer = self._resolve_default_answer(context) - return click.prompt(text=self._text, default=resolved_default_answer) + + # skip the question and directly use the default value if autofill is allowed. 
+ if resolved_default_answer is not None and self._allow_autofill: + return resolved_default_answer + + # if it is an optional question with no default answer, + # set an empty default answer to prevent click from keep asking for an answer + if not self._required and resolved_default_answer is None: + resolved_default_answer = "" + + return self.prompt(self._resolve_text(context), resolved_default_answer) + + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + return click.prompt(text=text, default=default_answer) def get_next_question_key(self, answer: Any) -> Optional[str]: # _next_question_map is a Dict[str(answer), str(next question key)] @@ -112,7 +139,7 @@ def get_next_question_key(self, answer: Any) -> Optional[str]: answer = str(answer) return self._next_question_map.get(answer, self._default_next_question_key) - def set_default_next_question_key(self, next_question_key): + def set_default_next_question_key(self, next_question_key: str) -> None: self._default_next_question_key = next_question_key def _resolve_key_path(self, key_path: List, context: Dict) -> List[str]: @@ -150,49 +177,59 @@ def _resolve_key_path(self, key_path: List, context: Dict) -> List[str]: raise ValueError(f'Invalid value "{unresolved_key}" in key path') return resolved_key_path - def _resolve_default_answer(self, context: Dict) -> Optional[Any]: + def _resolve_value_from_expression(self, expression: Any, context: Optional[Dict] = None) -> Optional[Any]: """ - a question may have a default answer provided directly through the "default_answer" value + a question may have a value provided directly as string or number value or indirectly from cookiecutter context using a key path Parameters ---------- context - Cookiecutter context used to resolve default values and answered questions' answers. + Cookiecutter context used to resolve values. Raises ------ KeyError - When default value depends on the answer to a non-existent question + When an expression depends on the answer to a non-existent question ValueError - The default value is malformed + The expression is malformed Returns ------- - Optional default answer, it might be resolved from cookiecutter context using specified key path. + Optional value, it might be resolved from cookiecutter context using specified key path. 
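+
+        Examples
+        --------
+        Illustrative expression forms:
+
+            "main"                    # a literal, returned as-is
+            {"keyPath": ["stage"]}    # resolved from the cookiecutter context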
""" - if isinstance(self._default_answer, dict): + if isinstance(expression, dict): + context = context if context else {} + # load value using key path from cookiecutter - if "keyPath" not in self._default_answer: - raise KeyError(f'Missing key "keyPath" in question default "{self._default_answer}".') - unresolved_key_path = self._default_answer.get("keyPath", []) + if "keyPath" not in expression: + raise KeyError(f'Missing key "keyPath" in "{expression}".') + unresolved_key_path = expression.get("keyPath", []) if not isinstance(unresolved_key_path, list): - raise ValueError(f'Invalid default answer "{self._default_answer}" for question {self.key}') + raise ValueError(f'Invalid expression "{expression}" in question {self.key}') return context.get(str(self._resolve_key_path(unresolved_key_path, context))) + return expression + + def _resolve_text(self, context: Optional[Dict] = None) -> str: + resolved_text = self._resolve_value_from_expression(self._text, context) + if resolved_text is None: + raise ValueError(f"Cannot resolve value from expression: {self._text}") + return str(resolved_text) - return self._default_answer + def _resolve_default_answer(self, context: Optional[Dict] = None) -> Optional[Any]: + return self._resolve_value_from_expression(self._default_answer, context) class Info(Question): - def ask(self, context: Dict) -> None: - return click.echo(message=self._text) + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + return click.echo(message=text) class Confirm(Question): - def ask(self, context: Dict) -> bool: - return click.confirm(text=self._text) + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + return click.confirm(text=text) class Choice(Question): @@ -203,26 +240,27 @@ def __init__( options: List[str], default: Optional[str] = None, is_required: Optional[bool] = None, + allow_autofill: Optional[bool] = None, next_question_map: Optional[Dict[str, str]] = None, default_next_question_key: Optional[str] = None, ): if not options: raise ValueError("No defined options") self._options = options - super().__init__(key, text, default, is_required, next_question_map, default_next_question_key) + super().__init__(key, text, default, is_required, allow_autofill, next_question_map, default_next_question_key) - def ask(self, context: Dict) -> str: - resolved_default_answer = self._resolve_default_answer(context) - click.echo(self._text) + def prompt(self, text: str, default_answer: Optional[Any]) -> Any: + click.echo(text) for index, option in enumerate(self._options): click.echo(f"\t{index + 1} - {option}") options_indexes = self._get_options_indexes(base=1) choices = list(map(str, options_indexes)) choice = click.prompt( text="Choice", - default=resolved_default_answer, + default=default_answer, show_choices=False, type=click.Choice(choices), + show_default=default_answer is not None, ) return self._options[int(choice) - 1] @@ -245,6 +283,7 @@ def create_question_from_json(question_json: Dict) -> Question: options = question_json.get("options") default = question_json.get("default") is_required = question_json.get("isRequired") + allow_autofill = question_json.get("allowAutofill") next_question_map = question_json.get("nextQuestion") default_next_question = question_json.get("defaultNextQuestion") kind_str = question_json.get("kind") @@ -256,6 +295,7 @@ def create_question_from_json(question_json: Dict) -> Question: "text": text, "default": default, "is_required": is_required, + "allow_autofill": allow_autofill, "next_question_map": 
next_question_map, "default_next_question_key": default_next_question, } diff --git a/samcli/lib/cookiecutter/template.py b/samcli/lib/cookiecutter/template.py index c7d643bb43..46b851985e 100644 --- a/samcli/lib/cookiecutter/template.py +++ b/samcli/lib/cookiecutter/template.py @@ -3,15 +3,17 @@ values of the context and how to generate a project from the given template and provided context """ import logging -from typing import Any, Dict, List, Optional +from typing import Dict, List, Optional + from cookiecutter.exceptions import RepositoryNotFound, UnknownRepoType from cookiecutter.main import cookiecutter + from samcli.commands.exceptions import UserException from samcli.lib.init.arbitrary_project import generate_non_cookiecutter_project +from .exceptions import GenerateProjectFailedError, InvalidLocationError, PreprocessingError, PostprocessingError from .interactive_flow import InteractiveFlow from .plugin import Plugin from .processor import Processor -from .exceptions import GenerateProjectFailedError, InvalidLocationError, PreprocessingError, PostprocessingError LOG = logging.getLogger(__name__) @@ -41,6 +43,8 @@ class Template: An optional series of plugins to be plugged in. A plugin defines its own interactive_flow, preprocessor and postprocessor. A plugin is a sub-set of the template, if there is a common behavior among multiple templates, it is better to be extracted to a plugin that can then be plugged in to each of these templates. + metadata: Optional[Dict] + An optional dictionary with extra information about the template Methods ------- @@ -61,6 +65,7 @@ def __init__( preprocessors: Optional[List[Processor]] = None, postprocessors: Optional[List[Processor]] = None, plugins: Optional[List[Plugin]] = None, + metadata: Optional[Dict] = None, ): """ Initialize the class @@ -84,6 +89,8 @@ def __init__( An optional series of plugins to be plugged in. A plugin defines its own interactive_flow, preprocessor and postprocessor. A plugin is a sub-set of the template, if there is a common behavior among multiple templates, it is better to be extracted to a plugin that can then be plugged in to each of these templates. + metadata: Optional[Dict] + An optional dictionary with extra information about the template """ self._location = location self._interactive_flows = interactive_flows or [] @@ -97,8 +104,9 @@ def __init__( self._preprocessors.append(plugin.preprocessor) if plugin.postprocessor: self._postprocessors.append(plugin.postprocessor) + self.metadata = metadata - def run_interactive_flows(self) -> Dict: + def run_interactive_flows(self, context: Optional[Dict] = None) -> Dict: """ prompt the user a series of questions' flows and gather the answers to create the cookiecutter context. The questions are identified by keys. If multiple questions, whether within the same flow or across @@ -112,14 +120,14 @@ def run_interactive_flows(self) -> Dict: A Dictionary in the form of {question.key: answer} representing user's answers to the flows' questions """ try: - context: Dict[str, Any] = {} + context = context if context else {} for flow in self._interactive_flows: context = flow.run(context) return context except Exception as e: raise UserException(str(e), wrapped_from=e.__class__.__name__) from e - def generate_project(self, context: Dict): + def generate_project(self, context: Dict, output_dir: str) -> None: """ Generates a project based on this cookiecutter template and the given context. 
The context is first processed and manipulated by series of preprocessors(if any) then the project is generated and finally @@ -129,6 +137,8 @@ def generate_project(self, context: Dict): ---------- context: Dict the cookiecutter context to fulfill the values of cookiecutter.json keys + output_dir: str + the directory where project will be generated in Raise: ------ @@ -144,7 +154,13 @@ def generate_project(self, context: Dict): try: LOG.debug("Baking a new template with cookiecutter with all parameters") - cookiecutter(template=self._location, output_dir=".", no_input=True, extra_context=context) + cookiecutter( + template=self._location, + output_dir=output_dir, + no_input=True, + extra_context=context, + overwrite_if_exists=True, + ) except RepositoryNotFound as e: # cookiecutter.json is not found in the template. Let's just clone it directly without # using cookiecutter and call it done. diff --git a/samcli/lib/pipeline/__init__.py b/samcli/lib/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/pipeline/bootstrap/__init__.py b/samcli/lib/pipeline/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/pipeline/bootstrap/resource.py b/samcli/lib/pipeline/bootstrap/resource.py new file mode 100644 index 0000000000..a7b39dd965 --- /dev/null +++ b/samcli/lib/pipeline/bootstrap/resource.py @@ -0,0 +1,138 @@ +""" Represents AWS resource""" +from typing import Optional + + +class ARNParts: + """ + Decompose a given ARN into its parts https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html + + Attributes + ---------- + partition: str + the partition part(AWS, aws-cn or aws-us-gov) of the ARN + service: str + the service part(S3, IAM, ECR, ...etc) of the ARN + region: str + the AWS region part(us-east-1, eu-west-1, ...etc) of the ARN + account-id: str + the account-id part of the ARN + resource-id: str + the resource-id part of the ARN + resource-type: str + the resource-type part of the ARN + """ + + partition: str + service: str + region: str + account_id: str + resource_id: str + + def __init__(self, arn: str) -> None: + parts = arn.split(":") + try: + [_, self.partition, self.service, self.region, self.account_id, self.resource_id] = parts + except ValueError as ex: + raise ValueError(f"Invalid ARN ({arn})") from ex + + +class Resource: + """ + Represents an AWS resource + + Attributes + ---------- + arn: str + the ARN of the resource + comment: str + the comment of the resource + is_user_provided: bool + True if the user provided the ARN of the resource during the initialization. It indicates whether this pipeline- + resource is provided by the user or created by SAM during `sam pipeline bootstrap` + + Methods + ------- + name(self) -> Optional[str]: + extracts and returns the resource name from its ARN + """ + + def __init__(self, arn: Optional[str], comment: Optional[str]) -> None: + self.arn: Optional[str] = arn + self.comment: Optional[str] = comment + self.is_user_provided: bool = bool(arn) + + def name(self) -> Optional[str]: + """ + extracts and returns the resource name from its ARN + Raises + ------ + ValueError if the ARN is invalid + """ + if not self.arn: + return None + arn_parts: ARNParts = ARNParts(arn=self.arn) + return arn_parts.resource_id + + +class IAMUser(Resource): + """ + Represents an AWS IAM User resource + Attributes + ---------- + access_key_id: Optional[str] + holds the AccessKeyId of the credential of this IAM user, if any. 
+ secret_access_key: Optional[str] + holds the SecretAccessKey of the credential of this IAM user, if any. + """ + + def __init__( + self, + arn: Optional[str], + comment: Optional[str], + access_key_id: Optional[str] = None, + secret_access_key: Optional[str] = None, + ) -> None: + self.access_key_id: Optional[str] = access_key_id + self.secret_access_key: Optional[str] = secret_access_key + super().__init__(arn=arn, comment=comment) + + +class S3Bucket(Resource): + """ + Represents an AWS S3Bucket resource + Attributes + ---------- + kms_key_arn: Optional[str] + The ARN of the KMS key used in encrypting this S3Bucket, if any. + """ + + def __init__(self, arn: Optional[str], comment: Optional[str], kms_key_arn: Optional[str] = None) -> None: + self.kms_key_arn: Optional[str] = kms_key_arn + super().__init__(arn=arn, comment=comment) + + +class ECRImageRepository(Resource): + """ Represents an AWS ECR image repository resource """ + + def __init__(self, arn: Optional[str], comment: Optional[str]) -> None: + super().__init__(arn=arn, comment=comment) + + def get_uri(self) -> Optional[str]: + """ + extracts and returns the URI of the given ECR image repository from its ARN + see https://docs.aws.amazon.com/AmazonECR/latest/userguide/Registries.html + Raises + ------ + ValueError if the ARN is invalid + """ + if not self.arn: + return None + arn_parts: ARNParts = ARNParts(self.arn) + # ECR's resource_id contains the resource-type("resource") which is excluded from the URL + # from docs: https://docs.aws.amazon.com/AmazonECR/latest/userguide/security_iam_service-with-iam.html + # ECR's ARN: arn:${Partition}:ecr:${Region}:${Account}:repository/${Repository-name} + if not arn_parts.resource_id.startswith("repository/"): + raise ValueError(f"Invalid ECR ARN ({self.arn}), can't extract the URL from it.") + i = len("repository/") + repo_name = arn_parts.resource_id[i:] + return f"{arn_parts.account_id}.dkr.ecr.{arn_parts.region}.amazonaws.com/{repo_name}" diff --git a/samcli/lib/pipeline/bootstrap/stage.py b/samcli/lib/pipeline/bootstrap/stage.py new file mode 100644 index 0000000000..d98081237b --- /dev/null +++ b/samcli/lib/pipeline/bootstrap/stage.py @@ -0,0 +1,330 @@ +""" Application Environment """ +import json +import os +import pathlib +import re +from itertools import chain +from typing import Dict, List, Optional, Tuple + +import boto3 +import click + +from samcli.lib.config.samconfig import SamConfig +from samcli.lib.utils.colors import Colored +from samcli.lib.utils.managed_cloudformation_stack import manage_stack, StackOutput +from samcli.lib.pipeline.bootstrap.resource import Resource, IAMUser, ECRImageRepository + +CFN_TEMPLATE_PATH = str(pathlib.Path(os.path.dirname(__file__))) +STACK_NAME_PREFIX = "aws-sam-cli-managed" +STAGE_RESOURCES_STACK_NAME_SUFFIX = "pipeline-resources" +STAGE_RESOURCES_CFN_TEMPLATE = "stage_resources.yaml" +PIPELINE_USER = "pipeline_user" +PIPELINE_EXECUTION_ROLE = "pipeline_execution_role" +CLOUDFORMATION_EXECUTION_ROLE = "cloudformation_execution_role" +ARTIFACTS_BUCKET = "artifacts_bucket" +ECR_IMAGE_REPOSITORY = "image_repository" +REGION = "region" + + +class Stage: + """ + Represents an application stage: Beta, Gamma, Prod ...etc + + Attributes + ---------- + name: str + The name of the environment + aws_profile: Optional[str] + The named AWS profile (in user's machine) of the AWS account to deploy this environment to. + aws_region: Optional[str] + The AWS region to deploy this environment to. 
+    pipeline_user: IAMUser
+        The IAM User having its AccessKeyId and SecretAccessKey credentials shared with the CI/CD system
+    pipeline_execution_role: Resource
+        The IAM role assumed by the pipeline-user to get access to the AWS account and execute the
+        CloudFormation stack.
+    cloudformation_execution_role: Resource
+        The IAM role assumed by the CloudFormation service to execute the CloudFormation stack.
+    artifacts_bucket: Resource
+        The S3 bucket that holds the SAM build artifacts of the application's CFN template.
+    create_image_repository: bool
+        A boolean flag that determines whether the user wants to create an ECR image repository or not
+    image_repository: ECRImageRepository
+        The ECR image repository that holds the container images of Lambda functions with the Image package type
+
+    Methods:
+    --------
+    did_user_provide_all_required_resources(self) -> bool:
+        checks whether all of the environment's required resources (pipeline_user, pipeline_execution_role,
+        cloudformation_execution_role, artifacts_bucket and image_repository) are provided by the user.
+    bootstrap(self, confirm_changeset: bool = True) -> bool:
+        deploys the CFN template ./stage_resources.yaml to the AWS account identified by the aws_profile and
+        aws_region member fields. If aws_profile is not provided, it falls back to the default boto3 credential
+        resolution. Note that the ./stage_resources.yaml template accepts the ARNs of already existing resources
+        (if any) as parameters; it skips the creation of those resources but uses the ARNs to set the proper
+        permissions on the missing resources (those created by the template).
+    save_config(self, config_dir: str, filename: str, cmd_names: List[str]):
+        saves the Artifacts bucket name, the ECR image repository URI and the ARNs of pipeline_user,
+        pipeline_execution_role and cloudformation_execution_role to the "pipelineconfig.toml" file so that
+        they can be auto-filled during the `sam pipeline init` command.
+    print_resources_summary(self) -> None:
+        prints the ARNs of the created and provided resources to the screen (console).
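+
+    Example
+    -------
+    A purely illustrative sketch (assumes resolvable AWS credentials):
+
+        stage = Stage(name="dev", aws_region="us-east-1")
+        if stage.bootstrap(confirm_changeset=False):
+            stage.save_config_safe(
+                config_dir=".aws-sam/pipeline",
+                filename="pipelineconfig.toml",
+                cmd_names=["pipeline", "bootstrap"],
+            )
+            stage.print_resources_summary()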
+ """ + + def __init__( + self, + name: str, + aws_profile: Optional[str] = None, + aws_region: Optional[str] = None, + pipeline_user_arn: Optional[str] = None, + pipeline_execution_role_arn: Optional[str] = None, + cloudformation_execution_role_arn: Optional[str] = None, + artifacts_bucket_arn: Optional[str] = None, + create_image_repository: bool = False, + image_repository_arn: Optional[str] = None, + ) -> None: + self.name: str = name + self.aws_profile: Optional[str] = aws_profile + self.aws_region: Optional[str] = aws_region + self.pipeline_user: IAMUser = IAMUser(arn=pipeline_user_arn, comment="Pipeline IAM user") + self.pipeline_execution_role: Resource = Resource( + arn=pipeline_execution_role_arn, comment="Pipeline execution role" + ) + self.cloudformation_execution_role: Resource = Resource( + arn=cloudformation_execution_role_arn, comment="CloudFormation execution role" + ) + self.artifacts_bucket: Resource = Resource(arn=artifacts_bucket_arn, comment="Artifact bucket") + self.create_image_repository: bool = create_image_repository + self.image_repository: ECRImageRepository = ECRImageRepository( + arn=image_repository_arn, comment="ECR image repository" + ) + self.color = Colored() + + def did_user_provide_all_required_resources(self) -> bool: + """Check if the user provided all of the environment resources or not""" + return all(resource.is_user_provided for resource in self._get_resources()) + + def _get_non_user_provided_resources_msg(self) -> str: + resource_comments = chain.from_iterable( + [ + [] if self.pipeline_user.is_user_provided else [self.pipeline_user.comment], + [] if self.pipeline_execution_role.is_user_provided else [self.pipeline_execution_role.comment], + [] + if self.cloudformation_execution_role.is_user_provided + else [self.cloudformation_execution_role.comment], + [] if self.artifacts_bucket.is_user_provided else [self.artifacts_bucket.comment], + [] + if self.image_repository.is_user_provided or not self.create_image_repository + else [self.image_repository.comment], + ] + ) + return "\n".join([f"\t- {comment}" for comment in resource_comments]) + + def bootstrap(self, confirm_changeset: bool = True) -> bool: + """ + Deploys the CFN template(./stage_resources.yaml) which deploys: + * Pipeline IAM User + * Pipeline execution IAM role + * CloudFormation execution IAM role + * Artifacts' S3 Bucket + * ECR image repository + to the AWS account associated with the given environment. It will not redeploy the stack if already exists. 
+ This CFN template accepts the ARNs of the resources as parameters and will not create a resource if already + provided, this way we can conditionally create a resource only if the user didn't provide it + + THIS METHOD UPDATES THE STATE OF THE CALLING INSTANCE(self) IT WILL SET THE VALUES OF THE RESOURCES ATTRIBUTES + + Parameters + ---------- + confirm_changeset: bool + if set to false, the stage_resources.yaml CFN template will directly be deployed, otherwise, + the user will be prompted for confirmation + + Returns True if bootstrapped, otherwise False + """ + + if self.did_user_provide_all_required_resources(): + click.secho( + self.color.yellow(f"\nAll required resources for the {self.name} environment exist, skipping creation.") + ) + return True + + missing_resources_msg: str = self._get_non_user_provided_resources_msg() + click.echo( + f"This will create the following required resources for the '{self.name}' environment: \n" + f"{missing_resources_msg}" + ) + if confirm_changeset: + confirmed: bool = click.confirm("Should we proceed with the creation?") + if not confirmed: + click.secho(self.color.red("Canceling pipeline bootstrap creation.")) + return False + + environment_resources_template_body = Stage._read_template(STAGE_RESOURCES_CFN_TEMPLATE) + output: StackOutput = manage_stack( + stack_name=self._get_stack_name(), + region=self.aws_region, + profile=self.aws_profile, + template_body=environment_resources_template_body, + parameter_overrides={ + "PipelineUserArn": self.pipeline_user.arn or "", + "PipelineExecutionRoleArn": self.pipeline_execution_role.arn or "", + "CloudFormationExecutionRoleArn": self.cloudformation_execution_role.arn or "", + "ArtifactsBucketArn": self.artifacts_bucket.arn or "", + "CreateImageRepository": "true" if self.create_image_repository else "false", + "ImageRepositoryArn": self.image_repository.arn or "", + }, + ) + + pipeline_user_secret_sm_id = output.get("PipelineUserSecretKey") + + self.pipeline_user.arn = output.get("PipelineUser") + if pipeline_user_secret_sm_id: + ( + self.pipeline_user.access_key_id, + self.pipeline_user.secret_access_key, + ) = Stage._get_pipeline_user_secret_pair(pipeline_user_secret_sm_id, self.aws_profile, self.aws_region) + self.pipeline_execution_role.arn = output.get("PipelineExecutionRole") + self.cloudformation_execution_role.arn = output.get("CloudFormationExecutionRole") + self.artifacts_bucket.arn = output.get("ArtifactsBucket") + self.image_repository.arn = output.get("ImageRepository") + return True + + @staticmethod + def _get_pipeline_user_secret_pair( + secret_manager_arn: str, profile: Optional[str], region: Optional[str] + ) -> Tuple[str, str]: + """ + Helper method to fetch pipeline user's AWS Credentials from secrets manager. + SecretString need to be in following JSON format: + { + "aws_access_key_id": "AWSSECRETACCESSKEY123", + "aws_secret_access_key": "mYSuperSecretDummyKey" + } + Parameters + ---------- + secret_manager_arn: + ARN of secret manager entry which holds pipeline user key. + profile: + The named AWS profile (in user's machine) of the AWS account to deploy this environment to. + region: + The AWS region to deploy this environment to. + + Returns tuple of aws_access_key_id and aws_secret_access_key. 
+ + """ + session = boto3.Session(profile_name=profile, region_name=region if region else None) # type: ignore + secrets_manager_client = session.client("secretsmanager") + response = secrets_manager_client.get_secret_value(SecretId=secret_manager_arn) + secret_string = response["SecretString"] + secret_json = json.loads(secret_string) + return secret_json["aws_access_key_id"], secret_json["aws_secret_access_key"] + + @staticmethod + def _read_template(template_file_name: str) -> str: + template_path: str = os.path.join(CFN_TEMPLATE_PATH, template_file_name) + with open(template_path, "r", encoding="utf-8") as fp: + template_body = fp.read() + return template_body + + def save_config(self, config_dir: str, filename: str, cmd_names: List[str]) -> None: + """ + save the Artifacts bucket name, ECR image repository URI and ARNs of pipeline_user, pipeline_execution_role and + cloudformation_execution_role to the given filename and directory. + + Parameters + ---------- + config_dir: str + the directory of the toml file to save to + filename: str + the name of the toml file to save to + cmd_names: List[str] + nested command name to scope the saved configs to inside the toml file + + Raises + ------ + ValueError: if the artifacts_bucket or ImageRepository ARNs are invalid + """ + + samconfig: SamConfig = SamConfig(config_dir=config_dir, filename=filename) + + if self.pipeline_user.arn: + samconfig.put(cmd_names=cmd_names, section="parameters", key=PIPELINE_USER, value=self.pipeline_user.arn) + + # Computing Artifacts bucket name and ECR image repository URL may through an exception if the ARNs are wrong + # Let's swallow such an exception to be able to save the remaining resources + try: + artifacts_bucket_name: Optional[str] = self.artifacts_bucket.name() + except ValueError: + artifacts_bucket_name = "" + try: + image_repository_uri: Optional[str] = self.image_repository.get_uri() or "" + except ValueError: + image_repository_uri = "" + + environment_specific_configs: Dict[str, Optional[str]] = { + PIPELINE_EXECUTION_ROLE: self.pipeline_execution_role.arn, + CLOUDFORMATION_EXECUTION_ROLE: self.cloudformation_execution_role.arn, + ARTIFACTS_BUCKET: artifacts_bucket_name, + # even image repository can be None, we want to save it as empty string + # so that pipeline init command can pick it up + ECR_IMAGE_REPOSITORY: image_repository_uri, + REGION: self.aws_region, + } + + for key, value in environment_specific_configs.items(): + if value is not None: + samconfig.put( + cmd_names=cmd_names, + section="parameters", + key=key, + value=value, + env=self.name, + ) + + samconfig.flush() + + def save_config_safe(self, config_dir: str, filename: str, cmd_names: List[str]) -> None: + """ + A safe version of save_config method that doesn't raise any exception + """ + try: + self.save_config(config_dir, filename, cmd_names) + except Exception: + pass + + def _get_resources(self) -> List[Resource]: + resources = [ + self.pipeline_user, + self.pipeline_execution_role, + self.cloudformation_execution_role, + self.artifacts_bucket, + ] + if self.create_image_repository or self.image_repository.arn: # Image Repository is optional + resources.append(self.image_repository) + return resources + + def print_resources_summary(self) -> None: + """prints to the screen(console) the ARNs of the created and provided resources.""" + + provided_resources = [] + created_resources = [] + for resource in self._get_resources(): + if resource.is_user_provided: + provided_resources.append(resource) + else: + 
created_resources.append(resource) + + if created_resources: + click.secho(self.color.green("The following resources were created in your account:")) + for resource in created_resources: + click.secho(self.color.green(f"\t- {resource.comment}")) + + if not self.pipeline_user.is_user_provided: + click.secho(self.color.green("Pipeline IAM user credential:")) + click.secho(self.color.green(f"\tAWS_ACCESS_KEY_ID: {self.pipeline_user.access_key_id}")) + click.secho(self.color.green(f"\tAWS_SECRET_ACCESS_KEY: {self.pipeline_user.secret_access_key}")) + + def _get_stack_name(self) -> str: + sanitized_stage_name: str = re.sub("[^0-9a-zA-Z]+", "-", self.name) + return f"{STACK_NAME_PREFIX}-{sanitized_stage_name}-{STAGE_RESOURCES_STACK_NAME_SUFFIX}" diff --git a/samcli/lib/pipeline/bootstrap/stage_resources.yaml b/samcli/lib/pipeline/bootstrap/stage_resources.yaml new file mode 100644 index 0000000000..bcc5e94423 --- /dev/null +++ b/samcli/lib/pipeline/bootstrap/stage_resources.yaml @@ -0,0 +1,358 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +Parameters: + PipelineUserArn: + Type: String + PipelineExecutionRoleArn: + Type: String + CloudFormationExecutionRoleArn: + Type: String + ArtifactsBucketArn: + Type: String + CreateImageRepository: + Type: String + Default: false + AllowedValues: [true, false] + ImageRepositoryArn: + Type: String + +Conditions: + MissingPipelineUser: !Equals [!Ref PipelineUserArn, ""] + MissingPipelineExecutionRole: !Equals [!Ref PipelineExecutionRoleArn, ""] + MissingCloudFormationExecutionRole: !Equals [!Ref CloudFormationExecutionRoleArn, ""] + MissingArtifactsBucket: !Equals [!Ref ArtifactsBucketArn, ""] + ShouldHaveImageRepository: !Or [!Equals [!Ref CreateImageRepository, "true"], !Not [!Equals [!Ref ImageRepositoryArn, ""]]] + MissingImageRepository: !And [!Condition ShouldHaveImageRepository, !Equals [!Ref ImageRepositoryArn, ""]] + +Resources: + PipelineUser: + Type: AWS::IAM::User + Condition: MissingPipelineUser + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + Policies: + - PolicyName: AssumeRoles + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "sts:AssumeRole" + Resource: "*" + Condition: + StringEquals: + aws:ResourceTag/Role: pipeline-execution-role + + PipelineUserAccessKey: + Type: AWS::IAM::AccessKey + Condition: MissingPipelineUser + Properties: + Serial: 1 + Status: Active + UserName: !Ref PipelineUser + + PipelineUserSecretKey: + Type: AWS::SecretsManager::Secret + Condition: MissingPipelineUser + Properties: + SecretString: !Sub '{"aws_access_key_id": "${PipelineUserAccessKey}", "aws_secret_access_key": "${PipelineUserAccessKey.SecretAccessKey}"}' + + CloudFormationExecutionRole: + Type: AWS::IAM::Role + Condition: MissingCloudFormationExecutionRole + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: cloudformation.amazonaws.com + Action: + - 'sts:AssumeRole' + Policies: + - PolicyName: GrantCloudFormationFullAccess + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: '*' + Resource: '*' + + PipelineExecutionRole: + Type: AWS::IAM::Role + Condition: MissingPipelineExecutionRole + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + - Key: Role + Value: pipeline-execution-role + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + AWS: + - Fn::If: + - 
MissingPipelineUser + - !GetAtt PipelineUser.Arn + - !Ref PipelineUserArn + Action: + - 'sts:AssumeRole' + - Effect: Allow + Principal: + # Allow roles with tag Role=aws-sam-pipeline-codebuild-service-role to assume this role. + # This is required when CodePipeline is the CI/CD system of choice. + AWS: + - !If + - MissingPipelineUser + - !Ref AWS::AccountId + - !Select [4, !Split [':', !Ref PipelineUserArn]] + Action: + - 'sts:AssumeRole' + Condition: + StringEquals: + aws:PrincipalTag/Role: aws-sam-pipeline-codebuild-service-role + + ArtifactsBucket: + Type: AWS::S3::Bucket + Condition: MissingArtifactsBucket + DeletionPolicy: "Retain" + Properties: + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + LoggingConfiguration: + DestinationBucketName: + !Ref ArtifactsLoggingBucket + LogFilePrefix: "artifacts-logs" + VersioningConfiguration: + Status: Enabled + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + + ArtifactsBucketPolicy: + Type: AWS::S3::BucketPolicy + Condition: MissingArtifactsBucket + Properties: + Bucket: !Ref ArtifactsBucket + PolicyDocument: + Statement: + - Effect: "Deny" + Action: "s3:*" + Principal: "*" + Resource: + - !Join [ '',[ !GetAtt ArtifactsBucket.Arn, '/*' ] ] + - !GetAtt ArtifactsBucket.Arn + Condition: + Bool: + aws:SecureTransport: false + - Effect: "Allow" + Action: + - 's3:GetObject*' + - 's3:PutObject*' + - 's3:GetBucket*' + - 's3:List*' + Resource: + - !Join ['',[!GetAtt ArtifactsBucket.Arn, '/*']] + - !GetAtt ArtifactsBucket.Arn + Principal: + AWS: + - Fn::If: + - MissingPipelineExecutionRole + - !GetAtt PipelineExecutionRole.Arn + - !Ref PipelineExecutionRoleArn + - Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + + ArtifactsLoggingBucket: + Type: AWS::S3::Bucket + Condition: MissingArtifactsBucket + DeletionPolicy: "Retain" + Properties: + AccessControl: "LogDeliveryWrite" + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + VersioningConfiguration: + Status: Enabled + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + + ArtifactsLoggingBucketPolicy: + Type: AWS::S3::BucketPolicy + Condition: MissingArtifactsBucket + Properties: + Bucket: !Ref ArtifactsLoggingBucket + PolicyDocument: + Statement: + - Effect: "Deny" + Action: "s3:*" + Principal: "*" + Resource: + - !Join [ '',[ !GetAtt ArtifactsLoggingBucket.Arn, '/*' ] ] + - !GetAtt ArtifactsLoggingBucket.Arn + Condition: + Bool: + aws:SecureTransport: false + + PipelineExecutionRolePermissionPolicy: + Type: AWS::IAM::Policy + Condition: MissingPipelineExecutionRole + Properties: + PolicyName: PipelineExecutionRolePermissions + PolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: 'iam:PassRole' + Resource: + Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + - Effect: Allow + Action: + - "cloudformation:CreateChangeSet" + - "cloudformation:DescribeChangeSet" + - "cloudformation:ExecuteChangeSet" + - "cloudformation:DescribeStackEvents" + - "cloudformation:DescribeStacks" + - "cloudformation:GetTemplateSummary" + - "cloudformation:DescribeStackResource" + Resource: '*' + - Effect: Allow + Action: + - 's3:GetObject*' + - 's3:PutObject*' + - 's3:GetBucket*' + - 's3:List*' + Resource: + Fn::If: + - MissingArtifactsBucket + - - !Join [ '',[ !GetAtt ArtifactsBucket.Arn, '/*' ] ] + - 
!GetAtt ArtifactsBucket.Arn + - - !Join [ '',[ !Ref ArtifactsBucketArn, '/*' ] ] + - !Ref ArtifactsBucketArn + - Fn::If: + - ShouldHaveImageRepository + - Effect: "Allow" + Action: "ecr:GetAuthorizationToken" + Resource: "*" + - !Ref AWS::NoValue + - Fn::If: + - ShouldHaveImageRepository + - Effect: "Allow" + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + - "ecr:BatchCheckLayerAvailability" + - "ecr:PutImage" + - "ecr:InitiateLayerUpload" + - "ecr:UploadLayerPart" + - "ecr:CompleteLayerUpload" + Resource: + Fn::If: + - MissingImageRepository + - !GetAtt ImageRepository.Arn + - !Ref ImageRepositoryArn + - !Ref AWS::NoValue + Roles: + - !Ref PipelineExecutionRole + + ImageRepository: + Type: AWS::ECR::Repository + Condition: MissingImageRepository + Properties: + RepositoryPolicyText: + Version: "2012-10-17" + Statement: + - Sid: LambdaECRImageRetrievalPolicy + Effect: Allow + Principal: + Service: lambda.amazonaws.com + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + - "ecr:GetRepositoryPolicy" + - "ecr:SetRepositoryPolicy" + - "ecr:DeleteRepositoryPolicy" + - Sid: AllowPushPull + Effect: Allow + Principal: + AWS: + - Fn::If: + - MissingPipelineExecutionRole + - !GetAtt PipelineExecutionRole.Arn + - !Ref PipelineExecutionRoleArn + - Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + - "ecr:BatchCheckLayerAvailability" + - "ecr:PutImage" + - "ecr:InitiateLayerUpload" + - "ecr:UploadLayerPart" + - "ecr:CompleteLayerUpload" + +Outputs: + PipelineUser: + Description: ARN of the Pipeline IAM User + Value: + Fn::If: + - MissingPipelineUser + - !GetAtt PipelineUser.Arn + - !Ref PipelineUserArn + + PipelineUserSecretKey: + Description: AWS Access Key and Secret Key of pipeline user. 
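+    # Note: the output value is the secret's ARN; `sam pipeline bootstrap` reads
+    # the actual key pair from Secrets Manager via GetSecretValue.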
+ Condition: MissingPipelineUser + Value: !Ref PipelineUserSecretKey + + CloudFormationExecutionRole: + Description: ARN of the IAM Role(CloudFormationExecutionRole) + Value: + Fn::If: + - MissingCloudFormationExecutionRole + - !GetAtt CloudFormationExecutionRole.Arn + - !Ref CloudFormationExecutionRoleArn + + PipelineExecutionRole: + Description: ARN of the IAM Role(PipelineExecutionRole) + Value: + Fn::If: + - MissingPipelineExecutionRole + - !GetAtt PipelineExecutionRole.Arn + - !Ref PipelineExecutionRoleArn + + ArtifactsBucket: + Description: ARN of the Artifacts bucket + Value: + Fn::If: + - MissingArtifactsBucket + - !GetAtt ArtifactsBucket.Arn + - !Ref ArtifactsBucketArn + + ImageRepository: + Description: ARN of the ECR image repository + Condition: ShouldHaveImageRepository + Value: + Fn::If: + - MissingImageRepository + - !GetAtt ImageRepository.Arn + - !Ref ImageRepositoryArn diff --git a/samcli/lib/utils/colors.py b/samcli/lib/utils/colors.py index 84e3cbdbd7..84767f0fec 100644 --- a/samcli/lib/utils/colors.py +++ b/samcli/lib/utils/colors.py @@ -58,6 +58,10 @@ def underline(self, msg): """Underline the input""" return click.style(msg, underline=True) if self.colorize else msg + def bold(self, msg): + """Bold the input""" + return click.style(msg, bold=True) if self.colorize else msg + def _color(self, msg, color): """Internal helper method to add colors to input""" kwargs = {"fg": color} diff --git a/samcli/lib/utils/defaults.py b/samcli/lib/utils/defaults.py new file mode 100644 index 0000000000..4a07b113ac --- /dev/null +++ b/samcli/lib/utils/defaults.py @@ -0,0 +1,8 @@ +""" +Contains helpers for providing default values +""" +from botocore.session import get_session + + +def get_default_aws_region() -> str: + return get_session().get_config_variable("region") or "us-east-1" diff --git a/samcli/lib/utils/git_repo.py b/samcli/lib/utils/git_repo.py index 33e4597726..ddc7fba52f 100644 --- a/samcli/lib/utils/git_repo.py +++ b/samcli/lib/utils/git_repo.py @@ -132,7 +132,7 @@ def clone(self, clone_dir: Path, clone_name: str, replace_existing: bool = False output = clone_error.output.decode("utf-8") if "not found" in output.lower(): LOG.warning("WARN: Could not clone repo %s", self.url, exc_info=clone_error) - raise CloneRepoException from clone_error + raise CloneRepoException(output) from clone_error finally: self.clone_attempted = True diff --git a/samcli/lib/utils/managed_cloudformation_stack.py b/samcli/lib/utils/managed_cloudformation_stack.py index 25973fbc8b..29d148a7d9 100644 --- a/samcli/lib/utils/managed_cloudformation_stack.py +++ b/samcli/lib/utils/managed_cloudformation_stack.py @@ -1,20 +1,17 @@ """ Bootstrap's user's development environment by creating cloud resources required by SAM CLI """ - import logging +from collections.abc import Collection +from typing import cast, Dict, List, Optional, Union import boto3 - import click - from botocore.config import Config from botocore.exceptions import ClientError, BotoCoreError, NoRegionError, NoCredentialsError, ProfileNotFound from samcli.commands.exceptions import UserException, CredentialsError, RegionError - -SAM_CLI_STACK_PREFIX = "aws-sam-cli-managed-" LOG = logging.getLogger(__name__) @@ -25,10 +22,45 @@ def __init__(self, ex): super().__init__(message=message_fmt.format(ex=self.ex)) -def manage_stack(profile, region, stack_name, template_body): +class StackOutput: + def __init__(self, stack_output: List[Dict[str, str]]): + self._stack_output: List[Dict[str, str]] = stack_output + + def get(self, key) -> 
Optional[str]: + try: + return next(o for o in self._stack_output if o.get("OutputKey") == key).get("OutputValue") + except StopIteration: + return None + + +def manage_stack( + region: Optional[str], + stack_name: str, + template_body: str, + profile: Optional[str] = None, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> StackOutput: + """ + get or create a CloudFormation stack + + Parameters + ---------- + region: str + AWS region for the CloudFormation stack + stack_name: str + CloudFormation stack name + template_body: str + CloudFormation template's content + profile: Optional[str] + AWS named profile for the AWS account + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] + Values of template parameters, if any. + + Returns: Stack output section(list of OutputKey, OutputValue pairs) + """ try: if profile: - session = boto3.Session(profile_name=profile, region_name=region if region else None) + session = boto3.Session(profile_name=profile, region_name=region if region else None) # type: ignore cloudformation_client = session.client("cloudformation") else: cloudformation_client = boto3.client( @@ -51,32 +83,41 @@ def manage_stack(profile, region, stack_name, template_body): "Error Setting Up Managed Stack Client: Unable to resolve a region. " "Please provide a region via the --region parameter or by the AWS_REGION environment variable." ) from ex - return _create_or_get_stack(cloudformation_client, stack_name, template_body) + return _create_or_get_stack(cloudformation_client, stack_name, template_body, parameter_overrides) -def _create_or_get_stack(cloudformation_client, stack_name, template_body): +# Todo Add _update_stack to handle the case when the values of the stack parameter got changed +def _create_or_get_stack( + cloudformation_client, + stack_name: str, + template_body: str, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> StackOutput: try: ds_resp = cloudformation_client.describe_stacks(StackName=stack_name) stacks = ds_resp["Stacks"] stack = stacks[0] click.echo("\n\tLooking for resources needed for deployment: Found!") - _check_sanity_of_stack(stack, stack_name) - return stack["Outputs"] + _check_sanity_of_stack(stack) + stack_outputs = cast(List[Dict[str, str]], stack["Outputs"]) + return StackOutput(stack_outputs) except ClientError: click.echo("\n\tLooking for resources needed for deployment: Not found.") try: stack = _create_stack( - cloudformation_client, stack_name, template_body + cloudformation_client, stack_name, template_body, parameter_overrides ) # exceptions are not captured from subcommands - _check_sanity_of_stack(stack, stack_name) - return stack["Outputs"] + _check_sanity_of_stack(stack) + stack_outputs = cast(List[Dict[str, str]], stack["Outputs"]) + return StackOutput(stack_outputs) except (ClientError, BotoCoreError) as ex: LOG.debug("Failed to create managed resources", exc_info=ex) raise ManagedStackError(str(ex)) from ex -def _check_sanity_of_stack(stack, stack_name): +def _check_sanity_of_stack(stack): + stack_name = stack.get("StackName") tags = stack.get("Tags", None) outputs = stack.get("Outputs", None) @@ -112,15 +153,23 @@ def _check_sanity_of_stack(stack, stack_name): raise UserException(msg) from ex -def _create_stack(cloudformation_client, stack_name, template_body): +def _create_stack( + cloudformation_client, + stack_name: str, + template_body: str, + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None, +): click.echo("\tCreating the required 
resources...") change_set_name = "InitialCreation" + parameters = _generate_stack_parameters(parameter_overrides) change_set_resp = cloudformation_client.create_change_set( StackName=stack_name, TemplateBody=template_body, Tags=[{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], ChangeSetType="CREATE", ChangeSetName=change_set_name, # this must be unique for the stack, but we only create so that's fine + Capabilities=["CAPABILITY_IAM"], + Parameters=parameters, ) stack_id = change_set_resp["StackId"] change_waiter = cloudformation_client.get_waiter("change_set_create_complete") @@ -134,3 +183,16 @@ def _create_stack(cloudformation_client, stack_name, template_body): stacks = ds_resp["Stacks"] click.echo("\tSuccessfully created!") return stacks[0] + + +def _generate_stack_parameters( + parameter_overrides: Optional[Dict[str, Union[str, List[str]]]] = None +) -> List[Dict[str, str]]: + parameters = [] + if parameter_overrides: + for key, value in parameter_overrides.items(): + if isinstance(value, Collection) and not isinstance(value, str): + # Assumption: values don't include commas or spaces. Need to refactor to handle such a case if needed. + value = ",".join(value) + parameters.append({"ParameterKey": key, "ParameterValue": value}) + return parameters diff --git a/samcli/lib/utils/profile.py b/samcli/lib/utils/profile.py new file mode 100644 index 0000000000..47d0242eee --- /dev/null +++ b/samcli/lib/utils/profile.py @@ -0,0 +1,10 @@ +""" +Module for aws profile related helpers +""" +from typing import List, cast + +from botocore.session import Session + + +def list_available_profiles() -> List[str]: + return cast(List[str], Session().available_profiles) diff --git a/samcli/yamlhelper.py b/samcli/yamlhelper.py index ca091e61cb..222c7b717e 100644 --- a/samcli/yamlhelper.py +++ b/samcli/yamlhelper.py @@ -18,7 +18,7 @@ # pylint: disable=too-many-ancestors import json -from typing import Dict, Optional +from typing import cast, Dict, Optional from botocore.compat import OrderedDict import yaml @@ -109,20 +109,20 @@ def _dict_constructor(loader, node): return OrderedDict(loader.construct_pairs(node)) -def yaml_parse(yamlstr): +def yaml_parse(yamlstr) -> Dict: """Parse a yaml string""" try: # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. 
- return json.loads(yamlstr, object_pairs_hook=OrderedDict) + return cast(Dict, json.loads(yamlstr, object_pairs_hook=OrderedDict)) except ValueError: yaml.SafeLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _dict_constructor) yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor) - return yaml.safe_load(yamlstr) + return cast(Dict, yaml.safe_load(yamlstr)) -def parse_yaml_file(file_path, extra_context: Optional[Dict] = None): +def parse_yaml_file(file_path, extra_context: Optional[Dict] = None) -> Dict: """ Read the file, do variable substitution, parse it as JSON/YAML diff --git a/tests/integration/pipeline/__init__.py b/tests/integration/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py new file mode 100644 index 0000000000..f82d27e357 --- /dev/null +++ b/tests/integration/pipeline/base.py @@ -0,0 +1,154 @@ +import os +import shutil +import logging +import uuid +from pathlib import Path +from typing import List, Optional, Set, Tuple, Any +from unittest import TestCase +from unittest.mock import Mock + +import boto3 +import botocore.exceptions +from botocore.exceptions import ClientError + +from samcli.lib.pipeline.bootstrap.stage import Stage + + +class PipelineBase(TestCase): + def base_command(self): + command = "sam" + if os.getenv("SAM_CLI_DEV"): + command = "samdev" + + return command + + +class InitIntegBase(PipelineBase): + generated_files: List[Path] = [] + + @classmethod + def setUpClass(cls) -> None: + # we need to compare the whole generated template, which is + # larger than normal diff size limit + cls.maxDiff = None + + def setUp(self) -> None: + super().setUp() + self.generated_files = [] + + def tearDown(self) -> None: + for generated_file in self.generated_files: + if generated_file.is_dir(): + shutil.rmtree(generated_file, ignore_errors=True) + elif generated_file.exists(): + generated_file.unlink() + super().tearDown() + + def get_init_command_list(self, with_bootstrap=False): + command_list = [self.base_command(), "pipeline", "init"] + if with_bootstrap: + command_list.append("--bootstrap") + return command_list + + +class BootstrapIntegBase(PipelineBase): + region = "us-east-1" + stack_names: List[str] + cf_client: Any + randomized_stage_suffix: str + + @classmethod + def setUpClass(cls): + cls.cf_client = boto3.client("cloudformation", region_name=cls.region) + cls.randomized_stage_suffix = uuid.uuid4().hex[-6:] + + def setUp(self): + self.stack_names = [] + super().setUp() + shutil.rmtree(os.path.join(os.getcwd(), ".aws-sam", "pipeline"), ignore_errors=True) + + def tearDown(self): + for stack_name in self.stack_names: + self._cleanup_s3_buckets(stack_name) + self.cf_client.delete_stack(StackName=stack_name) + shutil.rmtree(os.path.join(os.getcwd(), ".aws-sam", "pipeline"), ignore_errors=True) + super().tearDown() + + def _cleanup_s3_buckets(self, stack_name): + try: + stack_resources = self.cf_client.describe_stack_resources(StackName=stack_name) + buckets = [ + resource + for resource in stack_resources["StackResources"] + if resource["ResourceType"] == "AWS::S3::Bucket" + ] + s3_client = boto3.client("s3") + for bucket in buckets: + s3_client.delete_bucket(Bucket=bucket.get("PhysicalResourceId")) + except botocore.exceptions.ClientError: + """No need to fail in cleanup""" + + def get_bootstrap_command_list( + self, + no_interactive: bool = False, + stage_name: Optional[str] = None, + profile_name: Optional[str] = 
None, + region: Optional[str] = None, + pipeline_user: Optional[str] = None, + pipeline_execution_role: Optional[str] = None, + cloudformation_execution_role: Optional[str] = None, + bucket: Optional[str] = None, + create_image_repository: bool = False, + image_repository: Optional[str] = None, + no_confirm_changeset: bool = False, + ): + command_list = [self.base_command(), "pipeline", "bootstrap"] + + if no_interactive: + command_list += ["--no-interactive"] + if stage_name: + command_list += ["--stage", stage_name] + if profile_name: + command_list += ["--profile", profile_name] + if region: + command_list += ["--region", region] + if pipeline_user: + command_list += ["--pipeline-user", pipeline_user] + if pipeline_execution_role: + command_list += ["--pipeline-execution-role", pipeline_execution_role] + if cloudformation_execution_role: + command_list += ["--cloudformation-execution-role", cloudformation_execution_role] + if bucket: + command_list += ["--bucket", bucket] + if create_image_repository: + command_list += ["--create-image-repository"] + if image_repository: + command_list += ["--image-repository", image_repository] + if no_confirm_changeset: + command_list += ["--no-confirm-changeset"] + + return command_list + + def _extract_created_resource_logical_ids(self, stack_name: str) -> List[str]: + response = self.cf_client.describe_stack_resources(StackName=stack_name) + return [resource["LogicalResourceId"] for resource in response["StackResources"]] + + def _stack_exists(self, stack_name) -> bool: + try: + self.cf_client.describe_stacks(StackName=stack_name) + return True + except ClientError as ex: + if "does not exist" in ex.response.get("Error", {}).get("Message", ""): + return False + raise ex + + def _get_stage_and_stack_name(self, suffix: str = "") -> Tuple[str, str]: + # Method expects method name which can be a full path. Eg: test.integration.test_bootstrap_command.method_name + method_name = self.id().split(".")[-1] + stage_name = method_name.replace("_", "-") + suffix + "-" + self.randomized_stage_suffix + + mock_env = Mock() + mock_env.name = stage_name + stack_name = Stage._get_stack_name(mock_env) + + return stage_name, stack_name diff --git a/tests/integration/pipeline/test_bootstrap_command.py b/tests/integration/pipeline/test_bootstrap_command.py new file mode 100644 index 0000000000..0cf7741c5c --- /dev/null +++ b/tests/integration/pipeline/test_bootstrap_command.py @@ -0,0 +1,380 @@ +from unittest import skipIf + +from parameterized import parameterized + +from samcli.commands.pipeline.bootstrap.cli import PIPELINE_CONFIG_FILENAME, PIPELINE_CONFIG_DIR +from samcli.lib.config.samconfig import SamConfig +from tests.integration.pipeline.base import BootstrapIntegBase +from tests.testing_utils import ( + run_command_with_input, + RUNNING_ON_CI, + RUNNING_TEST_FOR_MASTER_ON_CI, + RUN_BY_CANARY, + run_command, + run_command_with_inputs, +) +import boto3 +from botocore.exceptions import ClientError + +# bootstrap tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict tests to run outside of CI/CD, when the branch is not master or tests are not run by Canary +SKIP_BOOTSTRAP_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY + +# In order to run bootstrap integration test locally make sure your test account is configured as `default` account. 
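+# (the value below is the numeric option typed at the interactive
+# profile-selection prompt, not a profile name)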
+CREDENTIAL_PROFILE = "2" if not RUN_BY_CANARY else "1" + +CFN_OUTPUT_TO_CONFIG_KEY = { + "ArtifactsBucket": "artifacts_bucket", + "CloudFormationExecutionRole": "cloudformation_execution_role", + "PipelineExecutionRole": "pipeline_execution_role", + "PipelineUser": "pipeline_user", +} + + +@skipIf(SKIP_BOOTSTRAP_TESTS, "Skip bootstrap tests in CI/CD only") +class TestBootstrap(BootstrapIntegBase): + @parameterized.expand([("create_image_repository",), (False,)]) + def test_interactive_with_no_resources_provided(self, create_image_repository): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "y" if create_image_repository else "N", # Should we create ECR repo + ] + + if create_image_repository: + inputs.append("") # Create image repository + + inputs.append("") # Confirm summary + inputs.append("y") # Create resources + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + # make sure pipeline user's credential is printed + self.assertIn("ACCESS_KEY_ID", stdout) + self.assertIn("SECRET_ACCESS_KEY", stdout) + + common_resources = { + "PipelineUser", + "PipelineUserAccessKey", + "PipelineUserSecretKey", + "CloudFormationExecutionRole", + "PipelineExecutionRole", + "ArtifactsBucket", + "ArtifactsLoggingBucket", + "ArtifactsLoggingBucketPolicy", + "ArtifactsBucketPolicy", + "PipelineExecutionRolePermissionPolicy", + } + if create_image_repository: + self.assertSetEqual( + { + *common_resources, + "ImageRepository", + }, + set(self._extract_created_resource_logical_ids(stack_name)), + ) + CFN_OUTPUT_TO_CONFIG_KEY["ImageRepository"] = "image_repository" + self.validate_pipeline_config(stack_name, stage_name, list(CFN_OUTPUT_TO_CONFIG_KEY.keys())) + del CFN_OUTPUT_TO_CONFIG_KEY["ImageRepository"] + else: + self.assertSetEqual(common_resources, set(self._extract_created_resource_logical_ids(stack_name))) + self.validate_pipeline_config(stack_name, stage_name) + + @parameterized.expand([("create_image_repository",), (False,)]) + def test_non_interactive_with_no_resources_provided(self, create_image_repository): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + no_interactive=True, + create_image_repository=create_image_repository, + no_confirm_changeset=True, + region=self.region, + ) + + bootstrap_process_execute = run_command(bootstrap_command_list) + + self.assertEqual(bootstrap_process_execute.process.returncode, 2) + stderr = bootstrap_process_execute.stderr.decode() + self.assertIn("Missing required parameter", stderr) + + def test_interactive_with_all_required_resources_provided(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "arn:aws:iam::123:user/user-name", # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "arn:aws:iam::123:role/role-name", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts 
bucket
+            "N",  # Should we create ECR repo
+            "",
+        ]
+
+        bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs)
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertIn("skipping creation", stdout)
+
+    def test_no_interactive_with_all_required_resources_provided(self):
+        stage_name, stack_name = self._get_stage_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list(
+            no_interactive=True,
+            stage_name=stage_name,
+            pipeline_user="arn:aws:iam::123:user/user-name",  # pipeline user
+            pipeline_execution_role="arn:aws:iam::123:role/role-name",  # Pipeline execution role
+            cloudformation_execution_role="arn:aws:iam::123:role/role-name",  # CloudFormation execution role
+            bucket="arn:aws:s3:::bucket-name",  # Artifacts bucket
+            image_repository="arn:aws:ecr:::repository/repo-name",  # ecr repo
+            region=self.region,
+        )
+
+        bootstrap_process_execute = run_command(bootstrap_command_list)
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertIn("skipping creation", stdout)
+
+    def validate_pipeline_config(self, stack_name, stage_name, cfn_keys_to_check=None):
+        # Get output values from cloudformation
+        if cfn_keys_to_check is None:
+            cfn_keys_to_check = list(CFN_OUTPUT_TO_CONFIG_KEY.keys())
+        response = self.cf_client.describe_stacks(StackName=stack_name)
+        stacks = response["Stacks"]
+        self.assertTrue(len(stacks) > 0)  # in case stack name is invalid
+        stack_outputs = stacks[0]["Outputs"]
+        output_values = {}
+        for value in stack_outputs:
+            output_values[value["OutputKey"]] = value["OutputValue"]
+
+        # Get values saved in config file
+        config = SamConfig(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME)
+        config_values = config.get_all(["pipeline", "bootstrap"], "parameters", stage_name)
+        config_values = {**config_values, **config.get_all(["pipeline", "bootstrap"], "parameters")}
+
+        for key in CFN_OUTPUT_TO_CONFIG_KEY:
+            if key not in cfn_keys_to_check:
+                continue
+            value = CFN_OUTPUT_TO_CONFIG_KEY[key]
+            cfn_value = output_values[key]
+            config_value = config_values[value]
+            if key == "ImageRepository":
+                self.assertEqual(cfn_value.split("/")[-1], config_value.split("/")[-1])
+            else:
+                self.assertTrue(cfn_value.endswith(config_value) or cfn_value == config_value)
+
+    @parameterized.expand([("confirm_changeset",), (False,)])
+    def test_no_interactive_with_some_required_resources_provided(self, confirm_changeset: bool):
+        stage_name, stack_name = self._get_stage_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list(
+            no_interactive=True,
+            stage_name=stage_name,
+            pipeline_user="arn:aws:iam::123:user/user-name",  # pipeline user
+            pipeline_execution_role="arn:aws:iam::123:role/role-name",  # Pipeline execution role
+            # CloudFormation execution role missing
+            bucket="arn:aws:s3:::bucket-name",  # Artifacts bucket
+            image_repository="arn:aws:ecr:::repository/repo-name",  # ecr repo
+            no_confirm_changeset=not confirm_changeset,
+            region=self.region,
+        )
+
+        inputs = [
+            "y",  # proceed
+        ]
+
+        bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs if confirm_changeset else [])
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertIn("Successfully created!", stdout)
self.assertIn("CloudFormationExecutionRole", self._extract_created_resource_logical_ids(stack_name)) + + def test_interactive_cancelled_by_user(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "arn:aws:iam::123:user/user-name", # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "N", # Do you have Lambda with package type Image + "", + "", # Create resources confirmation + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertTrue(stdout.strip().endswith("Canceling pipeline bootstrap creation.")) + self.assertFalse(self._stack_exists(stack_name)) + + def test_interactive_with_some_required_resources_provided(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + "arn:aws:iam::123:user/user-name", # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "N", # Do you have Lambda with package type Image + "", + "y", # Create resources confirmation + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertIn("Successfully created!", stdout) + # make sure the not provided resource is the only resource created. + self.assertIn("CloudFormationExecutionRole", self._extract_created_resource_logical_ids(stack_name)) + self.validate_pipeline_config(stack_name, stage_name) + + def test_interactive_pipeline_user_only_created_once(self): + """ + Create 3 stages, only the first stage resource stack creates + a pipeline user, and the remaining two share the same pipeline user. 
+ """ + stage_names = [] + for suffix in ["1", "2", "3"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + bootstrap_command_list = self.get_bootstrap_command_list() + + for i, stage_name in enumerate(stage_names): + inputs = [ + stage_name, + CREDENTIAL_PROFILE, + self.region, # region + *([""] if i == 0 else []), # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "arn:aws:iam::123:role/role-name", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "N", # Should we create ECR repo, 3 - specify one + "", + "y", # Create resources confirmation + ] + + bootstrap_process_execute = run_command_with_input( + bootstrap_command_list, ("\n".join(inputs) + "\n").encode() + ) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + + # Only first environment creates pipeline user + if i == 0: + self.assertIn("The following resources were created in your account:", stdout) + resources = self._extract_created_resource_logical_ids(self.stack_names[i]) + self.assertTrue("PipelineUser" in resources) + self.assertTrue("PipelineUserAccessKey" in resources) + self.assertTrue("PipelineUserSecretKey" in resources) + self.validate_pipeline_config(self.stack_names[i], stage_name) + else: + self.assertIn("skipping creation", stdout) + + @parameterized.expand([("ArtifactsBucket",), ("ArtifactsLoggingBucket",)]) + def test_bootstrapped_buckets_accept_ssl_requests_only(self, bucket_logical_id): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + stage_name=stage_name, no_interactive=True, no_confirm_changeset=True, region=self.region + ) + + bootstrap_process_execute = run_command(bootstrap_command_list) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + + stack_resources = self.cf_client.describe_stack_resources(StackName=stack_name) + bucket = next( + resource + for resource in stack_resources["StackResources"] + if resource["LogicalResourceId"] == bucket_logical_id + ) + bucket_name = bucket["PhysicalResourceId"] + bucket_key = "any/testing/key.txt" + testing_data = b"any testing binary data" + + s3_ssl_client = boto3.client("s3", region_name=self.region) + s3_non_ssl_client = boto3.client("s3", use_ssl=False, region_name=self.region) + + # Assert SSL requests are accepted + s3_ssl_client.put_object(Body=testing_data, Bucket=bucket_name, Key=bucket_key) + res = s3_ssl_client.get_object(Bucket=bucket_name, Key=bucket_key) + retrieved_data = res["Body"].read() + self.assertEqual(retrieved_data, testing_data) + + # Assert non SSl requests are denied + with self.assertRaises(ClientError) as error: + s3_non_ssl_client.get_object(Bucket=bucket_name, Key=bucket_key) + self.assertEqual( + str(error.exception), "An error occurred (AccessDenied) when calling the GetObject operation: Access Denied" + ) + + def test_bootstrapped_artifacts_bucket_has_server_access_log_enabled(self): + stage_name, stack_name = self._get_stage_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list( + stage_name=stage_name, no_interactive=True, no_confirm_changeset=True, region=self.region + ) + + bootstrap_process_execute = run_command(bootstrap_command_list) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + + 
stack_resources = self.cf_client.describe_stack_resources(StackName=stack_name) + artifacts_bucket = next( + resource + for resource in stack_resources["StackResources"] + if resource["LogicalResourceId"] == "ArtifactsBucket" + ) + artifacts_bucket_name = artifacts_bucket["PhysicalResourceId"] + artifacts_logging_bucket = next( + resource + for resource in stack_resources["StackResources"] + if resource["LogicalResourceId"] == "ArtifactsLoggingBucket" + ) + artifacts_logging_bucket_name = artifacts_logging_bucket["PhysicalResourceId"] + + s3_client = boto3.client("s3", region_name=self.region) + res = s3_client.get_bucket_logging(Bucket=artifacts_bucket_name) + self.assertEqual(artifacts_logging_bucket_name, res["LoggingEnabled"]["TargetBucket"]) diff --git a/tests/integration/pipeline/test_init_command.py b/tests/integration/pipeline/test_init_command.py new file mode 100644 index 0000000000..182184a999 --- /dev/null +++ b/tests/integration/pipeline/test_init_command.py @@ -0,0 +1,299 @@ +import os.path +import shutil +from pathlib import Path +from textwrap import dedent +from typing import List +from unittest import skipIf + +from parameterized import parameterized + +from samcli.commands.pipeline.bootstrap.cli import PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME +from tests.integration.pipeline.base import InitIntegBase, BootstrapIntegBase +from tests.integration.pipeline.test_bootstrap_command import SKIP_BOOTSTRAP_TESTS, CREDENTIAL_PROFILE +from tests.testing_utils import run_command_with_inputs + +QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "", + "credential-id", + "main", + "template.yaml", + "test", + "test-stack", + "test-pipeline-execution-role", + "test-cfn-execution-role", + "test-bucket", + "test-ecr", + "us-east-2", + "prod", + "prod-stack", + "prod-pipeline-execution-role", + "prod-cfn-execution-role", + "prod-bucket", + "prod-ecr", + "us-west-2", +] + + +class TestInit(InitIntegBase): + """ + Here we use Jenkins template for testing + """ + + def setUp(self) -> None: + # make sure there is no pipelineconfig.toml, otherwise the autofill could affect the question flow + pipelineconfig_file = Path(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + if pipelineconfig_file.exists(): + pipelineconfig_file.unlink() + + def tearDown(self) -> None: + super().tearDown() + shutil.rmtree(PIPELINE_CONFIG_DIR, ignore_errors=True) + + def test_quick_start(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL) + + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + def test_failed_when_generated_file_already_exist_override(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + generated_jenkinsfile_path.touch() # the file now pre-exists + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs( + init_command_list, [*QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL, 
"y"] + ) + + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + def test_failed_when_generated_file_already_exist_not_override(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + generated_jenkinsfile_path.touch() # the file now pre-exists + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs( + init_command_list, [*QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL, ""] + ) + + self.assertEqual(init_process_execute.process.returncode, 0) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open( + os.path.join(".aws-sam", "pipeline", "generated-files", "Jenkinsfile"), "r" + ) as output: + self.assertEqual(expected.read(), output.read()) + + # also check the Jenkinsfile is not overridden + self.assertEqual("", open("Jenkinsfile", "r").read()) + + def test_custom_template(self): + generated_file = Path("weather") + self.generated_files.append(generated_file) + + custom_template_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "custom_template")) + inputs = ["2", str(custom_template_path), "", "Rainy"] # custom template + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, inputs) + + self.assertEqual(init_process_execute.process.returncode, 0) + + self.assertTrue(generated_file.exists()) + + with open(generated_file, "r") as f: + self.assertEqual("Rainy\n", f.read()) + + @parameterized.expand([("with_bootstrap",), (False,)]) + def test_with_pipelineconfig_has_all_stage_values(self, with_bootstrap): + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + Path(PIPELINE_CONFIG_DIR).mkdir(parents=True, exist_ok=True) + pipelineconfig_path = Path(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + with open(pipelineconfig_path, "w") as f: + f.write( + dedent( + """\ + version = 0.1 + [default] + [default.pipeline_bootstrap] + [default.pipeline_bootstrap.parameters] + pipeline_user = "arn:aws:iam::123:user/aws-sam-cli-managed-test-pipeline-res-PipelineUser-123" + + [test] + [test.pipeline_bootstrap] + [test.pipeline_bootstrap.parameters] + pipeline_execution_role = "test-pipeline-execution-role" + cloudformation_execution_role = "test-cfn-execution-role" + artifacts_bucket = "test-bucket" + image_repository = "test-ecr" + region = "us-east-2" + + [prod] + [prod.pipeline_bootstrap] + [prod.pipeline_bootstrap.parameters] + pipeline_execution_role = "prod-pipeline-execution-role" + cloudformation_execution_role = "prod-cfn-execution-role" + artifacts_bucket = "prod-bucket" + image_repository = "prod-ecr" + region = "us-west-2" + """ + ) + ) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. 
+ "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + + init_command_list = self.get_init_command_list(with_bootstrap) + init_process_execute = run_command_with_inputs(init_command_list, inputs) + + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + +@skipIf(SKIP_BOOTSTRAP_TESTS, "Skip bootstrap tests in CI/CD only") +class TestInitWithBootstrap(BootstrapIntegBase): + generated_files: List[Path] = [] + + def setUp(self): + super().setUp() + self.command_list = [self.base_command(), "pipeline", "init", "--bootstrap"] + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + def tearDown(self) -> None: + for generated_file in self.generated_files: + if generated_file.is_dir(): + shutil.rmtree(generated_file, ignore_errors=True) + elif generated_file.exists(): + generated_file.unlink() + super().tearDown() + + def test_without_stages_in_pipeline_config(self): + stage_names = [] + for suffix in ["1", "2"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "y", # Do you want to go through stage setup process now? + stage_names[0], + CREDENTIAL_PROFILE, + self.region, + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "y", # Do you want to go through stage setup process now? + stage_names[1], + CREDENTIAL_PROFILE, + self.region, + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + init_process_execute = run_command_with_inputs(self.command_list, inputs) + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertIn("Here are the stage names detected", init_process_execute.stdout.decode()) + self.assertIn(stage_names[0], init_process_execute.stdout.decode()) + self.assertIn(stage_names[1], init_process_execute.stdout.decode()) + + def test_with_one_stages_in_pipeline_config(self): + stage_names = [] + for suffix in ["1", "2"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_names[0], + CREDENTIAL_PROFILE, + self.region, # region + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no + "", # Confirm summary + "y", # Create resources + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "y", # Do you want to go through stage setup process now? 
+ stage_names[1], + CREDENTIAL_PROFILE, + self.region, + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + init_process_execute = run_command_with_inputs(self.command_list, inputs) + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertIn("Here are the stage names detected", init_process_execute.stdout.decode()) + self.assertIn(stage_names[0], init_process_execute.stdout.decode()) + self.assertIn(stage_names[1], init_process_execute.stdout.decode()) diff --git a/tests/integration/testdata/pipeline/custom_template/cookiecutter.json b/tests/integration/testdata/pipeline/custom_template/cookiecutter.json new file mode 100644 index 0000000000..c02b7caed1 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/cookiecutter.json @@ -0,0 +1,4 @@ +{ + "outputDir": "aws-sam-pipeline", + "weather": "" +} \ No newline at end of file diff --git a/tests/integration/testdata/pipeline/custom_template/metadata.json b/tests/integration/testdata/pipeline/custom_template/metadata.json new file mode 100644 index 0000000000..689fe297f8 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/metadata.json @@ -0,0 +1,3 @@ +{ + "number_of_stages": 0 +} diff --git a/tests/integration/testdata/pipeline/custom_template/questions.json b/tests/integration/testdata/pipeline/custom_template/questions.json new file mode 100644 index 0000000000..a0fe2167bf --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/questions.json @@ -0,0 +1,7 @@ +{ + "questions": [{ + "key": "weather", + "question": "How is the weather today?", + "default": "Sunny" + }] +} \ No newline at end of file diff --git a/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather b/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather new file mode 100644 index 0000000000..3501ffd0ae --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather @@ -0,0 +1 @@ +{{cookiecutter.weather}} diff --git a/tests/integration/testdata/pipeline/expected_jenkinsfile b/tests/integration/testdata/pipeline/expected_jenkinsfile new file mode 100644 index 0000000000..7a213a30f9 --- /dev/null +++ b/tests/integration/testdata/pipeline/expected_jenkinsfile @@ -0,0 +1,177 @@ +pipeline { + agent any + environment { + PIPELINE_USER_CREDENTIAL_ID = 'credential-id' + SAM_TEMPLATE = 'template.yaml' + MAIN_BRANCH = 'main' + TESTING_STACK_NAME = 'test-stack' + TESTING_PIPELINE_EXECUTION_ROLE = 'test-pipeline-execution-role' + TESTING_CLOUDFORMATION_EXECUTION_ROLE = 'test-cfn-execution-role' + TESTING_ARTIFACTS_BUCKET = 'test-bucket' + TESTING_IMAGE_REPOSITORY = 'test-ecr' + TESTING_REGION = 'us-east-2' + PROD_STACK_NAME = 'prod-stack' + PROD_PIPELINE_EXECUTION_ROLE = 'prod-pipeline-execution-role' + PROD_CLOUDFORMATION_EXECUTION_ROLE = 'prod-cfn-execution-role' + PROD_ARTIFACTS_BUCKET = 'prod-bucket' + PROD_IMAGE_REPOSITORY = 'prod-ecr' + PROD_REGION = 'us-west-2' + } + stages { + // uncomment and modify the following step for running the unit-tests + // stage('test') { + // steps { + // sh ''' + // # trigger the tests here + // ''' + // } + // } + + stage('build-and-deploy-feature') { + // this stage is triggered only for feature branches (feature*), + // which will build the stack and deploy to a 
stack named with branch name. + when { + branch 'feature*' + } + agent { + docker { + image 'public.ecr.aws/sam/build-provided' + args '--user 0:0 -v /var/run/docker.sock:/var/run/docker.sock' + } + } + steps { + sh 'sam build --template ${SAM_TEMPLATE} --use-container' + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.TESTING_REGION, + role: env.TESTING_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'deploying-feature') { + sh ''' + sam deploy --stack-name $(echo ${BRANCH_NAME} | tr -cd '[a-zA-Z0-9-]') \ + --capabilities CAPABILITY_IAM \ + --region ${TESTING_REGION} \ + --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \ + --image-repository ${TESTING_IMAGE_REPOSITORY} \ + --no-fail-on-empty-changeset \ + --role-arn ${TESTING_CLOUDFORMATION_EXECUTION_ROLE} + ''' + } + } + } + + stage('build-and-package') { + when { + branch env.MAIN_BRANCH + } + agent { + docker { + image 'public.ecr.aws/sam/build-provided' + args '--user 0:0 -v /var/run/docker.sock:/var/run/docker.sock' + } + } + steps { + sh 'sam build --template ${SAM_TEMPLATE} --use-container' + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.TESTING_REGION, + role: env.TESTING_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'testing-packaging') { + sh ''' + sam package \ + --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \ + --image-repository ${TESTING_IMAGE_REPOSITORY} \ + --region ${TESTING_REGION} \ + --output-template-file packaged-testing.yaml + ''' + } + + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.PROD_REGION, + role: env.PROD_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'prod-packaging') { + sh ''' + sam package \ + --s3-bucket ${PROD_ARTIFACTS_BUCKET} \ + --image-repository ${PROD_IMAGE_REPOSITORY} \ + --region ${PROD_REGION} \ + --output-template-file packaged-prod.yaml + ''' + } + + archiveArtifacts artifacts: 'packaged-testing.yaml' + archiveArtifacts artifacts: 'packaged-prod.yaml' + } + } + + stage('deploy-testing') { + when { + branch env.MAIN_BRANCH + } + agent { + docker { + image 'public.ecr.aws/sam/build-provided' + } + } + steps { + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.TESTING_REGION, + role: env.TESTING_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'testing-deployment') { + sh ''' + sam deploy --stack-name ${TESTING_STACK_NAME} \ + --template packaged-testing.yaml \ + --capabilities CAPABILITY_IAM \ + --region ${TESTING_REGION} \ + --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \ + --image-repository ${TESTING_IMAGE_REPOSITORY} \ + --no-fail-on-empty-changeset \ + --role-arn ${TESTING_CLOUDFORMATION_EXECUTION_ROLE} + ''' + } + } + } + + // uncomment and modify the following step for running the integration-tests + // stage('integration-test') { + // when { + // branch env.MAIN_BRANCH + // } + // steps { + // sh ''' + // # trigger the integration tests here + // ''' + // } + // } + + stage('deploy-prod') { + when { + branch env.MAIN_BRANCH + } + agent { + docker { + image 'public.ecr.aws/sam/build-provided' + } + } + steps { + withAWS( + credentials: env.PIPELINE_USER_CREDENTIAL_ID, + region: env.PROD_REGION, + role: env.PROD_PIPELINE_EXECUTION_ROLE, + roleSessionName: 'prod-deployment') { + sh ''' + sam deploy --stack-name ${PROD_STACK_NAME} \ + --template packaged-prod.yaml \ + --capabilities CAPABILITY_IAM \ + --region ${PROD_REGION} \ + --s3-bucket ${PROD_ARTIFACTS_BUCKET} \ + --image-repository ${PROD_IMAGE_REPOSITORY} \ + --no-fail-on-empty-changeset \ + --role-arn ${PROD_CLOUDFORMATION_EXECUTION_ROLE} + ''' + } + } + } + } +} diff 
--git a/tests/testing_utils.py b/tests/testing_utils.py index 0cc7aa3067..78da67ab0c 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -5,6 +5,7 @@ import shutil from collections import namedtuple from subprocess import Popen, PIPE, TimeoutExpired +from typing import List IS_WINDOWS = platform.system().lower() == "windows" RUNNING_ON_CI = os.environ.get("APPVEYOR", False) @@ -50,6 +51,10 @@ def run_command_with_input(command_list, stdin_input, timeout=TIMEOUT) -> Comman raise +def run_command_with_inputs(command_list: List[str], inputs: List[str], timeout=TIMEOUT) -> CommandResult: + return run_command_with_input(command_list, ("\n".join(inputs) + "\n").encode(), timeout) + + class FileCreator(object): def __init__(self): self.rootdir = tempfile.mkdtemp() diff --git a/tests/unit/commands/_utils/test_template.py b/tests/unit/commands/_utils/test_template.py index be4001be68..1de707ec38 100644 --- a/tests/unit/commands/_utils/test_template.py +++ b/tests/unit/commands/_utils/test_template.py @@ -1,12 +1,10 @@ -import os import copy +import os +from unittest import TestCase +from unittest.mock import patch, mock_open, MagicMock -import jmespath import yaml from botocore.utils import set_value_from_jmespath - -from unittest import TestCase -from unittest.mock import patch, mock_open, MagicMock from parameterized import parameterized, param from samcli.commands._utils.resources import AWS_SERVERLESS_FUNCTION, AWS_SERVERLESS_API diff --git a/tests/unit/commands/deploy/test_guided_context.py b/tests/unit/commands/deploy/test_guided_context.py index 6e49b73a60..7b31ff60eb 100644 --- a/tests/unit/commands/deploy/test_guided_context.py +++ b/tests/unit/commands/deploy/test_guided_context.py @@ -666,7 +666,7 @@ def test_guided_prompts_with_code_signing( expected_code_sign_calls = expected_code_sign_calls * (number_of_functions + number_of_layers) self.assertEqual(expected_code_sign_calls, patched_code_signer_prompt.call_args_list) - @patch("samcli.commands.deploy.guided_context.get_session") + @patch("samcli.commands.deploy.guided_context.get_default_aws_region") @patch("samcli.commands.deploy.guided_context.prompt") @patch("samcli.commands.deploy.guided_context.confirm") @patch("samcli.commands.deploy.guided_context.manage_stack") @@ -685,7 +685,7 @@ def test_guided_prompts_check_default_config_region( patched_manage_stack, patched_confirm, patched_prompt, - patched_get_session, + patched_get_default_aws_region, ): patched_sam_function_provider.return_value = {} patched_get_template_artifacts_format.return_value = [ZIP] @@ -695,7 +695,7 @@ def test_guided_prompts_check_default_config_region( patched_confirm.side_effect = [True, False, True, True, ""] patched_signer_config_per_function.return_value = ({}, {}) patched_manage_stack.return_value = "managed_s3_stack" - patched_get_session.return_value.get_config_variable.return_value = "default_config_region" + patched_get_default_aws_region.return_value = "default_config_region" # setting the default region to None self.gc.region = None self.gc.guided_prompts(parameter_override_keys=None) diff --git a/tests/unit/commands/pipeline/__init__.py b/tests/unit/commands/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/pipeline/bootstrap/__init__.py b/tests/unit/commands/pipeline/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/pipeline/bootstrap/test_cli.py b/tests/unit/commands/pipeline/bootstrap/test_cli.py new file mode 100644 
index 0000000000..649fbbdf32
--- /dev/null
+++ b/tests/unit/commands/pipeline/bootstrap/test_cli.py
@@ -0,0 +1,276 @@
+from unittest import TestCase
+from unittest.mock import patch, Mock
+
+import click
+from click.testing import CliRunner
+
+from samcli.commands.pipeline.bootstrap.cli import (
+    _load_saved_pipeline_user_arn,
+    _get_bootstrap_command_names,
+    PIPELINE_CONFIG_FILENAME,
+    PIPELINE_CONFIG_DIR,
+)
+from samcli.commands.pipeline.bootstrap.cli import cli as bootstrap_cmd
+from samcli.commands.pipeline.bootstrap.cli import do_cli as bootstrap_cli
+
+ANY_REGION = "ANY_REGION"
+ANY_PROFILE = "ANY_PROFILE"
+ANY_STAGE_NAME = "ANY_STAGE_NAME"
+ANY_PIPELINE_USER_ARN = "ANY_PIPELINE_USER_ARN"
+ANY_PIPELINE_EXECUTION_ROLE_ARN = "ANY_PIPELINE_EXECUTION_ROLE_ARN"
+ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN = "ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN"
+ANY_ARTIFACTS_BUCKET_ARN = "ANY_ARTIFACTS_BUCKET_ARN"
+ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN"
+ANY_ARN = "ANY_ARN"
+ANY_CONFIG_FILE = "ANY_CONFIG_FILE"
+ANY_CONFIG_ENV = "ANY_CONFIG_ENV"
+PIPELINE_BOOTSTRAP_COMMAND_NAMES = ["pipeline", "bootstrap"]
+
+
+class TestCli(TestCase):
+    def setUp(self) -> None:
+        self.cli_context = {
+            "region": ANY_REGION,
+            "profile": ANY_PROFILE,
+            "interactive": True,
+            "stage_name": ANY_STAGE_NAME,
+            "pipeline_user_arn": ANY_PIPELINE_USER_ARN,
+            "pipeline_execution_role_arn": ANY_PIPELINE_EXECUTION_ROLE_ARN,
+            "cloudformation_execution_role_arn": ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN,
+            "artifacts_bucket_arn": ANY_ARTIFACTS_BUCKET_ARN,
+            "create_image_repository": True,
+            "image_repository_arn": ANY_IMAGE_REPOSITORY_ARN,
+            "confirm_changeset": True,
+            "config_file": ANY_CONFIG_FILE,
+            "config_env": ANY_CONFIG_ENV,
+        }
+
+    @patch("samcli.commands.pipeline.bootstrap.cli.do_cli")
+    def test_bootstrap_command_default_argument_values(self, do_cli_mock):
+        runner: CliRunner = CliRunner()
+        runner.invoke(bootstrap_cmd)
+        # Test that the defaults are as follows:
+        # interactive -> True
+        # create_image_repository -> False
+        # confirm_changeset -> True
+        # region, profile, stage_name and all ARNs are None
+        do_cli_mock.assert_called_once_with(
+            region=None,
+            profile=None,
+            interactive=True,
+            stage_name=None,
+            pipeline_user_arn=None,
+            pipeline_execution_role_arn=None,
+            cloudformation_execution_role_arn=None,
+            artifacts_bucket_arn=None,
+            create_image_repository=False,
+            image_repository_arn=None,
+            confirm_changeset=True,
+            config_file="default",
+            config_env="samconfig.toml",
+        )
+
+    @patch("samcli.commands.pipeline.bootstrap.cli.do_cli")
+    def test_bootstrap_command_flag_arguments(self, do_cli_mock):
+        runner: CliRunner = CliRunner()
+        runner.invoke(bootstrap_cmd, args=["--interactive", "--no-create-image-repository", "--confirm-changeset"])
+        args, kwargs = do_cli_mock.call_args
+        self.assertTrue(kwargs["interactive"])
+        self.assertFalse(kwargs["create_image_repository"])
+        self.assertTrue(kwargs["confirm_changeset"])
+
+        runner.invoke(bootstrap_cmd, args=["--no-interactive", "--create-image-repository", "--no-confirm-changeset"])
+        args, kwargs = do_cli_mock.call_args
+        self.assertFalse(kwargs["interactive"])
+        self.assertTrue(kwargs["create_image_repository"])
+        self.assertFalse(kwargs["confirm_changeset"])
+
+    @patch("samcli.commands.pipeline.bootstrap.cli.do_cli")
+    def test_bootstrap_command_with_different_arguments_combination(self, do_cli_mock):
+        runner: CliRunner = CliRunner()
+        runner.invoke(
+            bootstrap_cmd,
+            args=["--no-interactive", "--stage", "environment1", "--bucket", "bucketARN"],
+        )
+
args, kwargs = do_cli_mock.call_args + self.assertFalse(kwargs["interactive"]) + self.assertEqual(kwargs["stage_name"], "environment1") + self.assertEqual(kwargs["artifacts_bucket_arn"], "bucketARN") + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrapping_normal_interactive_flow( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + # setup + gc_instance = Mock() + guided_context_mock.return_value = gc_instance + environment_instance = Mock() + environment_mock.return_value = environment_instance + load_saved_pipeline_user_arn_mock.return_value = ANY_PIPELINE_USER_ARN + self.cli_context["interactive"] = True + self.cli_context["pipeline_user_arn"] = None + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + + # trigger + bootstrap_cli(**self.cli_context) + + # verify + load_saved_pipeline_user_arn_mock.assert_called_once() + gc_instance.run.assert_called_once() + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + environment_instance.print_resources_summary.assert_called_once() + environment_instance.save_config_safe.assert_called_once_with( + config_dir=PIPELINE_CONFIG_DIR, + filename=PIPELINE_CONFIG_FILENAME, + cmd_names=PIPELINE_BOOTSTRAP_COMMAND_NAMES, + ) + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrap_will_not_try_loading_pipeline_user_if_already_provided( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + bootstrap_cli(**self.cli_context) + load_saved_pipeline_user_arn_mock.assert_not_called() + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrap_will_try_loading_pipeline_user_if_not_provided( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + self.cli_context["pipeline_user_arn"] = None + bootstrap_cli(**self.cli_context) + load_saved_pipeline_user_arn_mock.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_stage_name_is_required_to_be_provided_in_case_of_non_interactive_mode( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + self.cli_context["interactive"] = False + self.cli_context["stage_name"] = None + with self.assertRaises(click.UsageError): + bootstrap_cli(**self.cli_context) + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + 
@patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_stage_name_is_not_required_to_be_provided_in_case_of_interactive_mode( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + self.cli_context["interactive"] = True + self.cli_context["stage_name"] = None + bootstrap_cli(**self.cli_context) # No exception is thrown + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_guided_context_will_be_enabled_or_disabled_based_on_the_interactive_mode( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + gc_instance = Mock() + guided_context_mock.return_value = gc_instance + self.cli_context["interactive"] = False + bootstrap_cli(**self.cli_context) + gc_instance.run.assert_not_called() + self.cli_context["interactive"] = True + bootstrap_cli(**self.cli_context) + gc_instance.run.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + @patch("samcli.commands.pipeline.bootstrap.cli._load_saved_pipeline_user_arn") + @patch("samcli.commands.pipeline.bootstrap.cli.Stage") + @patch("samcli.commands.pipeline.bootstrap.cli.GuidedContext") + def test_bootstrapping_will_confirm_before_creating_the_resources_unless_the_user_choose_not_to( + self, guided_context_mock, environment_mock, load_saved_pipeline_user_arn_mock, get_command_names_mock + ): + environment_instance = Mock() + environment_mock.return_value = environment_instance + self.cli_context["confirm_changeset"] = False + bootstrap_cli(**self.cli_context) + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=False) + environment_instance.bootstrap.reset_mock() + self.cli_context["confirm_changeset"] = True + bootstrap_cli(**self.cli_context) + environment_instance.bootstrap.assert_called_once_with(confirm_changeset=True) + + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_will_read_from_the_correct_file( + self, get_command_names_mock, sam_config_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = False + + # trigger + _load_saved_pipeline_user_arn() + + # verify + sam_config_mock.assert_called_once_with(config_dir=PIPELINE_CONFIG_DIR, filename=PIPELINE_CONFIG_FILENAME) + + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_is_not_found( + self, get_command_names_mock, sam_config_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = False + + # trigger + pipeline_user_arn = _load_saved_pipeline_user_arn() + + # verify + self.assertIsNone(pipeline_user_arn) + + 
@patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_will_return_non_if_the_pipeline_toml_file_does_not_contain_pipeline_user( + self, get_command_names_mock, sam_config_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = True + sam_config_instance_mock.get_all.return_value = {"non-pipeline_user-key": "any_value"} + + # trigger + pipeline_user_arn = _load_saved_pipeline_user_arn() + + # verify + self.assertIsNone(pipeline_user_arn) + + @patch("samcli.commands.pipeline.bootstrap.cli.SamConfig") + @patch("samcli.commands.pipeline.bootstrap.cli._get_bootstrap_command_names") + def test_load_saved_pipeline_user_arn_returns_the_pipeline_user_arn_from_the_pipeline_toml_file( + self, get_command_names_mock, sam_config_mock + ): + # setup + get_command_names_mock.return_value = PIPELINE_BOOTSTRAP_COMMAND_NAMES + sam_config_instance_mock = Mock() + sam_config_mock.return_value = sam_config_instance_mock + sam_config_instance_mock.exists.return_value = True + sam_config_instance_mock.get_all.return_value = {"pipeline_user": ANY_PIPELINE_USER_ARN} + + # trigger + pipeline_user_arn = _load_saved_pipeline_user_arn() + + # verify + self.assertEqual(pipeline_user_arn, ANY_PIPELINE_USER_ARN) diff --git a/tests/unit/commands/pipeline/bootstrap/test_guided_context.py b/tests/unit/commands/pipeline/bootstrap/test_guided_context.py new file mode 100644 index 0000000000..c4c11e9792 --- /dev/null +++ b/tests/unit/commands/pipeline/bootstrap/test_guided_context.py @@ -0,0 +1,231 @@ +from unittest import TestCase +from unittest.mock import patch, Mock, ANY + +from parameterized import parameterized + +from samcli.commands.pipeline.bootstrap.guided_context import GuidedContext + +ANY_STAGE_NAME = "ANY_STAGE_NAME" +ANY_PIPELINE_USER_ARN = "ANY_PIPELINE_USER_ARN" +ANY_PIPELINE_EXECUTION_ROLE_ARN = "ANY_PIPELINE_EXECUTION_ROLE_ARN" +ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN = "ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN" +ANY_ARTIFACTS_BUCKET_ARN = "ANY_ARTIFACTS_BUCKET_ARN" +ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN" +ANY_ARN = "ANY_ARN" +ANY_REGION = "us-east-2" + + +class TestGuidedContext(TestCase): + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_not_prompt_for_fields_that_are_already_provided( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + gc: GuidedContext = GuidedContext( + stage_name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + region=ANY_REGION, + ) + gc.run() + # there should only two prompt to ask + # 1. which account to use (mocked in _prompt_account_id(), not contributing to count) + # 2. 
what values customers want to change + prompt_account_id_mock.assert_called_once() + click_mock.prompt.assert_called_once() + + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_prompt_for_fields_that_are_not_provided( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + click_mock.confirm.return_value = False + click_mock.prompt = Mock(return_value="0") + gc: GuidedContext = GuidedContext( + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN # Exclude ECR repo, it has its own detailed test below + ) + gc.run() + prompt_account_id_mock.assert_called_once() + self.assertTrue(self.did_prompt_text_like("Stage Name", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Pipeline IAM user", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Pipeline execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("CloudFormation execution role", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("Artifact bucket", click_mock.prompt)) + self.assertTrue(self.did_prompt_text_like("region", click_mock.prompt)) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.GuidedContext._prompt_account_id") + def test_guided_context_will_not_prompt_for_not_provided_image_repository_if_no_image_repository_is_required( + self, prompt_account_id_mock, click_mock, account_id_mock + ): + account_id_mock.return_value = "1234567890" + # ECR Image Repository choices: + # 1 - No, My SAM Template won't include lambda functions of Image package-type + # 2 - Yes, I need a help creating one + # 3 - I already have an ECR image repository + gc_without_ecr_info: GuidedContext = GuidedContext( + stage_name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + ) + + self.assertIsNone(gc_without_ecr_info.image_repository_arn) + + click_mock.confirm.return_value = False # the user chose to not CREATE an ECR Image repository + click_mock.prompt.side_effect = [None, "0"] + gc_without_ecr_info.run() + self.assertIsNone(gc_without_ecr_info.image_repository_arn) + self.assertFalse(gc_without_ecr_info.create_image_repository) + self.assertFalse(self.did_prompt_text_like("Please enter the ECR image repository", click_mock.prompt)) + + click_mock.confirm.return_value = True # the user chose to CREATE an ECR Image repository + click_mock.prompt.side_effect = [None, None, "0"] + gc_without_ecr_info.run() + self.assertIsNone(gc_without_ecr_info.image_repository_arn) + self.assertTrue(gc_without_ecr_info.create_image_repository) + self.assertTrue(self.did_prompt_text_like("Please enter the ECR image repository", click_mock.prompt)) + + click_mock.confirm.return_value = True # the user already has a repo + click_mock.prompt.side_effect = [None, ANY_IMAGE_REPOSITORY_ARN, "0"] + gc_without_ecr_info.run() + self.assertFalse(gc_without_ecr_info.create_image_repository) + self.assertTrue( + self.did_prompt_text_like("Please enter the ECR image repository", 
click_mock.prompt)
+        )  # we've asked about it
+        self.assertEqual(gc_without_ecr_info.image_repository_arn, ANY_IMAGE_REPOSITORY_ARN)
+
+    @staticmethod
+    def did_prompt_text_like(txt, click_prompt_mock):
+        txt = txt.lower()
+        for kall in click_prompt_mock.call_args_list:
+            args, kwargs = kall
+            if args:
+                text = args[0].lower()
+            else:
+                text = kwargs.get("text", "").lower()
+            if txt in text:
+                return True
+        return False
+
+
+class TestGuidedContext_prompt_account_id(TestCase):
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.click")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles")
+    def test_prompt_account_id_can_display_profiles_and_environment(
+        self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock
+    ):
+        getenv_mock.return_value = "not None"
+        list_available_profiles_mock.return_value = ["profile1", "profile2"]
+        click_mock.prompt.return_value = "1"  # select environment variable
+        get_current_account_id_mock.return_value = "account_id"
+
+        guided_context_mock = Mock()
+        GuidedContext._prompt_account_id(guided_context_mock)
+
+        click_mock.prompt.assert_called_once_with(
+            ANY, show_choices=False, show_default=False, type=click_mock.Choice(["1", "2", "3", "q"])
+        )
+
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.click")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles")
+    def test_prompt_account_id_wont_show_environment_option_when_it_doesnt_exist(
+        self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock
+    ):
+        getenv_mock.return_value = None
+        list_available_profiles_mock.return_value = ["profile1", "profile2"]
+        click_mock.prompt.return_value = "1"  # select environment variable
+        get_current_account_id_mock.return_value = "account_id"
+
+        guided_context_mock = Mock()
+        GuidedContext._prompt_account_id(guided_context_mock)
+
+        click_mock.prompt.assert_called_once_with(
+            ANY, show_choices=False, show_default=False, type=click_mock.Choice(["2", "3", "q"])
+        )
+
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.click")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles")
+    def test_prompt_account_id_select_environment_unset_self_profile(
+        self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock
+    ):
+        getenv_mock.return_value = "not None"
+        list_available_profiles_mock.return_value = ["profile1", "profile2"]
+        click_mock.prompt.return_value = "1"  # select environment variable
+        get_current_account_id_mock.return_value = "account_id"
+
+        guided_context_mock = Mock()
+        GuidedContext._prompt_account_id(guided_context_mock)
+
+        self.assertEqual(None, guided_context_mock.profile)
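These `_prompt_account_id` tests fix the shape of its choice list; sketched from the expectations (the specific environment-variable check and prompt text are assumptions, only the choice numbering is pinned down by the tests):

    # "1" (credential environment variables) is offered only when such
    # variables are set, profiles are numbered from "2" upward, and "q" quits.
    choices = []
    if os.getenv("AWS_ACCESS_KEY_ID"):  # assumed check for env credentials
        choices.append("1")
    profiles = list_available_profiles()
    choices.extend(str(index) for index in range(2, 2 + len(profiles)))
    choices.append("q")

+    @parameterized.expand(
+        [
+            (
+                "2",
+                "profile1",
+            ),
+            (
+                "3",
+                "profile2",
+            ),
+        ]
+    )
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id")
+    @patch("samcli.commands.pipeline.bootstrap.guided_context.click")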
@patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv") + @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles") + def test_prompt_account_id_select_profile_set_profile_to_its_name( + self, + profile_selection, + expected_profile, + list_available_profiles_mock, + getenv_mock, + click_mock, + get_current_account_id_mock, + ): + getenv_mock.return_value = "not None" + list_available_profiles_mock.return_value = ["profile1", "profile2"] + click_mock.prompt.return_value = profile_selection + get_current_account_id_mock.return_value = "account_id" + + guided_context_mock = Mock() + GuidedContext._prompt_account_id(guided_context_mock) + + self.assertEquals(expected_profile, guided_context_mock.profile) + + @patch("samcli.commands.pipeline.bootstrap.guided_context.sys.exit") + @patch("samcli.commands.pipeline.bootstrap.guided_context.get_current_account_id") + @patch("samcli.commands.pipeline.bootstrap.guided_context.click") + @patch("samcli.commands.pipeline.bootstrap.guided_context.os.getenv") + @patch("samcli.commands.pipeline.bootstrap.guided_context.list_available_profiles") + def test_prompt_account_id_select_quit( + self, list_available_profiles_mock, getenv_mock, click_mock, get_current_account_id_mock, exit_mock + ): + getenv_mock.return_value = "not None" + list_available_profiles_mock.return_value = ["profile1", "profile2"] + click_mock.prompt.return_value = "q" # quit + get_current_account_id_mock.return_value = "account_id" + + guided_context_mock = Mock() + GuidedContext._prompt_account_id(guided_context_mock) + + exit_mock.assert_called_once_with(0) diff --git a/tests/unit/commands/pipeline/init/__init__.py b/tests/unit/commands/pipeline/init/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/pipeline/init/test_cli.py b/tests/unit/commands/pipeline/init/test_cli.py new file mode 100644 index 0000000000..2e7cd0699b --- /dev/null +++ b/tests/unit/commands/pipeline/init/test_cli.py @@ -0,0 +1,22 @@ +from unittest import TestCase +from unittest.mock import patch + +from click.testing import CliRunner + +from samcli.commands.pipeline.init.cli import cli as init_cmd +from samcli.commands.pipeline.init.cli import do_cli as init_cli + + +class TestCli(TestCase): + @patch("samcli.commands.pipeline.init.cli.do_cli") + def test_cli_default_flow(self, do_cli_mock): + runner: CliRunner = CliRunner() + runner.invoke(init_cmd) + # Currently we support the interactive mode only, i.e. 
we don't accept any command arguments;
+        # instead we ask the user for the required arguments interactively
+        do_cli_mock.assert_called_once_with(False)  # called with the default --bootstrap value (False)
+
+    @patch("samcli.commands.pipeline.init.cli.InteractiveInitFlow.do_interactive")
+    def test_do_cli(self, do_interactive_mock):
+        init_cli(False)
+        do_interactive_mock.assert_called_once_with()  # called without arguments
diff --git a/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py
new file mode 100644
index 0000000000..2cdaacc91e
--- /dev/null
+++ b/tests/unit/commands/pipeline/init/test_initeractive_init_flow.py
@@ -0,0 +1,566 @@
+import json
+import shutil
+import tempfile
+from unittest import TestCase
+from unittest.mock import patch, Mock, call
+import os
+from pathlib import Path
+
+from parameterized import parameterized
+
+from samcli.commands.exceptions import AppPipelineTemplateMetadataException
+from samcli.commands.pipeline.init.interactive_init_flow import (
+    InteractiveInitFlow,
+    PipelineTemplateCloneException,
+    APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME,
+    shared_path,
+    CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME,
+    _prompt_cicd_provider,
+    _prompt_provider_pipeline_template,
+    _get_pipeline_template_metadata,
+    _copy_dir_contents_to_cwd,
+)
+from samcli.commands.pipeline.init.pipeline_templates_manifest import AppPipelineTemplateManifestException
+from samcli.lib.utils.git_repo import CloneRepoException
+from samcli.lib.cookiecutter.interactive_flow_creator import QuestionsNotFoundException
+
+
+class TestInteractiveInitFlow(TestCase):
+    @patch("samcli.commands.pipeline.init.interactive_init_flow._read_app_pipeline_templates_manifest")
+    @patch("samcli.commands.pipeline.init.interactive_init_flow._prompt_pipeline_template")
+    @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._generate_from_pipeline_template")
+    @patch("samcli.commands.pipeline.init.interactive_init_flow.shared_path")
+    @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone")
+    @patch("samcli.lib.cookiecutter.question.click")
+    def test_app_pipeline_templates_clone_fail_when_an_old_clone_exists(
+        self,
+        click_mock,
+        clone_mock,
+        shared_path_mock,
+        generate_from_pipeline_template_mock,
+        select_pipeline_template_mock,
+        read_app_pipeline_templates_manifest_mock,
+    ):
+        # setup
+        clone_mock.side_effect = CloneRepoException  # clone fail
+        app_pipeline_templates_path_mock = Mock()
+        selected_pipeline_template_path_mock = Mock()
+        pipeline_templates_manifest_mock = Mock()
+        shared_path_mock.joinpath.return_value = app_pipeline_templates_path_mock
+        app_pipeline_templates_path_mock.exists.return_value = True  # An old clone exists
+        app_pipeline_templates_path_mock.joinpath.return_value = selected_pipeline_template_path_mock
+        read_app_pipeline_templates_manifest_mock.return_value = pipeline_templates_manifest_mock
+        click_mock.prompt.return_value = "1"  # App pipeline templates
+
+        # trigger
+        InteractiveInitFlow(allow_bootstrap=False).do_interactive()
+
+        # verify
+        clone_mock.assert_called_once_with(
+            shared_path_mock, APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME, replace_existing=True
+        )
+        app_pipeline_templates_path_mock.exists.assert_called_once()
+        read_app_pipeline_templates_manifest_mock.assert_called_once_with(app_pipeline_templates_path_mock)
+        select_pipeline_template_mock.assert_called_once_with(pipeline_templates_manifest_mock)
generate_from_pipeline_template_mock.assert_called_once_with(selected_pipeline_template_path_mock) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.shared_path") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.lib.cookiecutter.question.click") + def test_app_pipeline_templates_clone_fail_when_no_old_clone_exist(self, click_mock, clone_mock, shared_path_mock): + # setup + clone_mock.side_effect = CloneRepoException # clone fail + app_pipeline_templates_path_mock = Mock() + shared_path_mock.joinpath.return_value = app_pipeline_templates_path_mock + app_pipeline_templates_path_mock.exists.return_value = False # No old clone exists + click_mock.prompt.return_value = "1" # App pipeline templates + + # trigger + with self.assertRaises(PipelineTemplateCloneException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow.click") + @patch("samcli.lib.cookiecutter.question.click") + def test_custom_pipeline_template_clone_fail(self, question_click_mock, init_click_mock, clone_mock): + # setup + clone_mock.side_effect = CloneRepoException # clone fail + question_click_mock.prompt.return_value = "2" # Custom pipeline templates + init_click_mock.prompt.return_value = ( + "https://github.com/any-custom-pipeline-template-repo.git" # Custom pipeline template repo URL + ) + + # trigger + with self.assertRaises(PipelineTemplateCloneException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + @patch("samcli.commands.pipeline.init.interactive_init_flow._read_app_pipeline_templates_manifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.lib.cookiecutter.question.click") + def test_app_pipeline_templates_with_invalid_manifest( + self, click_mock, clone_mock, read_app_pipeline_templates_manifest_mock + ): + # setup + app_pipeline_templates_path_mock = Mock() + clone_mock.return_value = app_pipeline_templates_path_mock + read_app_pipeline_templates_manifest_mock.side_effect = AppPipelineTemplateManifestException("") + click_mock.prompt.return_value = "1" # App pipeline templates + + # trigger + with self.assertRaises(AppPipelineTemplateManifestException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + @patch("samcli.commands.pipeline.init.interactive_init_flow.SamConfig") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch("samcli.commands.pipeline.init.interactive_init_flow.PipelineTemplatesManifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + @patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_from_app_pipeline_template_happy_case( + self, + click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + clone_mock, + PipelineTemplatesManifest_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + samconfig_mock, + ): + # setup + any_app_pipeline_templates_path = Path( + 
os.path.normpath(shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME))
+        )
+        clone_mock.return_value = any_app_pipeline_templates_path
+        jenkins_template_location = "some/location"
+        jenkins_template_mock = Mock(
+            display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins"
+        )
+        pipeline_templates_manifest_mock = Mock(
+            providers=[
+                Mock(id="gitlab", display_name="Gitlab"),
+                Mock(id="jenkins", display_name="Jenkins"),
+            ],
+            templates=[jenkins_template_mock],
+        )
+        PipelineTemplatesManifest_mock.return_value = pipeline_templates_manifest_mock
+        cookiecutter_output_dir_mock = "/tmp/any/dir2"
+        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=cookiecutter_output_dir_mock)
+        interactive_flow_mock = Mock()
+        create_interactive_flow_mock.return_value = interactive_flow_mock
+        cookiecutter_context_mock = {"key": "value"}
+        interactive_flow_mock.run.return_value = cookiecutter_context_mock
+        config_file = Mock()
+        samconfig_mock.return_value = config_file
+        config_file.exists.return_value = True
+        config_file.get_stage_names.return_value = ["testing", "prod"]
+        config_file.get_all.return_value = {"pipeline_execution_role": "arn:aws:iam::123456789012:role/execution-role"}
+
+        click_mock.prompt.side_effect = [
+            "1",  # App pipeline templates
+            "2",  # choose "Jenkins" when prompt for CI/CD system. (See pipeline_templates_manifest_mock, Jenkins is the 2nd provider)
+            "1",  # choose "Jenkins pipeline template" when prompt for pipeline template
+        ]
+        _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2}
+
+        # trigger
+        InteractiveInitFlow(allow_bootstrap=False).do_interactive()
+
+        # verify
+        osutils_mock.mkdir_temp.assert_called()  # cookiecutter project is generated to temp
+        expected_cookiecutter_template_location = any_app_pipeline_templates_path.joinpath(jenkins_template_location)
+        clone_mock.assert_called_once_with(shared_path, APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME, replace_existing=True)
+        PipelineTemplatesManifest_mock.assert_called_once()
+        create_interactive_flow_mock.assert_called_once_with(
+            str(expected_cookiecutter_template_location.joinpath("questions.json"))
+        )
+        interactive_flow_mock.run.assert_called_once_with(
+            {
+                str(["testing", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role",
+                str(["1", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role",
+                str(["prod", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role",
+                str(["2", "pipeline_execution_role"]): "arn:aws:iam::123456789012:role/execution-role",
+                str(["stage_names_message"]): "Here are the stage names detected "
+                f'in {os.path.join(".aws-sam", "pipeline", "pipelineconfig.toml")}:\n\t1 - testing\n\t2 - prod',
+            }
+        )
+        cookiecutter_mock.assert_called_once_with(
+            template=str(expected_cookiecutter_template_location),
+            output_dir=cookiecutter_output_dir_mock,
+            no_input=True,
+            extra_context=cookiecutter_context_mock,
+            overwrite_if_exists=True,
+        )
+
+    @patch("samcli.commands.pipeline.init.interactive_init_flow._read_app_pipeline_templates_manifest")
+    @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone")
+    @patch("samcli.lib.cookiecutter.question.click")
+    def test_generate_pipeline_configuration_file_when_pipeline_template_missing_questions_file(
+        self, click_mock, clone_mock, read_app_pipeline_templates_manifest_mock
+    ):
+        # setup
+        any_app_pipeline_templates_path = 
shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME) + clone_mock.return_value = any_app_pipeline_templates_path + jenkins_template_location = "some/location" + jenkins_template_mock = Mock( + display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins" + ) + pipeline_templates_manifest_mock = Mock( + providers=[ + Mock(id="gitlab", display_name="Gitlab"), + Mock(id="jenkins", display_name="Jenkins"), + ], + templates=[jenkins_template_mock], + ) + read_app_pipeline_templates_manifest_mock.return_value = pipeline_templates_manifest_mock + + click_mock.prompt.side_effect = [ + "1", # App pipeline templates + "2", # choose "Jenkins" when prompt for CI/CD system. (See pipeline_templates_manifest_mock, Jenkins is the 2nd provider) + "1", # choose "Jenkins pipeline template" when prompt for pipeline template + ] + + # trigger + with self.assertRaises(QuestionsNotFoundException): + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + @patch("samcli.commands.pipeline.init.interactive_init_flow.os") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._generate_from_pipeline_template") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow.click") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_from_custom_local_existing_path_will_not_do_git_clone( + self, + questions_click_mock, + init_click_mock, + clone_mock, + generate_from_pipeline_template_mock, + osutils_mock, + os_mock, + ): + # setup + local_pipeline_templates_path = "/any/existing/local/path" + os_mock.path.exists.return_value = True + questions_click_mock.prompt.return_value = "2" # Custom pipeline templates + init_click_mock.prompt.return_value = local_pipeline_templates_path # git repo path + # trigger + InteractiveInitFlow(allow_bootstrap=False).do_interactive() + + # verify + osutils_mock.mkdir_temp.assert_not_called() + clone_mock.assert_not_called() + generate_from_pipeline_template_mock.assert_called_once_with(Path(local_pipeline_templates_path)) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow.click") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + @patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_generate_pipeline_configuration_file_from_custom_remote_pipeline_template_happy_case( + self, + questions_click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + init_click_mock, + clone_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + ): + # setup + any_temp_dir = "/tmp/any/dir" + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock(side_effect=[any_temp_dir, cookiecutter_output_dir_mock]) + osutils_mock.mkdir_temp.return_value.__exit__ = Mock() + any_custom_pipeline_templates_path = Path(os.path.join(any_temp_dir, CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME)) + 
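+        # the custom template repo is cloned into the temp dir rather than the shared app-templates path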
clone_mock.return_value = any_custom_pipeline_templates_path
+        interactive_flow_mock = Mock()
+        create_interactive_flow_mock.return_value = interactive_flow_mock
+        cookiecutter_context_mock = {"key": "value"}
+        interactive_flow_mock.run.return_value = cookiecutter_context_mock
+        _copy_dir_contents_to_cwd_mock.return_value = ["file1"]
+
+        questions_click_mock.prompt.return_value = "2"  # Custom pipeline templates
+        init_click_mock.prompt.return_value = "https://github.com/any-custom-pipeline-template-repo.git"
+        _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2}
+
+        # trigger
+        InteractiveInitFlow(allow_bootstrap=False).do_interactive()
+
+        # verify
+        # Custom templates are cloned to temp; cookiecutter project is generated to temp
+        osutils_mock.mkdir_temp.assert_called()
+        clone_mock.assert_called_once_with(
+            Path(any_temp_dir), CUSTOM_PIPELINE_TEMPLATE_REPO_LOCAL_NAME, replace_existing=True
+        )
+        create_interactive_flow_mock.assert_called_once_with(
+            str(any_custom_pipeline_templates_path.joinpath("questions.json"))
+        )
+        interactive_flow_mock.run.assert_called_once()
+        cookiecutter_mock.assert_called_once_with(
+            template=str(any_custom_pipeline_templates_path),
+            output_dir=cookiecutter_output_dir_mock,
+            no_input=True,
+            extra_context=cookiecutter_context_mock,
+            overwrite_if_exists=True,
+        )
+
+    @patch("samcli.lib.cookiecutter.question.click")
+    def test_prompt_cicd_provider_will_not_prompt_if_the_list_of_providers_has_only_one_provider(self, click_mock):
+        gitlab_provider = Mock(id="gitlab", display_name="Gitlab CI/CD")
+        providers = [gitlab_provider]
+
+        chosen_provider = _prompt_cicd_provider(providers)
+        click_mock.prompt.assert_not_called()
+        self.assertEqual(chosen_provider, gitlab_provider)
+
+        jenkins_provider = Mock(id="jenkins", display_name="Jenkins")
+        providers.append(jenkins_provider)
+        click_mock.prompt.return_value = "2"
+        chosen_provider = _prompt_cicd_provider(providers)
+        click_mock.prompt.assert_called_once()
+        self.assertEqual(chosen_provider, jenkins_provider)
+
+    @patch("samcli.lib.cookiecutter.question.click")
+    def test_prompt_provider_pipeline_template_will_not_prompt_if_the_list_of_templates_has_only_one_template(
+        self, click_mock
+    ):
+        template1 = Mock(display_name="anyName1", location="anyLocation1", provider="a provider")
+        template2 = Mock(display_name="anyName2", location="anyLocation2", provider="a provider")
+        templates = [template1]
+
+        chosen_template = _prompt_provider_pipeline_template(templates)
+        click_mock.prompt.assert_not_called()
+        self.assertEqual(chosen_template, template1)
+
+        templates.append(template2)
+        click_mock.prompt.return_value = "2"
+        chosen_template = _prompt_provider_pipeline_template(templates)
+        click_mock.prompt.assert_called_once()
+        self.assertEqual(chosen_template, template2)
+
+    def test_get_pipeline_template_metadata_can_load(self):
+        with tempfile.TemporaryDirectory() as dir:
+            metadata = {"number_of_stages": 2}
+            with open(Path(dir, "metadata.json"), "w") as f:
+                json.dump(metadata, f)
+            self.assertEqual(metadata, _get_pipeline_template_metadata(dir))
+
+    def test_get_pipeline_template_metadata_not_exist(self):
+        with tempfile.TemporaryDirectory() as dir:
+            with self.assertRaises(AppPipelineTemplateMetadataException):
+                _get_pipeline_template_metadata(dir)
+
+    @parameterized.expand(
+        [
+            ('["not_a_dict"]',),
+            ("not a json",),
+        ]
+    )
+    def test_get_pipeline_template_metadata_not_valid(self, metadata_str):
+        with tempfile.TemporaryDirectory() as dir:
+            with open(Path(dir, 
"metadata.json"), "w") as f: + f.write(metadata_str) + with self.assertRaises(AppPipelineTemplateMetadataException): + _get_pipeline_template_metadata(dir) + + +class TestInteractiveInitFlowWithBootstrap(TestCase): + @patch("samcli.commands.pipeline.init.interactive_init_flow.SamConfig") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch( + "samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._prompt_run_bootstrap_within_pipeline_init" + ) + @patch("samcli.commands.pipeline.init.interactive_init_flow.PipelineTemplatesManifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + @patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_with_bootstrap_but_answer_no( + self, + click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + clone_mock, + PipelineTemplatesManifest_mock, + _prompt_run_bootstrap_within_pipeline_init_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + samconfig_mock, + ): + # setup + any_app_pipeline_templates_path = Path( + os.path.normpath(shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME)) + ) + clone_mock.return_value = any_app_pipeline_templates_path + jenkins_template_location = "some/location" + jenkins_template_mock = Mock( + display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins" + ) + pipeline_templates_manifest_mock = Mock( + providers=[ + Mock(id="gitlab", display_name="Gitlab"), + Mock(id="jenkins", display_name="Jenkins"), + ], + templates=[jenkins_template_mock], + ) + PipelineTemplatesManifest_mock.return_value = pipeline_templates_manifest_mock + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=cookiecutter_output_dir_mock) + interactive_flow_mock = Mock() + create_interactive_flow_mock.return_value = interactive_flow_mock + cookiecutter_context_mock = {"key": "value"} + interactive_flow_mock.run.return_value = cookiecutter_context_mock + config_file = Mock() + samconfig_mock.return_value = config_file + config_file.exists.return_value = True + config_file.get_stage_names.return_value = ["testing"] + config_file.get_all.return_value = {"pipeline_execution_role": "arn:aws:iam::123456789012:role/execution-role"} + _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2} + + click_mock.prompt.side_effect = [ + "1", # App pipeline templates + "2", + # choose "Jenkins" when prompt for CI/CD system. 
(See pipeline_templates_manifest_mock, Jenkins is the 2nd provider) + "1", # choose "Jenkins pipeline template" when prompt for pipeline template + ] + + _prompt_run_bootstrap_within_pipeline_init_mock.return_value = False # not to bootstrap + + # trigger + InteractiveInitFlow(allow_bootstrap=True).do_interactive() + + # verify + _prompt_run_bootstrap_within_pipeline_init_mock.assert_called_once_with(["testing"], 2) + + @parameterized.expand( + [ + ([["testing"], ["testing", "prod"]], [call(["testing"], 2)]), + ([[], ["testing"], ["testing", "prod"]], [call([], 2), call(["testing"], 2)]), + ] + ) + @patch("samcli.commands.pipeline.init.interactive_init_flow.SamConfig") + @patch("samcli.commands.pipeline.init.interactive_init_flow.osutils") + @patch("samcli.lib.cookiecutter.template.cookiecutter") + @patch("samcli.commands.pipeline.init.interactive_init_flow.InteractiveFlowCreator.create_flow") + @patch( + "samcli.commands.pipeline.init.interactive_init_flow.InteractiveInitFlow._prompt_run_bootstrap_within_pipeline_init" + ) + @patch("samcli.commands.pipeline.init.interactive_init_flow.PipelineTemplatesManifest") + @patch("samcli.commands.pipeline.init.interactive_init_flow.GitRepo.clone") + @patch("samcli.commands.pipeline.init.interactive_init_flow._copy_dir_contents_to_cwd") + @patch("samcli.commands.pipeline.init.interactive_init_flow._get_pipeline_template_metadata") + @patch("samcli.lib.cookiecutter.question.click") + def test_with_bootstrap_answer_yes( + self, + get_stage_name_side_effects, + _prompt_run_bootstrap_expected_calls, + click_mock, + _get_pipeline_template_metadata_mock, + _copy_dir_contents_to_cwd_mock, + clone_mock, + PipelineTemplatesManifest_mock, + _prompt_run_bootstrap_within_pipeline_init_mock, + create_interactive_flow_mock, + cookiecutter_mock, + osutils_mock, + samconfig_mock, + ): + # setup + any_app_pipeline_templates_path = Path( + os.path.normpath(shared_path.joinpath(APP_PIPELINE_TEMPLATES_REPO_LOCAL_NAME)) + ) + clone_mock.return_value = any_app_pipeline_templates_path + jenkins_template_location = "some/location" + jenkins_template_mock = Mock( + display_name="Jenkins pipeline template", location=jenkins_template_location, provider="jenkins" + ) + pipeline_templates_manifest_mock = Mock( + providers=[ + Mock(id="gitlab", display_name="Gitlab"), + Mock(id="jenkins", display_name="Jenkins"), + ], + templates=[jenkins_template_mock], + ) + PipelineTemplatesManifest_mock.return_value = pipeline_templates_manifest_mock + cookiecutter_output_dir_mock = "/tmp/any/dir2" + osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=cookiecutter_output_dir_mock) + interactive_flow_mock = Mock() + create_interactive_flow_mock.return_value = interactive_flow_mock + cookiecutter_context_mock = {"key": "value"} + interactive_flow_mock.run.return_value = cookiecutter_context_mock + config_file = Mock() + samconfig_mock.return_value = config_file + config_file.exists.return_value = True + config_file.get_stage_names.side_effect = get_stage_name_side_effects + config_file.get_all.return_value = {"pipeline_execution_role": "arn:aws:iam::123456789012:role/execution-role"} + _get_pipeline_template_metadata_mock.return_value = {"number_of_stages": 2} + + click_mock.prompt.side_effect = [ + "1", # App pipeline templates + "2", + # choose "Jenkins" when prompt for CI/CD system. 
(See pipeline_templates_manifest_mock, Jenkins is the 2nd provider) + "1", # choose "Jenkins pipeline template" when prompt for pipeline template + ] + + _prompt_run_bootstrap_within_pipeline_init_mock.return_value = True # to bootstrap + + # trigger + InteractiveInitFlow(allow_bootstrap=True).do_interactive() + + # verify + _prompt_run_bootstrap_within_pipeline_init_mock.assert_has_calls(_prompt_run_bootstrap_expected_calls) + + +class TestInteractiveInitFlow_copy_dir_contents_to_cwd(TestCase): + def tearDown(self) -> None: + if Path("file").exists(): + Path("file").unlink() + shutil.rmtree(os.path.join(".aws-sam", "pipeline"), ignore_errors=True) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.click.confirm") + def test_copy_dir_contents_to_cwd_no_need_override(self, confirm_mock): + with tempfile.TemporaryDirectory() as source: + confirm_mock.return_value = True + Path(source, "file").touch() + Path(source, "file").write_text("hi") + file_paths = _copy_dir_contents_to_cwd(source) + confirm_mock.assert_not_called() + self.assertEqual("hi", Path("file").read_text(encoding="utf-8")) + self.assertEqual([str(Path(".", "file"))], file_paths) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.click.confirm") + def test_copy_dir_contents_to_cwd_override(self, confirm_mock): + with tempfile.TemporaryDirectory() as source: + confirm_mock.return_value = True + Path(source, "file").touch() + Path(source, "file").write_text("hi") + Path("file").touch() + file_paths = _copy_dir_contents_to_cwd(source) + confirm_mock.assert_called_once() + self.assertEqual("hi", Path("file").read_text(encoding="utf-8")) + self.assertEqual([str(Path(".", "file"))], file_paths) + + @patch("samcli.commands.pipeline.init.interactive_init_flow.click.confirm") + def test_copy_dir_contents_to_cwd_not_override(self, confirm_mock): + with tempfile.TemporaryDirectory() as source: + confirm_mock.return_value = False + Path(source, "file").touch() + Path(source, "file").write_text("hi") + Path("file").touch() + file_paths = _copy_dir_contents_to_cwd(source) + confirm_mock.assert_called_once() + self.assertEqual("", Path("file").read_text(encoding="utf-8")) + self.assertEqual([str(Path(".aws-sam", "pipeline", "generated-files", "file"))], file_paths) diff --git a/tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py b/tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py new file mode 100644 index 0000000000..d35541c3f6 --- /dev/null +++ b/tests/unit/commands/pipeline/init/test_pipeline_templates_manifest.py @@ -0,0 +1,82 @@ +from unittest import TestCase +import os +from pathlib import Path +from samcli.commands.pipeline.init.pipeline_templates_manifest import ( + Provider, + PipelineTemplatesManifest, + PipelineTemplateMetadata, + AppPipelineTemplateManifestException, +) +from samcli.lib.utils import osutils + +INVALID_YAML_MANIFEST = """ +providers: +- Jenkins with wrong identation +""" + +MISSING_KEYS_MANIFEST = """ +NotProviders: + - Jenkins +Templates: + - NotName: jenkins-two-environments-pipeline + provider: Jenkins + location: templates/cookiecutter-jenkins-two-environments-pipeline +""" + +VALID_MANIFEST = """ +providers: + - displayName: Jenkins + id: jenkins + - displayName: Gitlab CI/CD + id: gitlab + - displayName: Github Actions + id: github-actions +templates: + - displayName: jenkins-two-environments-pipeline + provider: jenkins + location: templates/cookiecutter-jenkins-two-environments-pipeline + - displayName: gitlab-two-environments-pipeline + 
provider: gitlab
+    location: templates/cookiecutter-gitlab-two-environments-pipeline
+  - displayName: Github-Actions-two-environments-pipeline
+    provider: github-actions
+    location: templates/cookiecutter-github-actions-two-environments-pipeline
+"""
+
+
+class TestCli(TestCase):
+    def test_manifest_file_not_found(self):
+        non_existing_path = Path(os.path.normpath("/any/non/existing/manifest.yaml"))
+        with self.assertRaises(AppPipelineTemplateManifestException):
+            PipelineTemplatesManifest(manifest_path=non_existing_path)
+
+    def test_invalid_yaml_manifest_file(self):
+        with osutils.mkdir_temp(ignore_errors=True) as tempdir:
+            manifest_path = os.path.normpath(os.path.join(tempdir, "manifest.yaml"))
+            with open(manifest_path, "w", encoding="utf-8") as fp:
+                fp.write(INVALID_YAML_MANIFEST)
+            with self.assertRaises(AppPipelineTemplateManifestException):
+                PipelineTemplatesManifest(manifest_path=Path(manifest_path))
+
+    def test_manifest_missing_required_keys(self):
+        with osutils.mkdir_temp(ignore_errors=True) as tempdir:
+            manifest_path = os.path.normpath(os.path.join(tempdir, "manifest.yaml"))
+            with open(manifest_path, "w", encoding="utf-8") as fp:
+                fp.write(MISSING_KEYS_MANIFEST)
+            with self.assertRaises(AppPipelineTemplateManifestException):
+                PipelineTemplatesManifest(manifest_path=Path(manifest_path))
+
+    def test_manifest_happy_case(self):
+        with osutils.mkdir_temp(ignore_errors=True) as tempdir:
+            manifest_path = os.path.normpath(os.path.join(tempdir, "manifest.yaml"))
+            with open(manifest_path, "w", encoding="utf-8") as fp:
+                fp.write(VALID_MANIFEST)
+            manifest = PipelineTemplatesManifest(manifest_path=Path(manifest_path))
+            self.assertEqual(len(manifest.providers), 3)
+            gitlab_provider: Provider = next(p for p in manifest.providers if p.id == "gitlab")
+            self.assertEqual(gitlab_provider.display_name, "Gitlab CI/CD")
+            self.assertEqual(len(manifest.templates), 3)
+            gitlab_template: PipelineTemplateMetadata = next(t for t in manifest.templates if t.provider == "gitlab")
+            self.assertEqual(gitlab_template.display_name, "gitlab-two-environments-pipeline")
+            self.assertEqual(gitlab_template.provider, "gitlab")
+            self.assertEqual(gitlab_template.location, "templates/cookiecutter-gitlab-two-environments-pipeline")
diff --git a/tests/unit/lib/bootstrap/test_bootstrap.py b/tests/unit/lib/bootstrap/test_bootstrap.py
index 8094a404c0..e62ad26a5c 100644
--- a/tests/unit/lib/bootstrap/test_bootstrap.py
+++ b/tests/unit/lib/bootstrap/test_bootstrap.py
@@ -1,23 +1,45 @@
 from unittest import TestCase
-from unittest.mock import patch
+from unittest.mock import patch, MagicMock

-from samcli.commands.exceptions import UserException
-from samcli.lib.bootstrap.bootstrap import manage_stack
+from samcli.commands.exceptions import UserException, CredentialsError
+from samcli.lib.bootstrap.bootstrap import manage_stack, StackOutput, get_current_account_id


 class TestBootstrapManagedStack(TestCase):
     @patch("samcli.lib.bootstrap.bootstrap.manage_cloudformation_stack")
     def test_stack_missing_bucket(self, manage_cfn_stack_mock):
-        manage_cfn_stack_mock.return_value = []
+        manage_cfn_stack_mock.return_value = StackOutput(stack_output=[])
         with self.assertRaises(UserException):
             manage_stack("testProfile", "fakeRegion")
-        manage_cfn_stack_mock.return_value = [{"OutputKey": "NotSourceBucket", "OutputValue": "AnyValue"}]
+        manage_cfn_stack_mock.return_value = StackOutput(
+            stack_output=[{"OutputKey": "NotSourceBucket", "OutputValue": "AnyValue"}]
+        )
         with self.assertRaises(UserException): 
manage_stack("testProfile", "fakeRegion") @patch("samcli.lib.bootstrap.bootstrap.manage_cloudformation_stack") def test_manage_stack_happy_case(self, manage_cfn_stack_mock): expected_bucket_name = "BucketName" - manage_cfn_stack_mock.return_value = [{"OutputKey": "SourceBucket", "OutputValue": expected_bucket_name}] + manage_cfn_stack_mock.return_value = StackOutput( + stack_output=[{"OutputKey": "SourceBucket", "OutputValue": expected_bucket_name}] + ) actual_bucket_name = manage_stack("testProfile", "fakeRegion") self.assertEqual(actual_bucket_name, expected_bucket_name) + + @patch("samcli.lib.bootstrap.bootstrap.boto3") + def test_get_current_account_id(self, boto3_mock): + session_mock = boto3_mock.Session.return_value = MagicMock() + sts_mock = MagicMock() + sts_mock.get_caller_identity.return_value = {"Account": 1234567890} + session_mock.client.return_value = sts_mock + account_id = get_current_account_id() + self.assertEqual(account_id, 1234567890) + + @patch("samcli.lib.bootstrap.bootstrap.boto3") + def test_get_current_account_id_missing_id(self, boto3_mock): + session_mock = boto3_mock.Session.return_value = MagicMock() + sts_mock = MagicMock() + sts_mock.get_caller_identity.return_value = {} + session_mock.client.return_value = sts_mock + with self.assertRaises(CredentialsError): + get_current_account_id() diff --git a/tests/unit/lib/cookiecutter/test_question.py b/tests/unit/lib/cookiecutter/test_question.py index c46a37fa43..2db7055357 100644 --- a/tests/unit/lib/cookiecutter/test_question.py +++ b/tests/unit/lib/cookiecutter/test_question.py @@ -27,6 +27,7 @@ def setUp(self): key=self._ANY_KEY, default=self._ANY_ANSWER, is_required=True, + allow_autofill=False, next_question_map=self._ANY_NEXT_QUESTION_MAP, default_next_question_key=self._ANY_DEFAULT_NEXT_QUESTION_KEY, ) @@ -151,6 +152,16 @@ def test_ask_resolves_from_cookiecutter_context_with_default_object_missing_keys with self.assertRaises(KeyError): question.ask(context=context) + def test_question_allow_autofill_with_default_value(self): + q = Question(text=self._ANY_TEXT, key=self._ANY_KEY, is_required=True, allow_autofill=True, default="123") + self.assertEquals("123", q.ask()) + + @patch("samcli.lib.cookiecutter.question.click") + def test_question_allow_autofill_without_default_value(self, click_mock): + answer_mock = click_mock.prompt.return_value = Mock() + q = Question(text=self._ANY_TEXT, key=self._ANY_KEY, is_required=True, allow_autofill=True) + self.assertEquals(answer_mock, q.ask()) + class TestChoice(TestCase): def setUp(self): @@ -188,7 +199,11 @@ def test_ask(self, mock_click, mock_choice): answer = self.question.ask({}) self.assertEqual(answer, TestQuestion._ANY_OPTIONS[1]) # we deduct one from user's choice (base 1 vs base 0) mock_click.prompt.assert_called_once_with( - text="Choice", default=self.question.default_answer, show_choices=False, type=ANY + text="Choice", + default=self.question.default_answer, + show_choices=False, + type=ANY, + show_default=self.question.default_answer is not None, ) mock_choice.assert_called_once_with(["1", "2", "3"]) diff --git a/tests/unit/lib/cookiecutter/test_template.py b/tests/unit/lib/cookiecutter/test_template.py index edb7412f59..318939f46b 100644 --- a/tests/unit/lib/cookiecutter/test_template.py +++ b/tests/unit/lib/cookiecutter/test_template.py @@ -114,11 +114,16 @@ def test_generate_project(self, mock_preprocessor, mock_postprocessor, mock_inte postprocessors=[mock_postprocessor], ) mock_preprocessor.run.return_value = self._ANY_PROCESSOR_CONTEXT - 
t.generate_project(context=self._ANY_INTERACTIVE_FLOW_CONTEXT) + output_dir = Mock() + t.generate_project(context=self._ANY_INTERACTIVE_FLOW_CONTEXT, output_dir=output_dir) mock_interactive_flow.run.assert_not_called() mock_preprocessor.run.assert_called_once_with(self._ANY_INTERACTIVE_FLOW_CONTEXT) mock_cookiecutter.assert_called_with( - template=self._ANY_LOCATION, output_dir=".", no_input=True, extra_context=self._ANY_PROCESSOR_CONTEXT + template=self._ANY_LOCATION, + output_dir=output_dir, + no_input=True, + extra_context=self._ANY_PROCESSOR_CONTEXT, + overwrite_if_exists=True, ) mock_postprocessor.run.assert_called_once_with(self._ANY_PROCESSOR_CONTEXT) @@ -127,7 +132,7 @@ def test_generate_project_preprocessors_exceptions(self, mock_preprocessor): t = Template(location=self._ANY_LOCATION, preprocessors=[mock_preprocessor]) with self.assertRaises(PreprocessingError): mock_preprocessor.run.side_effect = Exception("something went wrong") - t.generate_project({}) + t.generate_project({}, Mock()) @patch("samcli.lib.cookiecutter.template.cookiecutter") @patch("samcli.lib.cookiecutter.processor") @@ -135,7 +140,7 @@ def test_generate_project_postprocessors_exceptions(self, mock_postprocessor, mo t = Template(location=self._ANY_LOCATION, postprocessors=[mock_postprocessor]) with self.assertRaises(PostprocessingError): mock_postprocessor.run.side_effect = Exception("something went wrong") - t.generate_project({}) + t.generate_project({}, Mock()) @patch("samcli.lib.cookiecutter.template.generate_non_cookiecutter_project") @patch("samcli.lib.cookiecutter.template.cookiecutter") @@ -143,13 +148,13 @@ def test_generate_project_cookiecutter_exceptions(self, mock_cookiecutter, mock_ t = Template(location=self._ANY_LOCATION) with self.assertRaises(InvalidLocationError): mock_cookiecutter.side_effect = UnknownRepoType() - t.generate_project({}) + t.generate_project({}, Mock()) mock_cookiecutter.reset_mock() with self.assertRaises(GenerateProjectFailedError): mock_cookiecutter.side_effect = Exception("something went wrong") - t.generate_project({}) + t.generate_project({}, Mock()) mock_cookiecutter.reset_mock() # if the provided template is not a cookiecutter template, we generate a non cookiecutter template mock_cookiecutter.side_effect = RepositoryNotFound() - t.generate_project({}) + t.generate_project({}, Mock()) mock_generate_non_cookiecutter_project.assert_called_once() diff --git a/tests/unit/lib/pipeline/__init__.py b/tests/unit/lib/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/pipeline/bootstrap/__init__.py b/tests/unit/lib/pipeline/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/pipeline/bootstrap/test_environment.py b/tests/unit/lib/pipeline/bootstrap/test_environment.py new file mode 100644 index 0000000000..9a12f2be15 --- /dev/null +++ b/tests/unit/lib/pipeline/bootstrap/test_environment.py @@ -0,0 +1,425 @@ +from unittest import TestCase +from unittest.mock import Mock, patch, call, MagicMock + +from samcli.lib.pipeline.bootstrap.stage import Stage + +ANY_STAGE_NAME = "ANY_STAGE_NAME" +ANY_PIPELINE_USER_ARN = "ANY_PIPELINE_USER_ARN" +ANY_PIPELINE_EXECUTION_ROLE_ARN = "ANY_PIPELINE_EXECUTION_ROLE_ARN" +ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN = "ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN" +ANY_ARTIFACTS_BUCKET_ARN = "ANY_ARTIFACTS_BUCKET_ARN" +ANY_IMAGE_REPOSITORY_ARN = "ANY_IMAGE_REPOSITORY_ARN" +ANY_ARN = "ANY_ARN" + + +class TestStage(TestCase): + def 
test_stage_name_is_the_only_required_field_to_initialize_an_stage(self): + stage: Stage = Stage(name=ANY_STAGE_NAME) + self.assertEqual(stage.name, ANY_STAGE_NAME) + self.assertIsNone(stage.aws_profile) + self.assertIsNone(stage.aws_region) + self.assertIsNotNone(stage.pipeline_user) + self.assertIsNotNone(stage.pipeline_execution_role) + self.assertIsNotNone(stage.cloudformation_execution_role) + self.assertIsNotNone(stage.artifacts_bucket) + self.assertIsNotNone(stage.image_repository) + + with self.assertRaises(TypeError): + Stage() + + def test_did_user_provide_all_required_resources_when_not_all_resources_are_provided(self): + stage: Stage = Stage(name=ANY_STAGE_NAME) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage(name=ANY_STAGE_NAME, pipeline_user_arn=ANY_PIPELINE_USER_ARN) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + + def test_did_user_provide_all_required_resources_ignore_image_repository_if_it_is_not_required(self): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=False, + ) + self.assertTrue(stage.did_user_provide_all_required_resources()) + + def test_did_user_provide_all_required_resources_when_image_repository_is_required(self): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + ) + self.assertFalse(stage.did_user_provide_all_required_resources()) + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + self.assertTrue(stage.did_user_provide_all_required_resources()) + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def 
test_did_user_provide_all_required_resources_returns_false_if_the_stage_was_initialized_without_any_of_the_resources_even_if_fulfilled_after_bootstrap( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + # setup + stack_output = Mock() + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stack_output.get.return_value = ANY_ARN + manage_stack_mock.return_value = stack_output + stage: Stage = Stage(name=ANY_STAGE_NAME) + + self.assertFalse(stage.did_user_provide_all_required_resources()) + + stage.bootstrap(confirm_changeset=False) + # After bootstrapping, all the resources should be fulfilled + self.assertEqual(ANY_ARN, stage.pipeline_user.arn) + self.assertEqual(ANY_ARN, stage.pipeline_execution_role.arn) + self.assertEqual(ANY_ARN, stage.cloudformation_execution_role.arn) + self.assertEqual(ANY_ARN, stage.artifacts_bucket.arn) + self.assertEqual(ANY_ARN, stage.image_repository.arn) + + # although all of the resources got fulfilled, `did_user_provide_all_required_resources` should return false + # as these resources are not provided by the user + self.assertFalse(stage.did_user_provide_all_required_resources()) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + @patch.object(Stage, "did_user_provide_all_required_resources") + def test_bootstrap_will_not_deploy_the_cfn_template_if_all_resources_are_already_provided( + self, did_user_provide_all_required_resources_mock, manage_stack_mock, click_mock + ): + did_user_provide_all_required_resources_mock.return_value = True + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.bootstrap(confirm_changeset=False) + manage_stack_mock.assert_not_called() + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_confirm_before_deploying_unless_confirm_changeset_is_disabled( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + click_mock.confirm.return_value = False + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.bootstrap(confirm_changeset=False) + click_mock.confirm.assert_not_called() + manage_stack_mock.assert_called_once() + manage_stack_mock.reset_mock() + stage.bootstrap(confirm_changeset=True) + click_mock.confirm.assert_called_once() + manage_stack_mock.assert_not_called() # As the user choose to not confirm + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_not_deploy_the_cfn_template_if_the_user_did_not_confirm( + self, manage_stack_mock, click_mock + ): + click_mock.confirm.return_value = False + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.bootstrap(confirm_changeset=True) + manage_stack_mock.assert_not_called() + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_deploy_the_cfn_template_if_the_user_did_confirm( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + click_mock.confirm.return_value = True + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.bootstrap(confirm_changeset=True) + 
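+        # the user confirmed the changeset, so the bootstrap stack is deployed exactly once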
manage_stack_mock.assert_called_once() + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_pass_arns_of_all_user_provided_resources_any_empty_strings_for_other_resources_to_the_cfn_stack( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + click_mock.confirm.return_value = True + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + stage.bootstrap() + manage_stack_mock.assert_called_once() + args, kwargs = manage_stack_mock.call_args_list[0] + expected_parameter_overrides = { + "PipelineUserArn": ANY_PIPELINE_USER_ARN, + "PipelineExecutionRoleArn": "", + "CloudFormationExecutionRoleArn": "", + "ArtifactsBucketArn": ANY_ARTIFACTS_BUCKET_ARN, + "CreateImageRepository": "true", + "ImageRepositoryArn": ANY_IMAGE_REPOSITORY_ARN, + } + self.assertEqual(expected_parameter_overrides, kwargs["parameter_overrides"]) + + @patch("samcli.lib.pipeline.bootstrap.stage.Stage._get_pipeline_user_secret_pair") + @patch("samcli.lib.pipeline.bootstrap.stage.click") + @patch("samcli.lib.pipeline.bootstrap.stage.manage_stack") + def test_bootstrap_will_fullfill_all_resource_arns( + self, manage_stack_mock, click_mock, pipeline_user_secret_pair_mock + ): + # setup + pipeline_user_secret_pair_mock.return_value = ("id", "secret") + stack_output = Mock() + stack_output.get.return_value = ANY_ARN + manage_stack_mock.return_value = stack_output + stage: Stage = Stage(name=ANY_STAGE_NAME) + click_mock.confirm.return_value = True + + # verify resources' ARNS are empty + self.assertIsNone(stage.pipeline_user.arn) + self.assertIsNone(stage.pipeline_execution_role.arn) + self.assertIsNone(stage.cloudformation_execution_role.arn) + self.assertIsNone(stage.artifacts_bucket.arn) + + # trigger + stage.bootstrap() + + # verify + manage_stack_mock.assert_called_once() + self.assertEqual(ANY_ARN, stage.pipeline_user.arn) + self.assertEqual(ANY_ARN, stage.pipeline_execution_role.arn) + self.assertEqual(ANY_ARN, stage.cloudformation_execution_role.arn) + self.assertEqual(ANY_ARN, stage.artifacts_bucket.arn) + + @patch("samcli.lib.pipeline.bootstrap.stage.SamConfig") + def test_save_config_escapes_none_resources(self, samconfig_mock): + cmd_names = ["any", "commands"] + samconfig_instance_mock = Mock() + samconfig_mock.return_value = samconfig_instance_mock + stage: Stage = Stage(name=ANY_STAGE_NAME) + + empty_ecr_call = call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="image_repository", + value="", + ) + + expected_calls = [] + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.pipeline_user.arn = ANY_PIPELINE_USER_ARN + expected_calls.append( + call(cmd_names=cmd_names, section="parameters", key="pipeline_user", value=ANY_PIPELINE_USER_ARN) + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.pipeline_execution_role.arn = ANY_PIPELINE_EXECUTION_ROLE_ARN + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="pipeline_execution_role", + 
value=ANY_PIPELINE_EXECUTION_ROLE_ARN, + ), + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.cloudformation_execution_role.arn = ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="cloudformation_execution_role", + value=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + ), + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.artifacts_bucket.arn = "arn:aws:s3:::artifact_bucket_name" + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="artifacts_bucket", + value="artifact_bucket_name", + ), + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put + ) + + stage.image_repository.arn = "arn:aws:ecr:us-east-2:111111111111:repository/image_repository_name" + expected_calls.append( + call( + cmd_names=cmd_names, + section="parameters", + env=ANY_STAGE_NAME, + key="image_repository", + value="111111111111.dkr.ecr.us-east-2.amazonaws.com/image_repository_name", + ) + ) + self.trigger_and_assert_save_config_calls(stage, cmd_names, expected_calls, samconfig_instance_mock.put) + + def trigger_and_assert_save_config_calls(self, stage, cmd_names, expected_calls, samconfig_put_mock): + stage.save_config(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=cmd_names) + self.assertEqual(len(expected_calls), samconfig_put_mock.call_count) + samconfig_put_mock.assert_has_calls(expected_calls, any_order=True) + samconfig_put_mock.reset_mock() + + @patch("samcli.lib.pipeline.bootstrap.stage.boto3") + def test_getting_pipeline_user_credentials(self, boto3_mock): + sm_client_mock = MagicMock() + sm_client_mock.get_secret_value.return_value = { + "SecretString": '{"aws_access_key_id": "AccessKeyId", "aws_secret_access_key": "SuperSecretKey"}' + } + session_mock = MagicMock() + session_mock.client.return_value = sm_client_mock + boto3_mock.Session.return_value = session_mock + + (key, secret) = Stage._get_pipeline_user_secret_pair("dummy_arn", None, "dummy-region") + self.assertEqual(key, "AccessKeyId") + self.assertEqual(secret, "SuperSecretKey") + sm_client_mock.get_secret_value.assert_called_once_with(SecretId="dummy_arn") + + @patch("samcli.lib.pipeline.bootstrap.stage.SamConfig") + def test_save_config_ignores_exceptions_thrown_while_calculating_artifacts_bucket_name(self, samconfig_mock): + samconfig_instance_mock = Mock() + samconfig_mock.return_value = samconfig_instance_mock + stage: Stage = Stage(name=ANY_STAGE_NAME, artifacts_bucket_arn="invalid ARN") + # calling artifacts_bucket.name() during save_config() will raise a ValueError exception, we need to make sure + # this exception is swallowed so that other configs can be safely saved to the pipelineconfig.toml file + stage.save_config(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=["any", "commands"]) + + @patch("samcli.lib.pipeline.bootstrap.stage.SamConfig") + def test_save_config_ignores_exceptions_thrown_while_calculating_image_repository_uri(self, samconfig_mock): + samconfig_instance_mock = Mock() + samconfig_mock.return_value = samconfig_instance_mock + stage: Stage = Stage(name=ANY_STAGE_NAME, image_repository_arn="invalid ARN") + # calling image_repository.get_uri() during save_config() will raise a 
ValueError exception, we need to make + # sure this exception is swallowed so that other configs can be safely saved to the pipelineconfig.toml file + stage.save_config(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=["any", "commands"]) + + @patch.object(Stage, "save_config") + def test_save_config_safe(self, save_config_mock): + save_config_mock.side_effect = Exception + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.save_config_safe(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=["commands"]) + save_config_mock.assert_called_once_with("any_config_dir", "any_pipeline.toml", ["commands"]) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_when_no_resources_provided_by_the_user(self, click_mock): + stage: Stage = Stage(name=ANY_STAGE_NAME) + stage.print_resources_summary() + self.assert_summary_has_a_message_like("The following resources were created in your account", click_mock.secho) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_when_all_resources_are_provided_by_the_user(self, click_mock): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + pipeline_execution_role_arn=ANY_PIPELINE_EXECUTION_ROLE_ARN, + cloudformation_execution_role_arn=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + stage.print_resources_summary() + self.assert_summary_does_not_have_a_message_like( + "The following resources were created in your account", click_mock.secho + ) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_when_some_resources_are_provided_by_the_user(self, click_mock): + stage: Stage = Stage( + name=ANY_STAGE_NAME, + pipeline_user_arn=ANY_PIPELINE_USER_ARN, + artifacts_bucket_arn=ANY_ARTIFACTS_BUCKET_ARN, + create_image_repository=True, + image_repository_arn=ANY_IMAGE_REPOSITORY_ARN, + ) + stage.print_resources_summary() + self.assert_summary_has_a_message_like("The following resources were created in your account", click_mock.secho) + + @patch("samcli.lib.pipeline.bootstrap.stage.click") + def test_print_resources_summary_prints_the_credentials_of_the_pipeline_user_iff_not_provided_by_the_user( + self, click_mock + ): + stage_with_provided_pipeline_user: Stage = Stage(name=ANY_STAGE_NAME, pipeline_user_arn=ANY_PIPELINE_USER_ARN) + stage_with_provided_pipeline_user.print_resources_summary() + self.assert_summary_does_not_have_a_message_like("AWS_ACCESS_KEY_ID", click_mock.secho) + self.assert_summary_does_not_have_a_message_like("AWS_SECRET_ACCESS_KEY", click_mock.secho) + click_mock.secho.reset_mock() + + stage_without_provided_pipeline_user: Stage = Stage(name=ANY_STAGE_NAME) + stage_without_provided_pipeline_user.print_resources_summary() + self.assert_summary_has_a_message_like("AWS_ACCESS_KEY_ID", click_mock.secho) + self.assert_summary_has_a_message_like("AWS_SECRET_ACCESS_KEY", click_mock.secho) + + def assert_summary_has_a_message_like(self, msg, click_secho_mock): + self.assertTrue( + self.does_summary_have_a_message_like(msg, click_secho_mock), + msg=f'stage resources summary does not include "{msg}" which is unexpected', + ) + + def assert_summary_does_not_have_a_message_like(self, msg, click_secho_mock): + self.assertFalse( + self.does_summary_have_a_message_like(msg, click_secho_mock), + msg=f'stage resources summary includes "{msg}" which is unexpected', 
+        )
+
+    @staticmethod
+    def does_summary_have_a_message_like(msg, click_secho_mock):
+        msg = msg.lower()
+        for kall in click_secho_mock.call_args_list:
+            args, kwargs = kall
+            if args:
+                message = args[0].lower()
+            else:
+                message = kwargs.get("message", "").lower()
+            if msg in message:
+                return True
+        return False
diff --git a/tests/unit/lib/pipeline/bootstrap/test_resource.py b/tests/unit/lib/pipeline/bootstrap/test_resource.py
new file mode 100644
index 0000000000..f7dcab50f2
--- /dev/null
+++ b/tests/unit/lib/pipeline/bootstrap/test_resource.py
@@ -0,0 +1,81 @@
+from unittest import TestCase
+
+from samcli.lib.pipeline.bootstrap.resource import ARNParts, Resource, IAMUser, ECRImageRepository
+
+VALID_ARN = "arn:partition:service:region:account-id:resource-id"
+INVALID_ARN = "ARN"
+
+
+class TestArnParts(TestCase):
+    def test_arn_parts_of_valid_arn(self):
+        arn_parts: ARNParts = ARNParts(arn=VALID_ARN)
+        self.assertEqual(arn_parts.partition, "partition")
+        self.assertEqual(arn_parts.service, "service")
+        self.assertEqual(arn_parts.region, "region")
+        self.assertEqual(arn_parts.account_id, "account-id")
+        self.assertEqual(arn_parts.resource_id, "resource-id")
+
+    def test_arn_parts_of_invalid_arn(self):
+        with self.assertRaises(ValueError):
+            invalid_arn = "invalid_arn"
+            ARNParts(arn=invalid_arn)
+
+
+class TestResource(TestCase):
+    def test_resource(self):
+        resource = Resource(arn=VALID_ARN, comment="")
+        self.assertEqual(resource.arn, VALID_ARN)
+        self.assertTrue(resource.is_user_provided)
+        self.assertEqual(resource.name(), "resource-id")
+
+        resource = Resource(arn=INVALID_ARN, comment="")
+        self.assertEqual(resource.arn, INVALID_ARN)
+        self.assertTrue(resource.is_user_provided)
+        with self.assertRaises(ValueError):
+            resource.name()
+
+        resource = Resource(arn=None, comment="")
+        self.assertIsNone(resource.arn)
+        self.assertFalse(resource.is_user_provided)
+        self.assertIsNone(resource.name())
+
+
+class TestIAMUser(TestCase):
+    def test_create_iam_user(self):
+        user: IAMUser = IAMUser(arn=VALID_ARN, comment="user")
+        self.assertEqual(user.arn, VALID_ARN)
+        self.assertEqual(user.comment, "user")
+        self.assertIsNone(user.access_key_id)
+        self.assertIsNone(user.secret_access_key)
+
+        user = IAMUser(
+            arn=INVALID_ARN,
+            access_key_id="any_access_key_id",
+            secret_access_key="any_secret_access_key",
+            comment="user",
+        )
+        self.assertEqual(user.arn, INVALID_ARN)
+        self.assertEqual(user.comment, "user")
+        self.assertEqual(user.access_key_id, "any_access_key_id")
+        self.assertEqual(user.secret_access_key, "any_secret_access_key")
+
+
+class TestECRImageRepository(TestCase):
+    def test_get_uri_with_valid_ecr_arn(self):
+        valid_ecr_arn = "arn:partition:service:region:account-id:repository/repository-name"
+        repo: ECRImageRepository = ECRImageRepository(arn=valid_ecr_arn, comment="ecr")
+        self.assertEqual(repo.get_uri(), "account-id.dkr.ecr.region.amazonaws.com/repository-name")
+        self.assertEqual("ecr", repo.comment)
+
+    def test_get_uri_with_invalid_ecr_arn(self):
+        repo = ECRImageRepository(arn=INVALID_ARN, comment="ecr")
+        with self.assertRaises(ValueError):
+            repo.get_uri()
+
+    def test_get_uri_with_valid_aws_arn_that_is_invalid_ecr_arn(self):
+        ecr_arn_missing_repository_prefix = (
+            "arn:partition:service:region:account-id:repository-name-without-repository/-prefix"
+        )
+        repo = ECRImageRepository(arn=ecr_arn_missing_repository_prefix, comment="ecr")
+        with self.assertRaises(ValueError):
+            repo.get_uri()
diff --git a/tests/unit/lib/samconfig/test_samconfig.py 
b/tests/unit/lib/samconfig/test_samconfig.py index 74c9ee9661..42017d5490 100644 --- a/tests/unit/lib/samconfig/test_samconfig.py +++ b/tests/unit/lib/samconfig/test_samconfig.py @@ -1,11 +1,11 @@ import os from pathlib import Path - from unittest import TestCase from samcli.lib.config.exceptions import SamConfigVersionException +from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME, DEFAULT_GLOBAL_CMDNAME, DEFAULT_ENV from samcli.lib.config.version import VERSION_KEY, SAM_CONFIG_VERSION -from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME, DEFAULT_GLOBAL_CMDNAME +from samcli.lib.utils import osutils class TestSamConfig(TestCase): @@ -27,14 +27,25 @@ def _check_config_file(self): self.assertTrue(self.samconfig.sanity_check()) self.assertEqual(SAM_CONFIG_VERSION, self.samconfig.document.get(VERSION_KEY)) - def _update_samconfig(self, cmd_names, section, key, value, env): - self.samconfig.put(cmd_names=cmd_names, section=section, key=key, value=value, env=env) + def _update_samconfig(self, cmd_names, section, key, value, env=None): + if env: + self.samconfig.put(cmd_names=cmd_names, section=section, key=key, value=value, env=env) + else: + self.samconfig.put(cmd_names=cmd_names, section=section, key=key, value=value) self.samconfig.flush() self._check_config_file() def test_init(self): self.assertEqual(self.samconfig.filepath, Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME)) + def test_get_stage_names(self): + self.assertEqual(self.samconfig.get_stage_names(), []) + self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401, env="stage1") + self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401, env="stage2") + self.assertEqual(self.samconfig.get_stage_names(), ["stage1", "stage2"]) + self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401) + self.assertEqual(self.samconfig.get_stage_names(), ["stage1", "stage2", DEFAULT_ENV]) + def test_param_overwrite(self): self._update_samconfig(cmd_names=["myCommand"], section="mySection", key="port", value=5401, env="myEnv") self.assertEqual( @@ -195,3 +206,18 @@ def test_write_config_file_non_standard_version(self): self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="skip_pull_image", value=True) self.samconfig.sanity_check() self.assertEqual(self.samconfig.document.get(VERSION_KEY), 0.2) + + def test_write_config_file_will_create_the_file_if_not_exist(self): + with osutils.mkdir_temp(ignore_errors=True) as tempdir: + non_existing_dir = os.path.join(tempdir, "non-existing-dir") + non_existing_file = "non-existing-file" + samconfig = SamConfig(config_dir=non_existing_dir, filename=non_existing_file) + + self.assertFalse(samconfig.exists()) + + samconfig.flush() + self.assertFalse(samconfig.exists()) # nothing to write, no need to create the file + + samconfig.put(cmd_names=["any", "command"], section="any-section", key="any-key", value="any-value") + samconfig.flush() + self.assertTrue(samconfig.exists()) diff --git a/tests/unit/lib/utils/test_managed_cloudformation_stack.py b/tests/unit/lib/utils/test_managed_cloudformation_stack.py index 9f1ea0915a..fd21b792f1 100644 --- a/tests/unit/lib/utils/test_managed_cloudformation_stack.py +++ b/tests/unit/lib/utils/test_managed_cloudformation_stack.py @@ -21,19 +21,28 @@ def _stubbed_cf_client(self): def test_session_missing_profile(self, boto_mock): boto_mock.side_effect = ProfileNotFound(profile="test-profile") with 
self.assertRaises(CredentialsError): - manage_stack("test-profile", "fake-region", SAM_CLI_STACK_NAME, _get_stack_template()) + manage_stack( + profile="test-profile", + region="fake-region", + stack_name=SAM_CLI_STACK_NAME, + template_body=_get_stack_template(), + ) @patch("boto3.client") def test_client_missing_credentials(self, boto_mock): boto_mock.side_effect = NoCredentialsError() with self.assertRaises(CredentialsError): - manage_stack(None, "fake-region", SAM_CLI_STACK_NAME, _get_stack_template()) + manage_stack( + profile=None, region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() + ) @patch("boto3.client") def test_client_missing_region(self, boto_mock): boto_mock.side_effect = NoRegionError() with self.assertRaises(RegionError): - manage_stack(None, "fake-region", SAM_CLI_STACK_NAME, _get_stack_template()) + manage_stack( + profile=None, region="fake-region", stack_name=SAM_CLI_STACK_NAME, template_body=_get_stack_template() + ) def test_new_stack(self): stub_cf, stubber = self._stubbed_cf_client() @@ -47,6 +56,8 @@ def test_new_stack(self): "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", + "Capabilities": ["CAPABILITY_IAM"], + "Parameters": [], } ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} stubber.add_response("create_change_set", ccs_resp, ccs_params) @@ -151,6 +162,8 @@ def test_change_set_creation_fails(self): "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", + "Capabilities": ["CAPABILITY_IAM"], + "Parameters": [], } stubber.add_client_error("create_change_set", service_error_code="ClientError", expected_params=ccs_params) stubber.activate() @@ -171,6 +184,8 @@ def test_change_set_execution_fails(self): "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], "ChangeSetType": "CREATE", "ChangeSetName": "InitialCreation", + "Capabilities": ["CAPABILITY_IAM"], + "Parameters": [], } ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} stubber.add_response("create_change_set", ccs_resp, ccs_params) From ecd02bbfb9c99cf4e71ce342123cb47ed30fb7b7 Mon Sep 17 00:00:00 2001 From: Raymond Wang <14915548+wchengru@users.noreply.github.com> Date: Tue, 20 Jul 2021 14:03:49 -0700 Subject: [PATCH 15/24] chore: bump aws-lambda-builder version to 1.5.0 (#3086) --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 25efa93b05..a8131192bc 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,6 +12,6 @@ docker~=4.2.0 dateparser~=0.7 requests==2.25.1 serverlessrepo==0.1.10 -aws_lambda_builders==1.4.0 +aws_lambda_builders==1.5.0 tomlkit==0.7.2 watchdog==2.1.2 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index a2f725e5fd..ae068e1c8e 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -12,10 +12,10 @@ attrs==20.3.0 \ --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 # via jsonschema -aws-lambda-builders==1.4.0 \ - --hash=sha256:3f885433bb71bae653b520e3cf4c31fe5f5b977cb770d42c631af155cd60fd2b \ - --hash=sha256:5d4e4ecb3d3290f0eec1f62b7b0d9d6b91160ae71447d95899eede392d05f75f \ - 
--hash=sha256:d32f79cf67b189a7598793f69797f284b2eb9a9fada562175b1e854187f95aed +aws-lambda-builders==1.5.0 \ + --hash=sha256:0167b40da88c679e21341852faf59fae2aafe36c22a560de2e4aa75c7b9dd846 \ + --hash=sha256:6fd7fddd50b7bbbb8668c44c638d685123a698bf1a866da2f34b440bca9958ad \ + --hash=sha256:c9f2259656353f98e70c49ed52b6ea8891d1f6853c2b1a9ac891772768d1697e # via aws-sam-cli (setup.py) aws-sam-translator==1.37.0 \ --hash=sha256:12cbf4af9e95acf73dabfbc44af990dc1e880f35697bb8c04f31b3bb90ab5526 \ From 3fda4c5183d638c69facf2b45839869c7a80f06f Mon Sep 17 00:00:00 2001 From: Tarun Date: Tue, 20 Jul 2021 14:04:00 -0700 Subject: [PATCH 16/24] chore: update to aws-sam-translator 1.38.0 (#3073) --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 8 +-- .../lib/models/api_request_model.yaml | 9 +++ .../models/api_request_model_openapi_3.yaml | 12 ++++ .../lib/models/api_with_apikey_required.yaml | 8 +++ .../api_with_apikey_required_openapi_3.yaml | 8 +++ .../lib/models/api_with_auth_all_maximum.yaml | 52 +++++++++++++++- .../api_with_auth_all_maximum_openapi_3.yaml | 52 +++++++++++++++- .../lib/models/api_with_auth_all_minimum.yaml | 18 ++++++ .../api_with_auth_all_minimum_openapi.yaml | 18 ++++++ .../lib/models/api_with_auth_no_default.yaml | 18 ++++++ .../api_with_aws_account_blacklist.yaml | 6 ++ .../api_with_aws_account_whitelist.yaml | 13 ++++ ...api_with_cors_and_auth_preflight_auth.yaml | 7 +++ ...cors_and_conditions_no_definitionbody.yaml | 7 +++ .../api_with_cors_and_only_methods.yaml | 5 ++ .../api_with_cors_no_definitionbody.yaml | 7 +++ ...efault_aws_iam_auth_and_no_auth_route.yaml | 14 +++++ ...h_if_conditional_with_resource_policy.yaml | 7 +++ .../models/api_with_method_aws_iam_auth.yaml | 26 ++++++++ .../validate/lib/models/api_with_mode.yaml | 22 +++++++ .../lib/models/api_with_open_api_version.yaml | 5 ++ .../models/api_with_open_api_version_2.yaml | 5 ++ .../lib/models/api_with_path_parameters.yaml | 6 ++ .../lib/models/api_with_resource_policy.yaml | 7 +++ ..._with_resource_policy_global_implicit.yaml | 15 +++++ .../lib/models/api_with_resource_refs.yaml | 5 ++ .../models/api_with_source_vpc_blacklist.yaml | 5 ++ .../models/api_with_source_vpc_whitelist.yaml | 10 +++ ...pi_with_swagger_and_openapi_with_auth.yaml | 5 ++ .../api_with_swagger_authorizer_none.yaml | 24 ++++++++ .../lib/models/api_with_usageplans.yaml | 7 +++ ...th_usageplans_shared_no_side_effect_1.yaml | 61 +++++++++++++++++++ ...th_usageplans_shared_no_side_effect_2.yaml | 34 +++++++++++ 34 files changed, 497 insertions(+), 11 deletions(-) create mode 100644 tests/functional/commands/validate/lib/models/api_with_mode.yaml create mode 100644 tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml create mode 100644 tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml diff --git a/requirements/base.txt b/requirements/base.txt index a8131192bc..d20eee865b 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ boto3~=1.14 jmespath~=0.10.0 PyYAML~=5.3 cookiecutter~=1.7.2 -aws-sam-translator==1.37.0 +aws-sam-translator==1.38.0 #docker minor version updates can include breaking changes. Auto update micro version only. 
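The reproducible-linux.txt entries above pin each dependency to a set of SHA-256 digests, one per published wheel plus the sdist, so a re-uploaded or tampered artifact cannot slip into a reproducible build. Below is a minimal, illustrative sketch of the digest check that such hash pinning implies (the real enforcement lives inside pip; the digests here are the aws_lambda_builders 1.5.0 values pinned above):

import hashlib

# SHA-256 digests pinned for aws_lambda_builders 1.5.0 in reproducible-linux.txt.
PINNED_SHA256 = {
    "0167b40da88c679e21341852faf59fae2aafe36c22a560de2e4aa75c7b9dd846",
    "6fd7fddd50b7bbbb8668c44c638d685123a698bf1a866da2f34b440bca9958ad",
    "c9f2259656353f98e70c49ed52b6ea8891d1f6853c2b1a9ac891772768d1697e",
}

def artifact_matches_pin(path: str, pinned_sha256: set) -> bool:
    # Hash the downloaded artifact in chunks and accept it only if its
    # digest matches one of the pinned values.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest() in pinned_sha256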
docker~=4.2.0 dateparser~=0.7 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index ae068e1c8e..34bac7b600 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -17,10 +17,10 @@ aws-lambda-builders==1.5.0 \ --hash=sha256:6fd7fddd50b7bbbb8668c44c638d685123a698bf1a866da2f34b440bca9958ad \ --hash=sha256:c9f2259656353f98e70c49ed52b6ea8891d1f6853c2b1a9ac891772768d1697e # via aws-sam-cli (setup.py) -aws-sam-translator==1.37.0 \ - --hash=sha256:12cbf4af9e95acf73dabfbc44af990dc1e880f35697bb8c04f31b3bb90ab5526 \ - --hash=sha256:26e4866627e4284afc367bee2bd04d3cf23cecc8ff879b419457715a738395a9 \ - --hash=sha256:6884d942a815450637bac48e297996df2dacc27077d25ced09d8e9ce1f6a585c +aws-sam-translator==1.38.0 \ + --hash=sha256:0ecadda9cf5ab2318f57f1253181a2151e4c53cd35d21717a923c075a5a65cb6 \ + --hash=sha256:dc6b816bb5cfd9709299f9b263fc0cf5ae60aca4166d1c90413ece651f1556bb \ + --hash=sha256:ee7c7c5e44ec67202622ca877140545496527ffcc45da3beeda966f007443a88 # via aws-sam-cli (setup.py) binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ diff --git a/tests/functional/commands/validate/lib/models/api_request_model.yaml b/tests/functional/commands/validate/lib/models/api_request_model.yaml index 4dc0c5f423..5c1d96b073 100644 --- a/tests/functional/commands/validate/lib/models/api_request_model.yaml +++ b/tests/functional/commands/validate/lib/models/api_request_model.yaml @@ -15,6 +15,15 @@ Resources: RequestModel: Model: User Required: true + AnyPath: + Type: Api + Properties: + RestApiId: HtmlApi + Path: /any + Method: any + RequestModel: + Model: User + Required: true HtmlApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml b/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml index 2e9a7d26d2..69e003ebdb 100644 --- a/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml +++ b/tests/functional/commands/validate/lib/models/api_request_model_openapi_3.yaml @@ -27,6 +27,18 @@ Resources: Path: /iam Auth: Authorizer: AWS_IAM + AnyIam: + Type: Api + Properties: + RequestModel: + Model: User + Required: true + RestApiId: + Ref: HtmlApi + Method: any + Path: /any/iam + Auth: + Authorizer: AWS_IAM HtmlApi: diff --git a/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml b/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml index 4ae8e52680..27dfe9a720 100644 --- a/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_apikey_required.yaml @@ -19,3 +19,11 @@ Resources: Method: get Auth: ApiKeyRequired: true + MyApiWithApiKeyRequiredAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/ApiKeyRequiredTrue + Method: any + Auth: + ApiKeyRequired: true diff --git a/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml b/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml index e3140b5945..bd962b7709 100644 --- a/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_apikey_required_openapi_3.yaml @@ -20,3 +20,11 @@ Resources: Method: get Auth: ApiKeyRequired: true + MyApiWithApiKeyRequiredAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + 
Path: /any/ApiKeyRequiredTrue + Method: any + Auth: + ApiKeyRequired: true diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml index 831425e6da..67e3f4a8eb 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum.yaml @@ -50,7 +50,7 @@ Resources: Context: - Authorization4 ReauthorizeEvery: 0 - + MyFunction: Type: AWS::Serverless::Function Properties: @@ -66,6 +66,14 @@ Resources: Method: get Auth: Authorizer: NONE + WithNoAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/noauth + Method: any + Auth: + Authorizer: NONE WithCognitoMultipleUserPoolsAuthorizer: Type: Api Properties: @@ -74,6 +82,14 @@ Resources: Method: post Auth: Authorizer: MyCognitoAuthMultipleUserPools + WithCognitoMultipleUserPoolsAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/cognitomultiple + Method: any + Auth: + Authorizer: MyCognitoAuthMultipleUserPools WithLambdaTokenAuthorizer: Type: Api Properties: @@ -82,7 +98,15 @@ Resources: Method: get Auth: Authorizer: MyLambdaTokenAuth - WithLambdaTokenAuthorizer: + WithLambdaTokenAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatoken + Method: any + Auth: + Authorizer: MyLambdaTokenAuth + WithLambdaTokenNoneAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi @@ -90,6 +114,14 @@ Resources: Method: patch Auth: Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole + WithLambdaTokenNoneAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatokennone + Method: any + Auth: + Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole WithLambdaRequestAuthorizer: Type: Api Properties: @@ -98,9 +130,23 @@ Resources: Method: delete Auth: Authorizer: MyLambdaRequestAuth + WithLambdaRequestAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdarequest + Method: any + Auth: + Authorizer: MyLambdaRequestAuth WithDefaultAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi Path: /users - Method: put \ No newline at end of file + Method: put + WithDefaultAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/default + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml index 0012f8bc14..5c8d3597eb 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_all_maximum_openapi_3.yaml @@ -51,7 +51,7 @@ Resources: Context: - Authorization4 ReauthorizeEvery: 0 - + MyFunction: Type: AWS::Serverless::Function Properties: @@ -67,6 +67,14 @@ Resources: Method: get Auth: Authorizer: NONE + WithNoAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/noauth + Method: any + Auth: + Authorizer: NONE WithCognitoMultipleUserPoolsAuthorizer: Type: Api Properties: @@ -75,6 +83,14 @@ Resources: Method: post Auth: Authorizer: MyCognitoAuthMultipleUserPools + WithCognitoMultipleUserPoolsAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/cognitomultiple + Method: any + Auth: + Authorizer: MyCognitoAuthMultipleUserPools 
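The "Method: any" events added throughout these templates map to API Gateway's catch-all ANY method. In the generated Swagger/OpenAPI document that method is keyed by the x-amazon-apigateway-any-method vendor extension rather than a standard HTTP verb. A hedged sketch of the normalization involved (the actual logic lives in samtranslator's Swagger editor; this is illustrative only):

X_ANY_METHOD = "x-amazon-apigateway-any-method"

def normalize_method_name(method):
    # Swagger path items key operations by lower-cased HTTP verb; the
    # catch-all ANY method has no standard verb, so API Gateway uses a
    # vendor-extension key instead.
    if not isinstance(method, str):
        return method
    method = method.lower()
    return X_ANY_METHOD if method == "any" else method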
WithLambdaTokenAuthorizer: Type: Api Properties: @@ -83,7 +99,15 @@ Resources: Method: get Auth: Authorizer: MyLambdaTokenAuth - WithLambdaTokenAuthorizer: + WithLambdaTokenAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatoken + Method: any + Auth: + Authorizer: MyLambdaTokenAuth + WithLambdaTokenNoneAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi @@ -91,6 +115,14 @@ Resources: Method: patch Auth: Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole + WithLambdaTokenNoneAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdatokennone + Method: any + Auth: + Authorizer: MyLambdaTokenAuthNoneFunctionInvokeRole WithLambdaRequestAuthorizer: Type: Api Properties: @@ -99,9 +131,23 @@ Resources: Method: delete Auth: Authorizer: MyLambdaRequestAuth + WithLambdaRequestAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/lambdarequest + Method: any + Auth: + Authorizer: MyLambdaRequestAuth WithDefaultAuthorizer: Type: Api Properties: RestApiId: !Ref MyApi Path: /users - Method: put \ No newline at end of file + Method: put + WithDefaultAuthorizerAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApi + Path: /any/default + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml index f6eda0af2c..399df76126 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum.yaml @@ -51,18 +51,36 @@ Resources: RestApiId: !Ref MyApiWithCognitoAuth Method: get Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Path: /any/cognito LambdaToken: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaTokenAuth Method: get Path: /lambda-token + LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Path: /any/lambda-token LambdaRequest: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaRequestAuth Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Method: any + Path: /any/lambda-request MyUserPool: Type: AWS::Cognito::UserPool Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml index 486bd1250f..bfa377bbbf 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_all_minimum_openapi.yaml @@ -54,18 +54,36 @@ Resources: RestApiId: !Ref MyApiWithCognitoAuth Method: get Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Path: /any/cognito LambdaToken: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaTokenAuth Method: get Path: /lambda-token + LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Path: /any/lambda-token LambdaRequest: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaRequestAuth Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Method: any + Path: /any/lambda-request MyUserPool: Type: 
AWS::Cognito::UserPool Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml b/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml index 85d591b06e..3f3900386c 100644 --- a/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_auth_no_default.yaml @@ -48,18 +48,36 @@ Resources: RestApiId: !Ref MyApiWithCognitoAuth Method: get Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Path: /any/cognito LambdaToken: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaTokenAuth Method: get Path: /lambda-token + LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Path: /any/lambda-token LambdaRequest: Type: Api Properties: RestApiId: !Ref MyApiWithLambdaRequestAuth Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Method: any + Path: /any/lambda-request MyUserPool: Type: AWS::Cognito::UserPool Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml b/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml index b93e63d9b6..19b51412a9 100644 --- a/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_aws_account_blacklist.yaml @@ -23,3 +23,9 @@ Resources: Properties: Method: Put Path: /get + Any: + Type: Api + Properties: + Method: any + Path: /any + diff --git a/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml b/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml index c69a9b64f3..ff55cbae2b 100644 --- a/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_aws_account_whitelist.yaml @@ -26,3 +26,16 @@ Resources: ] Method: Put Path: /get + Any: + Type: Api + Properties: + Auth: + ResourcePolicy: + AwsAccountWhitelist: [ + "12345" + ] + AwsAccountBlacklist: [ + "67890" + ] + Method: any + Path: /any diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml index e984428c15..1fb222b890 100644 --- a/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_and_auth_preflight_auth.yaml @@ -24,6 +24,13 @@ Resources: Method: post RestApiId: !Ref ServerlessApi + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any + RestApiId: !Ref ServerlessApi + ServerlessApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml index 6070b112d9..5075726ae7 100644 --- a/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_and_conditions_no_definitionbody.yaml @@ -48,6 +48,13 @@ Resources: Path: / Method: post + AnyHtml: + Type: Api + Properties: + RestApiId: !Ref ExplicitApi + Path: /any + Method: any + ExplicitApi: Type: 
AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml index 1ee2d92883..724de43017 100644 --- a/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_and_only_methods.yaml @@ -16,4 +16,9 @@ Resources: Properties: Path: / Method: get + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any diff --git a/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml b/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml index f8b7bcd522..7d496c2f9b 100644 --- a/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_cors_no_definitionbody.yaml @@ -27,6 +27,13 @@ Resources: Path: / Method: post + AnyHtml: + Type: Api + Properties: + RestApiId: !Ref ExplicitApi + Path: /any + Method: any + ExplicitApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml b/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml index 8bad587889..d3d69d577c 100644 --- a/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_default_aws_iam_auth_and_no_auth_route.yaml @@ -19,6 +19,12 @@ Resources: RestApiId: !Ref MyApiWithAwsIamAuth Path: / Method: post + MyApiWithAwsIamAuthAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithAwsIamAuth + Path: /any/iam + Method: any MyApiWithNoAuth: Type: Api Properties: @@ -27,3 +33,11 @@ Resources: Method: get Auth: Authorizer: 'NONE' + MyApiWithNoAuthAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithAwsIamAuth + Path: /any/none + Method: any + Auth: + Authorizer: 'NONE' diff --git a/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml b/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml index 3ffecb9b74..cfbc74ec1e 100644 --- a/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_if_conditional_with_resource_policy.yaml @@ -50,5 +50,12 @@ Resources: Ref: ExplicitApi Path: /three Method: put + AnyHtml: + Type: Api + Properties: + RestApiId: + Ref: ExplicitApi + Path: /any + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml b/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml index 8a1c8c6da2..16c06dc43e 100644 --- a/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_method_aws_iam_auth.yaml @@ -37,3 +37,29 @@ Resources: Auth: Authorizer: AWS_IAM InvokeRole: CALLER_CREDENTIALS + MyApiWithAwsIamAuthAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/one + Method: any + Auth: + Authorizer: AWS_IAM + MyApiWithAwsIamAuthAndCustomInvokeRoleAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/two + Method: any + Auth: + Authorizer: AWS_IAM + InvokeRole: rn:aws:iam::123:role/AUTH_AWS_IAM + 
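The InvokeRole values exercised in the template above control which credentials API Gateway uses when invoking the Lambda integration: CALLER_CREDENTIALS passes the caller's own IAM credentials through, while any other value is treated as a role ARN. A rough sketch of how the integration credentials could be resolved, assuming the documented SAM behavior where CALLER_CREDENTIALS maps to the wildcard user ARN (illustrative, not the translator's verbatim code):

def integration_credentials(invoke_role="CALLER_CREDENTIALS"):
    # CALLER_CREDENTIALS means "pass the caller's IAM credentials through",
    # which API Gateway expresses with a wildcard user ARN; any other value
    # is used verbatim as the role the integration assumes.
    if invoke_role == "CALLER_CREDENTIALS":
        return "arn:aws:iam::*:user/*"
    return invoke_role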
MyApiWithAwsIamAuthAndDefaultInvokeRoleAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithoutAuth + Path: /any/three + Method: any + Auth: + Authorizer: AWS_IAM + InvokeRole: CALLER_CREDENTIALS diff --git a/tests/functional/commands/validate/lib/models/api_with_mode.yaml b/tests/functional/commands/validate/lib/models/api_with_mode.yaml new file mode 100644 index 0000000000..8df0693af4 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/api_with_mode.yaml @@ -0,0 +1,22 @@ +Resources: + Function: + Type: AWS::Serverless::Function + Properties: + CodeUri: s3://sam-demo-bucket/member_portal.zip + Handler: index.gethtml + Runtime: nodejs12.x + Events: + GetHtml: + Type: Api + Properties: + RestApiId: Api + Path: / + Method: get + + Api: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + DefinitionUri: s3://sam-demo-bucket/webpage_swagger.json + Description: my description + Mode: overwrite diff --git a/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml b/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml index 1ffd32bd6a..7efa33f629 100644 --- a/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_open_api_version.yaml @@ -16,6 +16,11 @@ Resources: Properties: Path: / Method: get + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any ExplicitApi: Type: AWS::Serverless::Api Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml b/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml index 688344e032..52e6530326 100644 --- a/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_open_api_version_2.yaml @@ -16,6 +16,11 @@ Resources: Properties: Path: / Method: get + AnyHtml: + Type: Api + Properties: + Path: /any + Method: any ExplicitApi: Type: AWS::Serverless::Api Properties: diff --git a/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml b/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml index ac79e312c5..e1799d3e70 100644 --- a/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_path_parameters.yaml @@ -12,6 +12,12 @@ Resources: RestApiId: HtmlApi Path: /{prameter}/resources Method: get + AnyHtml: + Type: Api + Properties: + RestApiId: HtmlApi + Path: /any/{prameter}/resources + Method: any HtmlApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml b/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml index fb9071db25..2c34783842 100644 --- a/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_resource_policy.yaml @@ -37,5 +37,12 @@ Resources: Ref: ExplicitApi Path: /three Method: put + AnyHtml: + Type: Api + Properties: + RestApiId: + Ref: ExplicitApi + Path: /any + Method: any \ No newline at end of file diff --git a/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml b/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml index d3599c73c4..613f67dc10 100644 --- a/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml +++ 
b/tests/functional/commands/validate/lib/models/api_with_resource_policy_global_implicit.yaml @@ -21,3 +21,18 @@ Resources: Action: 'execute-api:blah', Resource: ['execute-api:/*/*/*'] }] + AddItemAnyMethod: + Type: Api + Properties: + Path: /any/add + Method: any + Auth: + ResourcePolicy: + CustomStatements: [{ + Action: 'execute-api:Invoke', + Resource: ['execute-api:/*/*/*'] + }, + { + Action: 'execute-api:blah', + Resource: ['execute-api:/*/*/*'] + }] diff --git a/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml b/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml index 3381677ef2..e84845cbba 100644 --- a/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_resource_refs.yaml @@ -21,6 +21,11 @@ Resources: Properties: Path: /html Method: GET + GetHtmlAnyMethod: + Type: Api + Properties: + Path: /any/html + Method: any Outputs: ImplicitApiDeployment: diff --git a/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml b/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml index 65073bdede..6315a79314 100644 --- a/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_source_vpc_blacklist.yaml @@ -23,4 +23,9 @@ Resources: Properties: Method: Put Path: /get + ApiAnyMethod: + Type: Api + Properties: + Method: any + Path: /any/get diff --git a/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml b/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml index 1cacf39415..f67ea34d8a 100644 --- a/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_source_vpc_whitelist.yaml @@ -31,11 +31,21 @@ Resources: Properties: Method: Put Path: /get + ApiAnyMethod: + Type: Api + Properties: + Method: any + Path: /any/get Fetch: Type: Api Properties: Method: Post Path: /fetch + FetchAnyMethod: + Type: Api + Properties: + Method: any + Path: /any/fetch MyApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml b/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml index af30762da9..1b796e449b 100644 --- a/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_swagger_and_openapi_with_auth.yaml @@ -24,6 +24,11 @@ Resources: Properties: Path: / Method: get + GetHtmlAnyMethod: + Type: Api + Properties: + Path: /any + Method: any ExplicitApi: Type: AWS::Serverless::Api diff --git a/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml b/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml index eb0ae32bea..98173772ec 100644 --- a/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_swagger_authorizer_none.yaml @@ -85,6 +85,14 @@ Resources: Auth: Authorizer: NONE Path: /cognito + CognitoAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithCognitoAuth + Method: any + Auth: + Authorizer: NONE + Path: /any/cognito LambdaToken: Type: Api Properties: @@ -93,6 +101,14 @@ Resources: Auth: Authorizer: NONE Path: /lambda-token + 
LambdaTokenAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaTokenAuth + Method: any + Auth: + Authorizer: NONE + Path: /any/lambda-token LambdaRequest: Type: Api Properties: @@ -101,6 +117,14 @@ Resources: Authorizer: NONE Method: get Path: /lambda-request + LambdaRequestAnyMethod: + Type: Api + Properties: + RestApiId: !Ref MyApiWithLambdaRequestAuth + Auth: + Authorizer: NONE + Method: any + Path: /any/lambda-request MyUserPool: Type: AWS::Cognito::UserPool diff --git a/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml b/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml index 836d98648b..41b08e493d 100644 --- a/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml +++ b/tests/functional/commands/validate/lib/models/api_with_usageplans.yaml @@ -63,6 +63,13 @@ Resources: Ref: MyApiOne Method: get Path: /path/one + ApiKeyAnyMethod: + Type: Api + Properties: + RestApiId: + Ref: MyApiOne + Method: any + Path: /any/path/one MyFunctionTwo: Type: AWS::Serverless::Function diff --git a/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml new file mode 100644 index 0000000000..f05fe7511b --- /dev/null +++ b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_1.yaml @@ -0,0 +1,61 @@ +Globals: + Api: + Auth: + ApiKeyRequired: true + UsagePlan: + CreateUsagePlan: SHARED + +Resources: + MyApiOne: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + + MyApiTwo: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + + MyFunctionOne: + Type: AWS::Serverless::Function + Properties: + Handler: index.handler + Runtime: nodejs12.x + InlineCode: | + exports.handler = async (event) => { + return { + statusCode: 200, + body: JSON.stringify(event), + headers: {} + } + } + Events: + ApiKey: + Type: Api + Properties: + RestApiId: + Ref: MyApiOne + Method: get + Path: /path/one + + MyFunctionTwo: + Type: AWS::Serverless::Function + Properties: + Handler: index.handler + Runtime: nodejs12.x + InlineCode: | + exports.handler = async (event) => { + return { + statusCode: 200, + body: JSON.stringify(event), + headers: {} + } + } + Events: + ApiKey: + Type: Api + Properties: + RestApiId: + Ref: MyApiTwo + Method: get + Path: /path/two diff --git a/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml new file mode 100644 index 0000000000..857e387692 --- /dev/null +++ b/tests/functional/commands/validate/lib/models/api_with_usageplans_shared_no_side_effect_2.yaml @@ -0,0 +1,34 @@ +Globals: + Api: + Auth: + ApiKeyRequired: true + UsagePlan: + CreateUsagePlan: SHARED + +Resources: + MyApiFour: + Type: AWS::Serverless::Api + Properties: + StageName: Prod + + MyFunctionFour: + Type: AWS::Serverless::Function + Properties: + Handler: index.handler + Runtime: nodejs12.x + InlineCode: | + exports.handler = async (event) => { + return { + statusCode: 200, + body: JSON.stringify(event), + headers: {} + } + } + Events: + ApiKey: + Type: Api + Properties: + RestApiId: + Ref: MyApiFour + Method: get + Path: /path/four From 6b7ec4f0719d611c944295f180f9837c5e2736f9 Mon Sep 17 00:00:00 2001 From: _sam <3804518+aahung@users.noreply.github.com> Date: Tue, 20 Jul 2021 16:16:16 -0700 Subject: [PATCH 17/24] ci: Update expected Jenkins 
file in pipeline integ test (#3090)

---
 tests/integration/testdata/pipeline/expected_jenkinsfile | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/tests/integration/testdata/pipeline/expected_jenkinsfile b/tests/integration/testdata/pipeline/expected_jenkinsfile
index 7a213a30f9..0271ca633e 100644
--- a/tests/integration/testdata/pipeline/expected_jenkinsfile
+++ b/tests/integration/testdata/pipeline/expected_jenkinsfile
@@ -145,6 +145,11 @@ pipeline {
 // }
 // }

+ // uncomment this to have a manual approval step before deployment to production
+ // stage('production-deployment-approval'){
+ // input "Do you want to deploy to production environment?"
+ // }
+
 stage('deploy-prod') {
 when {
 branch env.MAIN_BRANCH

From 9206b47ccca304e6cbaf627063fee2fba035d58c Mon Sep 17 00:00:00 2001
From: _sam <3804518+aahung@users.noreply.github.com>
Date: Wed, 21 Jul 2021 06:04:53 -0700
Subject: [PATCH 18/24] chore: Refine pipeline help text and update unit test
 (#3091)

* Update --bucket help text
* Update --stage help text
* Update help text
* Update help text
* Update help text
* Update help text
* Update help text
* Update jenkins generated files
* Update some intro texts
* Remove trailing spaces

---
 samcli/commands/pipeline/bootstrap/cli.py | 41 +++++++++++--------
 samcli/commands/pipeline/init/cli.py | 16 ++++----
 .../pipeline/init/interactive_init_flow.py | 8 ++--
 .../testdata/pipeline/expected_jenkinsfile | 9 ++--
 4 files changed, 40 insertions(+), 34 deletions(-)

diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py
index 9e8b454992..4c32ebc9b3 100644
--- a/samcli/commands/pipeline/bootstrap/cli.py
+++ b/samcli/commands/pipeline/bootstrap/cli.py
@@ -17,12 +17,11 @@
 from .guided_context import GuidedContext
 from ..external_links import CONFIG_AWS_CRED_ON_CICD_URL

-SHORT_HELP = "Generates the necessary AWS resources to connect your CI/CD system."
+SHORT_HELP = "Generates the required AWS resources to connect your CI/CD system."

 HELP_TEXT = """
-SAM Pipeline Bootstrap generates the necessary AWS resources to connect your
-CI/CD system. This step must be completed for each pipeline stage prior to
-running sam pipeline init
+This command generates the required AWS infrastructure resources to connect to your CI/CD system.
+This step must be run for each deployment stage in your pipeline, prior to running the sam pipeline init command.
 """

 PIPELINE_CONFIG_DIR = os.path.join(".aws-sam", "pipeline")
@@ -39,29 +38,33 @@
 )
 @click.option(
 "--stage",
- help="The name of the corresponding stage. It is used as a suffix for the created resources.",
+ help="The name of the corresponding deployment stage. "
+ "It is used as a suffix for the created AWS infrastructure resources.",
 required=False,
)
@click.option(
 "--pipeline-user",
- help="An IAM user generated or referenced by sam pipeline bootstrap in order to "
- "allow the connected CI/CD system to connect to the SAM CLI.",
+ help="The Amazon Resource Name (ARN) of the IAM user having its access key ID and secret access key "
+ "shared with the CI/CD system. It is used to grant this IAM user permission to access the "
+ "corresponding AWS account. 
If not provided, the command will create one along with the access "
+ "key ID and secret access key credentials.",
 required=False,
)
@click.option(
 "--pipeline-execution-role",
- help="Execution role that the CI/CD system assumes in order to make changes to resources on your behalf.",
+ help="The ARN of the IAM role to be assumed by the pipeline user to operate on this stage. "
+ "Provide it only if you want to use your own role, otherwise this command will create one.",
 required=False,
)
@click.option(
 "--cloudformation-execution-role",
- help="Execution role that CloudFormation assumes in order to make changes to resources on your behalf",
+ help="The ARN of the IAM role to be assumed by the AWS CloudFormation service while deploying the "
+ "application's stack. Provide it only if you want to use your own role, otherwise the command will create one.",
 required=False,
)
@click.option(
 "--bucket",
- help="The name of the S3 bucket where this command uploads your CloudFormation template. This is required for"
- "deployments of templates sized greater than 51,200 bytes.",
+ help="The ARN of the Amazon S3 bucket to hold the AWS SAM artifacts.",
 required=False,
)
@click.option(
@@ -73,14 +76,16 @@
)
@click.option(
 "--image-repository",
- help="ECR repo uri where this command uploads the image artifacts that are referenced in your template.",
+ help="The ARN of an Amazon ECR image repository to hold the container images of Lambda functions or "
+ "layers that have a package type of Image. If provided, the --create-image-repository option is ignored. "
+ "If not provided and --create-image-repository is specified, the command will create one.",
 required=False,
)
@click.option(
 "--confirm-changeset/--no-confirm-changeset",
 default=True,
 is_flag=True,
- help="Prompt to confirm if the resources is to be deployed by SAM CLI.",
+ help="Prompt to confirm if the resources are to be deployed.",
)
@common_options
@aws_creds_options
@@ -150,10 +155,12 @@ def do_cli(
 dedent(
 """\

- sam pipeline bootstrap generates the necessary AWS resources to connect a stage in
- your CI/CD system. We will ask for [1] stage definition, [2] account details, and
- [3] references to existing resources in order to bootstrap these pipeline
- resources.
+ sam pipeline bootstrap generates the required AWS infrastructure resources to connect
+ to your CI/CD system. This step must be run for each deployment stage in your pipeline,
+ prior to running the sam pipeline init command.
+
+ We will ask for [1] stage definition, [2] account details, and
+ [3] references to existing resources in order to bootstrap these pipeline resources.
 """
 ),
 )
diff --git a/samcli/commands/pipeline/init/cli.py b/samcli/commands/pipeline/init/cli.py
index bcbe205c6a..a7223398c9 100644
--- a/samcli/commands/pipeline/init/cli.py
+++ b/samcli/commands/pipeline/init/cli.py
@@ -10,14 +10,14 @@
 from samcli.commands.pipeline.init.interactive_init_flow import InteractiveInitFlow
 from samcli.lib.telemetry.metric import track_command

-SHORT_HELP = "Generates CI/CD pipeline configuration files."
+SHORT_HELP = "Generates a CI/CD pipeline configuration file."

 HELP_TEXT = """
-sam pipeline init generates a pipeline configuration file that you can use to connect your
-AWS account(s) to your CI/CD system. Before using sam pipeline init, you must
-bootstrap the necessary resources for each stage in your pipeline. 
You can do this -by running sam pipeline init --bootstrap to be guided through the setup and configuration -file generation process, or refer to resources you have previously created with the -sam pipeline bootstrap command. +This command generates a pipeline configuration file that your CI/CD system can use to deploy +serverless applications using AWS SAM. + +Before using sam pipeline init, you must bootstrap the necessary resources for each stage in your pipeline. +You can do this by running sam pipeline init --bootstrap to be guided through the setup and configuration +file generation process, or refer to resources you have previously created with the sam pipeline bootstrap command. """ @@ -27,7 +27,7 @@ "--bootstrap", is_flag=True, default=False, - help="Allow bootstrapping resources.", + help="Enable interactive mode that walks the user through creating necessary AWS infrastructure resources.", ) @cli_framework_options @pass_context diff --git a/samcli/commands/pipeline/init/interactive_init_flow.py b/samcli/commands/pipeline/init/interactive_init_flow.py index 7504f3a66b..d4e989ebfa 100644 --- a/samcli/commands/pipeline/init/interactive_init_flow.py +++ b/samcli/commands/pipeline/init/interactive_init_flow.py @@ -57,10 +57,10 @@ def do_interactive(self) -> None: dedent( """\ - sam pipeline init generates a pipeline configuration file that you can use to connect your - AWS account(s) to your CI/CD system. We will guide you through the process to - bootstrap resources for each stage, then walk through the details necessary for - creating the pipeline config file. + sam pipeline init generates a pipeline configuration file that your CI/CD system + can use to deploy serverless applications using AWS SAM. + We will guide you through the process to bootstrap resources for each stage, + then walk through the details necessary for creating the pipeline config file. Please ensure you are in the root folder of your SAM application before you begin. """ diff --git a/tests/integration/testdata/pipeline/expected_jenkinsfile b/tests/integration/testdata/pipeline/expected_jenkinsfile index 0271ca633e..14adf3b92b 100644 --- a/tests/integration/testdata/pipeline/expected_jenkinsfile +++ b/tests/integration/testdata/pipeline/expected_jenkinsfile @@ -145,11 +145,6 @@ pipeline { // } // } - // uncomment this to have a manual approval step before deployment to production - // stage('production-deployment-approval'){ - // input "Do you want to deploy to production environment?" - // } - stage('deploy-prod') { when { branch env.MAIN_BRANCH @@ -160,6 +155,10 @@ pipeline { } } steps { + // uncomment this to have a manual approval step before deployment to production + // timeout(time: 24, unit: 'HOURS') { + // input 'Please confirm before starting production deployment' + // } withAWS( credentials: env.PIPELINE_USER_CREDENTIAL_ID, region: env.PROD_REGION, From aae1e97c5eeadb0788aea1b291603fc0b5dddaa3 Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 10:55:15 -0700 Subject: [PATCH 19/24] Clearing pipeline integ test buckets with versioned objects. (#3094) * Clearing pipeline integ test buckets with versioned objects. * Fixing black formatting. 
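The diff below (refined further by the two patches that follow it) addresses a cleanup failure: Amazon S3 refuses to delete a versioned bucket while any object versions or delete markers remain, so a plain delete_bucket call fails. Taken together, the three patches converge on roughly this shape; a condensed sketch using the boto3 resource API, not the verbatim test code:

import boto3

def delete_versioned_bucket(bucket_name: str) -> None:
    # Purge every object version and delete marker first; a versioned
    # bucket cannot be deleted while any remain.
    bucket = boto3.session.Session().resource("s3").Bucket(bucket_name)
    bucket.object_versions.delete()
    bucket.delete()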
Co-authored-by: Tarun Mall --- tests/integration/pipeline/base.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py index f82d27e357..af47374e93 100644 --- a/tests/integration/pipeline/base.py +++ b/tests/integration/pipeline/base.py @@ -82,9 +82,11 @@ def _cleanup_s3_buckets(self, stack_name): for resource in stack_resources["StackResources"] if resource["ResourceType"] == "AWS::S3::Bucket" ] - s3_client = boto3.client("s3") + session = boto3.session.Session() + s3_client = session.resource("s3") for bucket in buckets: - s3_client.delete_bucket(Bucket=bucket.get("PhysicalResourceId")) + bucket = s3_client.Bucket(bucket) + bucket.object_versions.delete() except botocore.exceptions.ClientError: """No need to fail in cleanup""" From 8bbb7c5ca7366242c53cc66e9181e073b9fcbfd2 Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 11:22:35 -0700 Subject: [PATCH 20/24] Fixing bug in bucket cleanup. (#3096) Co-authored-by: Tarun Mall --- tests/integration/pipeline/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py index af47374e93..9a52820b63 100644 --- a/tests/integration/pipeline/base.py +++ b/tests/integration/pipeline/base.py @@ -85,7 +85,7 @@ def _cleanup_s3_buckets(self, stack_name): session = boto3.session.Session() s3_client = session.resource("s3") for bucket in buckets: - bucket = s3_client.Bucket(bucket) + bucket = s3_client.Bucket(bucket.get("PhysicalResourceId")) bucket.object_versions.delete() except botocore.exceptions.ClientError: """No need to fail in cleanup""" From 172616c906c1cf64f8357ffa0d07ed8f9d96c13d Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 11:43:03 -0700 Subject: [PATCH 21/24] Deleting bucket (#3097) Co-authored-by: Tarun Mall --- tests/integration/pipeline/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py index 9a52820b63..e776613f33 100644 --- a/tests/integration/pipeline/base.py +++ b/tests/integration/pipeline/base.py @@ -87,6 +87,7 @@ def _cleanup_s3_buckets(self, stack_name): for bucket in buckets: bucket = s3_client.Bucket(bucket.get("PhysicalResourceId")) bucket.object_versions.delete() + bucket.delete() except botocore.exceptions.ClientError: """No need to fail in cleanup""" From 1916bfa354b5d2612bd1bf9efd54a77e2bc66ff6 Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 12:37:41 -0700 Subject: [PATCH 22/24] Revert "temp: disable testing against python 3.8, and enabled 3.7 (#3009)" (#3098) This reverts commit fe832185be09acb199b2a09ad73bf59e1553d131. 
Co-authored-by: Tarun Mall --- appveyor.yml | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index ed730e0a24..b96017d0d3 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -9,26 +9,26 @@ environment: matrix: - - PYTHON_HOME: "C:\\Python36-x64" - PYTHON_VERSION: '3.6' - PYTHON_ARCH: '64' - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_37_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_36' - AWS_ECR: 'AWS_ECR_36' - APPVEYOR_CONSOLE_DISABLE_PTY: true - - - PYTHON_HOME: "C:\\Python37-x64" - PYTHON_VERSION: '3.7' - PYTHON_ARCH: '64' - RUN_SMOKE: 1 - NOSE_PARAMETERIZED_NO_WARN: 1 - INSTALL_PY_36_PIP: 1 - INSTALL_PY_38_PIP: 1 - AWS_S3: 'AWS_S3_37' - AWS_ECR: 'AWS_ECR_37' - APPVEYOR_CONSOLE_DISABLE_PTY: true + # - PYTHON_HOME: "C:\\Python36-x64" + # PYTHON_VERSION: '3.6' + # PYTHON_ARCH: '64' + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_37_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_36' + # AWS_ECR: 'AWS_ECR_36' + # APPVEYOR_CONSOLE_DISABLE_PTY: true + + # - PYTHON_HOME: "C:\\Python37-x64" + # PYTHON_VERSION: '3.7' + # PYTHON_ARCH: '64' + # RUN_SMOKE: 1 + # NOSE_PARAMETERIZED_NO_WARN: 1 + # INSTALL_PY_36_PIP: 1 + # INSTALL_PY_38_PIP: 1 + # AWS_S3: 'AWS_S3_37' + # AWS_ECR: 'AWS_ECR_37' + # APPVEYOR_CONSOLE_DISABLE_PTY: true - PYTHON_HOME: "C:\\Python38-x64" PYTHON_VERSION: '3.8' From 50ef1244b60aae104bd5eff6430338f417821c4d Mon Sep 17 00:00:00 2001 From: Raymond Wang <14915548+wchengru@users.noreply.github.com> Date: Wed, 21 Jul 2021 15:10:28 -0700 Subject: [PATCH 23/24] chore: bump SAM CLI version to 1.27.0 (#3101) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index 1c484dccfd..15c38c4850 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.26.0" +__version__ = "1.27.0" From 1c42df379b442bd8fa16fcc6090035bdc884aa56 Mon Sep 17 00:00:00 2001 From: Tarun Date: Wed, 21 Jul 2021 16:55:04 -0700 Subject: [PATCH 24/24] Add pipeline to pyinstaller (#3103) * Adding pipeline to pyinstaller. Co-authored-by: Tarun Mall --- installer/pyinstaller/hook-samcli.py | 3 +++ samcli/__init__.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/installer/pyinstaller/hook-samcli.py b/installer/pyinstaller/hook-samcli.py index 2a68112a8a..72a939a9b2 100644 --- a/installer/pyinstaller/hook-samcli.py +++ b/installer/pyinstaller/hook-samcli.py @@ -13,6 +13,9 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", + "samcli.commands.pipeline.pipeline", + "samcli.commands.pipeline.init", + "samcli.commands.pipeline.bootstrap", # default hidden import 'pkg_resources.py2_warn' is added # since pyInstaller 4.0. "pkg_resources.py2_warn", diff --git a/samcli/__init__.py b/samcli/__init__.py index 15c38c4850..572d288049 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.27.0" +__version__ = "1.27.1"
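The final patch registers the new pipeline command modules as PyInstaller hidden imports: SAM CLI loads its click commands dynamically, so PyInstaller's static analysis cannot discover them, and any module left off this list would be missing from the frozen binary. The hook mechanism itself is small; a minimal sketch of such a hook file, mirroring the entries added above (illustrative only):

# hook-samcli.py (sketch): PyInstaller reads the hiddenimports list from
# hook files and bundles each named module even though no static import
# statement references it.
hiddenimports = [
    "samcli.commands.pipeline.pipeline",
    "samcli.commands.pipeline.init",
    "samcli.commands.pipeline.bootstrap",
]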